mirror of
https://github.com/ansible-collections/community.general.git
synced 2026-05-01 02:43:16 +00:00
Compare commits
57 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
5a54ddfab9 | ||
|
|
e1576ca00d | ||
|
|
1aa26662ef | ||
|
|
4b9696023a | ||
|
|
8a95fe8b00 | ||
|
|
a389969ace | ||
|
|
3221b25393 | ||
|
|
cc3fefd325 | ||
|
|
90c278ad87 | ||
|
|
5ece46c56e | ||
|
|
f158b6e6c1 | ||
|
|
2d84387d84 | ||
|
|
12618ddbd4 | ||
|
|
7fac03ec56 | ||
|
|
199e53112c | ||
|
|
f8237ce76d | ||
|
|
8a9d18cc86 | ||
|
|
b7b69d918a | ||
|
|
a3f08377b2 | ||
|
|
4c9c8e0514 | ||
|
|
3911b83145 | ||
|
|
20e1d7c08b | ||
|
|
24aa8afde8 | ||
|
|
71c6ec0b00 | ||
|
|
469e32e15b | ||
|
|
ebfb46aa78 | ||
|
|
fa2d2d6971 | ||
|
|
a1429d0266 | ||
|
|
3077ac770f | ||
|
|
7813cd751a | ||
|
|
4461c18957 | ||
|
|
34cf93a538 | ||
|
|
b56539f17e | ||
|
|
167d4bae90 | ||
|
|
de85c11bd1 | ||
|
|
d0731b111c | ||
|
|
7cd96ef3b6 | ||
|
|
b7a44a593e | ||
|
|
c413963ecb | ||
|
|
4f7d44aa10 | ||
|
|
56055d4f1e | ||
|
|
3fa4a9c073 | ||
|
|
1552bae77b | ||
|
|
a9cad80a36 | ||
|
|
fc79283662 | ||
|
|
0d8ea31781 | ||
|
|
7ac14f964b | ||
|
|
95d725a3cc | ||
|
|
95de8bd39d | ||
|
|
ecbdaca971 | ||
|
|
54754f7e81 | ||
|
|
bd15741647 | ||
|
|
fa05ca3f63 | ||
|
|
29992f1fbf | ||
|
|
34ab07865f | ||
|
|
5fa1fc65ca | ||
|
|
c0bb56c454 |
4
.github/BOTMETA.yml
vendored
4
.github/BOTMETA.yml
vendored
@@ -259,7 +259,7 @@ files:
|
||||
$modules/cloud/lxc/lxc_container.py:
|
||||
maintainers: cloudnull
|
||||
$modules/cloud/lxd/:
|
||||
maintainers: hnakamur
|
||||
ignore: hnakamur
|
||||
$modules/cloud/memset/:
|
||||
maintainers: glitchcrab
|
||||
$modules/cloud/misc/cloud_init_data_facts.py:
|
||||
@@ -576,7 +576,7 @@ files:
|
||||
$modules/net_tools/nmcli.py:
|
||||
maintainers: alcamie101
|
||||
$modules/net_tools/snmp_facts.py:
|
||||
maintainers: ogenstad bigmstone ujwalkomarla
|
||||
maintainers: ogenstad ujwalkomarla
|
||||
$modules/notification/osx_say.py:
|
||||
maintainers: ansible mpdehaan
|
||||
labels: _osx_say
|
||||
|
||||
133
CHANGELOG.rst
133
CHANGELOG.rst
@@ -5,6 +5,139 @@ Community General Release Notes
|
||||
.. contents:: Topics
|
||||
|
||||
|
||||
v1.3.5
|
||||
======
|
||||
|
||||
Release Summary
|
||||
---------------
|
||||
|
||||
Regular bugfix release.
|
||||
|
||||
Bugfixes
|
||||
--------
|
||||
|
||||
- dnsmadeeasy - fix HTTP 400 errors when creating a TXT record (https://github.com/ansible-collections/community.general/issues/1237).
|
||||
- docker_container - allow IPv6 zones (RFC 4007) in bind IPs (https://github.com/ansible-collections/community.docker/pull/66).
|
||||
- docker_image - fix crash on loading images with versions of Docker SDK for Python before 2.5.0 (https://github.com/ansible-collections/community.docker/issues/72, https://github.com/ansible-collections/community.docker/pull/73).
|
||||
- homebrew - add default search path for ``brew`` on Apple silicon hardware (https://github.com/ansible-collections/community.general/pull/1679).
|
||||
- homebrew_cask - add default search path for ``brew`` on Apple silicon hardware (https://github.com/ansible-collections/community.general/pull/1679).
|
||||
- homebrew_tap - add default search path for ``brew`` on Apple silicon hardware (https://github.com/ansible-collections/community.general/pull/1679).
|
||||
- lldp - use ``get_bin_path`` to locate the ``lldpctl`` executable (https://github.com/ansible-collections/community.general/pull/1643).
|
||||
- onepassword lookup plugin - updated to support password items, which place the password field directly in the payload's ``details`` attribute (https://github.com/ansible-collections/community.general/pull/1610).
|
||||
- passwordstore lookup plugin - fix compatibility with gopass when used with ``create=true``. While pass returns 1 on a non-existent password, gopass returns 10, or 11, depending on whether a similar named password was stored. We now just check standard output and that the return code is not zero (https://github.com/ansible-collections/community.general/pull/1589).
|
||||
- terraform - improve result code checking when executing terraform commands (https://github.com/ansible-collections/community.general/pull/1632).
|
||||
|
||||
v1.3.4
|
||||
======
|
||||
|
||||
Release Summary
|
||||
---------------
|
||||
|
||||
Bugfix/security release that addresses CVE-2021-20180.
|
||||
|
||||
Security Fixes
|
||||
--------------
|
||||
|
||||
- bitbucket_pipeline_variable - **CVE-2021-20180** - hide user sensitive information which is marked as ``secured`` from logging into the console (https://github.com/ansible-collections/community.general/pull/1635).
|
||||
|
||||
Bugfixes
|
||||
--------
|
||||
|
||||
- npm - handle json decode exception while parsing command line output (https://github.com/ansible-collections/community.general/issues/1614).
|
||||
|
||||
v1.3.3
|
||||
======
|
||||
|
||||
Release Summary
|
||||
---------------
|
||||
|
||||
Bugfix/security release that addresses CVE-2021-20178.
|
||||
|
||||
Major Changes
|
||||
-------------
|
||||
|
||||
- For community.general 2.0.0, the kubevirt modules will be moved to the `community.kubevirt <https://galaxy.ansible.com/community/kubevirt>`_ collection.
|
||||
A redirection will be inserted so that users using ansible-base 2.10 or newer do not have to change anything.
|
||||
|
||||
If you use Ansible 2.9 and explicitly use kubevirt modules from this collection, you will need to adjust your playbooks and roles to use FQCNs starting with ``community.kubevirt.`` instead of ``community.general.``,
|
||||
for example replace ``community.general.kubevirt_vm`` in a task by ``community.kubevirt.kubevirt_vm``.
|
||||
|
||||
If you use ansible-base and installed ``community.general`` manually and rely on the kubevirt modules, you have to make sure to install the ``community.kubevirt`` collection as well.
|
||||
If you are using FQCNs, for example ``community.general.kubevirt_vm`` instead of ``kubevirt_vm``, it will continue working, but we still recommend to adjust the FQCNs as well.
|
||||
|
||||
Security Fixes
|
||||
--------------
|
||||
|
||||
- snmp_facts - **CVE-2021-20178** - hide user sensitive information such as ``privkey`` and ``authkey`` from logging into the console (https://github.com/ansible-collections/community.general/pull/1621).
|
||||
|
||||
Bugfixes
|
||||
--------
|
||||
|
||||
- terraform - fix ``init_reconfigure`` option for proper CLI args (https://github.com/ansible-collections/community.general/pull/1620).
|
||||
|
||||
v1.3.2
|
||||
======
|
||||
|
||||
Release Summary
|
||||
---------------
|
||||
|
||||
Regular bugfix release.
|
||||
|
||||
Major Changes
|
||||
-------------
|
||||
|
||||
- For community.general 2.0.0, the Google modules will be moved to the `community.google <https://galaxy.ansible.com/community/google>`_ collection.
|
||||
A redirection will be inserted so that users using ansible-base 2.10 or newer do not have to change anything.
|
||||
|
||||
If you use Ansible 2.9 and explicitly use Google modules from this collection, you will need to adjust your playbooks and roles to use FQCNs starting with ``community.google.`` instead of ``community.general.``,
|
||||
for example replace ``community.general.gcpubsub`` in a task by ``community.google.gcpubsub``.
|
||||
|
||||
If you use ansible-base and installed ``community.general`` manually and rely on the Google modules, you have to make sure to install the ``community.google`` collection as well.
|
||||
If you are using FQCNs, for example ``community.general.gcpubsub`` instead of ``gcpubsub``, it will continue working, but we still recommend to adjust the FQCNs as well.
|
||||
- For community.general 2.0.0, the OC connection plugin will be moved to the `community.okd <https://galaxy.ansible.com/community/okd>`_ collection.
|
||||
A redirection will be inserted so that users using ansible-base 2.10 or newer do not have to change anything.
|
||||
|
||||
If you use Ansible 2.9 and explicitly use OC connection plugin from this collection, you will need to adjust your playbooks and roles to use FQCNs ``community.okd.oc`` instead of ``community.general.oc``.
|
||||
|
||||
If you use ansible-base and installed ``community.general`` manually and rely on the OC connection plugin, you have to make sure to install the ``community.okd`` collection as well.
|
||||
If you are using FQCNs, in other words ``community.general.oc`` instead of ``oc``, it will continue working, but we still recommend to adjust this FQCN as well.
|
||||
- For community.general 2.0.0, the hashi_vault lookup plugin will be moved to the `community.hashi_vault <https://galaxy.ansible.com/community/hashi_vault>`_ collection.
|
||||
A redirection will be inserted so that users using ansible-base 2.10 or newer do not have to change anything.
|
||||
|
||||
If you use Ansible 2.9 and explicitly use hashi_vault lookup plugin from this collection, you will need to adjust your playbooks and roles to use FQCNs ``community.hashi_vault.hashi_vault`` instead of ``community.general.hashi_vault``.
|
||||
|
||||
If you use ansible-base and installed ``community.general`` manually and rely on the hashi_vault lookup plugin, you have to make sure to install the ``community.hashi_vault`` collection as well.
|
||||
If you are using FQCNs, in other words ``community.general.hashi_vault`` instead of ``hashi_vault``, it will continue working, but we still recommend to adjust this FQCN as well.
|
||||
|
||||
Minor Changes
|
||||
-------------
|
||||
|
||||
- homebrew_cask - Homebrew will be deprecating use of ``brew cask`` commands as of version 2.6.0, see https://brew.sh/2020/12/01/homebrew-2.6.0/. Added logic to stop using ``brew cask`` for brew version >= 2.6.0 (https://github.com/ansible-collections/community.general/pull/1481).
|
||||
- jira - added the traceback output to ``fail_json()`` calls deriving from exceptions (https://github.com/ansible-collections/community.general/pull/1536).
|
||||
|
||||
Bugfixes
|
||||
--------
|
||||
|
||||
- docker_image - if ``push=true`` is used with ``repository``, and the image does not need to be tagged, still push. This can happen if ``repository`` and ``name`` are equal (https://github.com/ansible-collections/community.docker/issues/52, https://github.com/ansible-collections/community.docker/pull/53).
|
||||
- docker_image - report error when loading a broken archive that contains no image (https://github.com/ansible-collections/community.docker/issues/46, https://github.com/ansible-collections/community.docker/pull/55).
|
||||
- docker_image - report error when the loaded archive does not contain the specified image (https://github.com/ansible-collections/community.docker/issues/41, https://github.com/ansible-collections/community.docker/pull/55).
|
||||
- jira - ``fetch`` and ``search`` no longer indicate that something changed (https://github.com/ansible-collections/community.general/pull/1536).
|
||||
- jira - ensured parameter ``issue`` is mandatory for operation ``transition`` (https://github.com/ansible-collections/community.general/pull/1536).
|
||||
- jira - module no longer incorrectly reports change for information gathering operations (https://github.com/ansible-collections/community.general/pull/1536).
|
||||
- jira - replaced custom parameter validation with ``required_if`` (https://github.com/ansible-collections/community.general/pull/1536).
|
||||
- launchd - handle deprecated APIs like ``readPlist`` and ``writePlist`` in ``plistlib`` (https://github.com/ansible-collections/community.general/issues/1552).
|
||||
- ldap_search - the module no longer incorrectly reports a change (https://github.com/ansible-collections/community.general/issues/1040).
|
||||
- make - fixed ``make`` parameter used for check mode when running a non-GNU ``make`` (https://github.com/ansible-collections/community.general/pull/1574).
|
||||
- monit - add support for all monit service checks (https://github.com/ansible-collections/community.general/pull/1532).
|
||||
- nios_member - fix Python 3 compatibility with nios api ``member_normalize`` function (https://github.com/ansible-collections/community.general/issues/1526).
|
||||
- nmcli - remove ``bridge-slave`` from list of IP based connections (https://github.com/ansible-collections/community.general/issues/1500).
|
||||
- pamd - added logic to retain the comment line (https://github.com/ansible-collections/community.general/issues/1394).
|
||||
- passwordstore lookup plugin - always use explicit ``show`` command to retrieve password. This ensures compatibility with ``gopass`` and avoids problems when password names equal ``pass`` commands (https://github.com/ansible-collections/community.general/pull/1493).
|
||||
- rhn_channel - Python 2.7.5 fails if the certificate should not be validated. Fixed this by creating the correct ``ssl_context`` (https://github.com/ansible-collections/community.general/pull/470).
|
||||
- sendgrid - update documentation and warn user about sendgrid Python library version (https://github.com/ansible-collections/community.general/issues/1553).
|
||||
- syslogger - update ``syslog.openlog`` API call for older Python versions, and improve error handling (https://github.com/ansible-collections/community.general/issues/953).
|
||||
- yaml callback plugin - do not remove non-ASCII Unicode characters from multiline string output (https://github.com/ansible-collections/community.general/issues/1519).
|
||||
|
||||
v1.3.1
|
||||
======
|
||||
|
||||
|
||||
@@ -1627,3 +1627,226 @@ releases:
|
||||
- fix_parsing_array_values_in_osx_defaults.yml
|
||||
- nios_host_record-fix-aliases-removal.yml
|
||||
release_date: '2020-12-21'
|
||||
1.3.2:
|
||||
changes:
|
||||
bugfixes:
|
||||
- docker_image - if ``push=true`` is used with ``repository``, and the image
|
||||
does not need to be tagged, still push. This can happen if ``repository``
|
||||
and ``name`` are equal (https://github.com/ansible-collections/community.docker/issues/52,
|
||||
https://github.com/ansible-collections/community.docker/pull/53).
|
||||
- docker_image - report error when loading a broken archive that contains no
|
||||
image (https://github.com/ansible-collections/community.docker/issues/46,
|
||||
https://github.com/ansible-collections/community.docker/pull/55).
|
||||
- docker_image - report error when the loaded archive does not contain the specified
|
||||
image (https://github.com/ansible-collections/community.docker/issues/41,
|
||||
https://github.com/ansible-collections/community.docker/pull/55).
|
||||
- jira - ``fetch`` and ``search`` no longer indicate that something changed
|
||||
(https://github.com/ansible-collections/community.general/pull/1536).
|
||||
- jira - ensured parameter ``issue`` is mandatory for operation ``transition``
|
||||
(https://github.com/ansible-collections/community.general/pull/1536).
|
||||
- jira - module no longer incorrectly reports change for information gathering
|
||||
operations (https://github.com/ansible-collections/community.general/pull/1536).
|
||||
- jira - replaced custom parameter validation with ``required_if`` (https://github.com/ansible-collections/community.general/pull/1536).
|
||||
- launchd - handle deprecated APIs like ``readPlist`` and ``writePlist`` in
|
||||
``plistlib`` (https://github.com/ansible-collections/community.general/issues/1552).
|
||||
- ldap_search - the module no longer incorrectly reports a change (https://github.com/ansible-collections/community.general/issues/1040).
|
||||
- make - fixed ``make`` parameter used for check mode when running a non-GNU
|
||||
``make`` (https://github.com/ansible-collections/community.general/pull/1574).
|
||||
- monit - add support for all monit service checks (https://github.com/ansible-collections/community.general/pull/1532).
|
||||
- nios_member - fix Python 3 compatibility with nios api ``member_normalize``
|
||||
function (https://github.com/ansible-collections/community.general/issues/1526).
|
||||
- nmcli - remove ``bridge-slave`` from list of IP based connections (https://github.com/ansible-collections/community.general/issues/1500).
|
||||
- pamd - added logic to retain the comment line (https://github.com/ansible-collections/community.general/issues/1394).
|
||||
- passwordstore lookup plugin - always use explicit ``show`` command to retrieve
|
||||
password. This ensures compatibility with ``gopass`` and avoids problems when
|
||||
password names equal ``pass`` commands (https://github.com/ansible-collections/community.general/pull/1493).
|
||||
- rhn_channel - Python 2.7.5 fails if the certificate should not be validated.
|
||||
Fixed this by creating the correct ``ssl_context`` (https://github.com/ansible-collections/community.general/pull/470).
|
||||
- sendgrid - update documentation and warn user about sendgrid Python library
|
||||
version (https://github.com/ansible-collections/community.general/issues/1553).
|
||||
- syslogger - update ``syslog.openlog`` API call for older Python versions,
|
||||
and improve error handling (https://github.com/ansible-collections/community.general/issues/953).
|
||||
- yaml callback plugin - do not remove non-ASCII Unicode characters from multiline
|
||||
string output (https://github.com/ansible-collections/community.general/issues/1519).
|
||||
major_changes:
|
||||
- 'For community.general 2.0.0, the Google modules will be moved to the `community.google
|
||||
<https://galaxy.ansible.com/community/google>`_ collection.
|
||||
|
||||
A redirection will be inserted so that users using ansible-base 2.10 or newer
|
||||
do not have to change anything.
|
||||
|
||||
|
||||
If you use Ansible 2.9 and explicitly use Google modules from this collection,
|
||||
you will need to adjust your playbooks and roles to use FQCNs starting with
|
||||
``community.google.`` instead of ``community.general.``,
|
||||
|
||||
for example replace ``community.general.gcpubsub`` in a task by ``community.google.gcpubsub``.
|
||||
|
||||
|
||||
If you use ansible-base and installed ``community.general`` manually and rely
|
||||
on the Google modules, you have to make sure to install the ``community.google``
|
||||
collection as well.
|
||||
|
||||
If you are using FQCNs, for example ``community.general.gcpubsub`` instead
|
||||
of ``gcpubsub``, it will continue working, but we still recommend to adjust
|
||||
the FQCNs as well.
|
||||
|
||||
'
|
||||
- 'For community.general 2.0.0, the OC connection plugin will be moved to the
|
||||
`community.okd <https://galaxy.ansible.com/community/okd>`_ collection.
|
||||
|
||||
A redirection will be inserted so that users using ansible-base 2.10 or newer
|
||||
do not have to change anything.
|
||||
|
||||
|
||||
If you use Ansible 2.9 and explicitly use OC connection plugin from this collection,
|
||||
you will need to adjust your playbooks and roles to use FQCNs ``community.okd.oc``
|
||||
instead of ``community.general.oc``.
|
||||
|
||||
|
||||
If you use ansible-base and installed ``community.general`` manually and rely
|
||||
on the OC connection plugin, you have to make sure to install the ``community.okd``
|
||||
collection as well.
|
||||
|
||||
If you are using FQCNs, in other words ``community.general.oc`` instead of
|
||||
``oc``, it will continue working, but we still recommend to adjust this FQCN
|
||||
as well.
|
||||
|
||||
'
|
||||
- 'For community.general 2.0.0, the hashi_vault lookup plugin will be moved
|
||||
to the `community.hashi_vault <https://galaxy.ansible.com/community/hashi_vault>`_
|
||||
collection.
|
||||
|
||||
A redirection will be inserted so that users using ansible-base 2.10 or newer
|
||||
do not have to change anything.
|
||||
|
||||
|
||||
If you use Ansible 2.9 and explicitly use hashi_vault lookup plugin from this
|
||||
collection, you will need to adjust your playbooks and roles to use FQCNs
|
||||
``community.hashi_vault.hashi_vault`` instead of ``community.general.hashi_vault``.
|
||||
|
||||
|
||||
If you use ansible-base and installed ``community.general`` manually and rely
|
||||
on the hashi_vault lookup plugin, you have to make sure to install the ``community.hashi_vault``
|
||||
collection as well.
|
||||
|
||||
If you are using FQCNs, in other words ``community.general.hashi_vault`` instead
|
||||
of ``hashi_vault``, it will continue working, but we still recommend to adjust
|
||||
this FQCN as well.
|
||||
|
||||
'
|
||||
minor_changes:
|
||||
- homebrew_cask - Homebrew will be deprecating use of ``brew cask`` commands
|
||||
as of version 2.6.0, see https://brew.sh/2020/12/01/homebrew-2.6.0/. Added
|
||||
logic to stop using ``brew cask`` for brew version >= 2.6.0 (https://github.com/ansible-collections/community.general/pull/1481).
|
||||
- jira - added the traceback output to ``fail_json()`` calls deriving from exceptions
|
||||
(https://github.com/ansible-collections/community.general/pull/1536).
|
||||
release_summary: Regular bugfix release.
|
||||
fragments:
|
||||
- 1.3.2.yml
|
||||
- 1040-ldap_search-changed-must-be-false.yaml
|
||||
- 1394-pamd-removing-comments.yaml
|
||||
- 1481-deprecated-brew-cask-command.yaml
|
||||
- 1493-fix_passwordstore.py_to_be_compatible_with_gopass_versions.yml
|
||||
- 1517-bridge-slave-from-list-of-ip-based-connections.yml
|
||||
- 1522-yaml-callback-unicode.yml
|
||||
- 1527-fix-nios-api-member-normalize.yaml
|
||||
- 1532-monit-support-all-services.yaml
|
||||
- 1552_launchd.yml
|
||||
- 1553_sendgrid.yml
|
||||
- 1574-make-question.yaml
|
||||
- 470-spacewalk-legacy-python-certificate-validation.yaml
|
||||
- 953_syslogger.yml
|
||||
- community.docker-53-docker_image-tag-push.yml
|
||||
- community.docker-55-docker_image-loading.yml
|
||||
- google-migration.yml
|
||||
- hashi_vault-migration.yml
|
||||
- jira_improvements.yaml
|
||||
- oc-migration.yml
|
||||
release_date: '2021-01-04'
|
||||
1.3.3:
|
||||
changes:
|
||||
bugfixes:
|
||||
- terraform - fix ``init_reconfigure`` option for proper CLI args (https://github.com/ansible-collections/community.general/pull/1620).
|
||||
major_changes:
|
||||
- 'For community.general 2.0.0, the kubevirt modules will be moved to the `community.kubevirt
|
||||
<https://galaxy.ansible.com/community/kubevirt>`_ collection.
|
||||
|
||||
A redirection will be inserted so that users using ansible-base 2.10 or newer
|
||||
do not have to change anything.
|
||||
|
||||
|
||||
If you use Ansible 2.9 and explicitly use kubevirt modules from this collection,
|
||||
you will need to adjust your playbooks and roles to use FQCNs starting with
|
||||
``community.kubevirt.`` instead of ``community.general.``,
|
||||
|
||||
for example replace ``community.general.kubevirt_vm`` in a task by ``community.kubevirt.kubevirt_vm``.
|
||||
|
||||
|
||||
If you use ansible-base and installed ``community.general`` manually and rely
|
||||
on the kubevirt modules, you have to make sure to install the ``community.kubevirt``
|
||||
collection as well.
|
||||
|
||||
If you are using FQCNs, for example ``community.general.kubevirt_vm`` instead
|
||||
of ``kubevirt_vm``, it will continue working, but we still recommend to adjust
|
||||
the FQCNs as well.
|
||||
|
||||
'
|
||||
release_summary: Bugfix/security release that addresses CVE-2021-20178.
|
||||
security_fixes:
|
||||
- snmp_facts - **CVE-2021-20178** - hide user sensitive information such as
|
||||
``privkey`` and ``authkey`` from logging into the console (https://github.com/ansible-collections/community.general/pull/1621).
|
||||
fragments:
|
||||
- 1.3.3.yml
|
||||
- 1620-terraform_init_reconfigure_fix.yml
|
||||
- kubevirt-migration.yml
|
||||
- snmp_facts.yml
|
||||
release_date: '2021-01-13'
|
||||
1.3.4:
|
||||
changes:
|
||||
bugfixes:
|
||||
- npm - handle json decode exception while parsing command line output (https://github.com/ansible-collections/community.general/issues/1614).
|
||||
release_summary: Bugfix/security release that addresses CVE-2021-20180.
|
||||
security_fixes:
|
||||
- bitbucket_pipeline_variable - **CVE-2021-20180** - hide user sensitive information
|
||||
which is marked as ``secured`` from logging into the console (https://github.com/ansible-collections/community.general/pull/1635).
|
||||
fragments:
|
||||
- 1.3.4.yml
|
||||
- 1614_npm.yml
|
||||
- cve_bitbucket_pipeline_variable.yml
|
||||
release_date: '2021-01-14'
|
||||
1.3.5:
|
||||
changes:
|
||||
bugfixes:
|
||||
- dnsmadeeasy - fix HTTP 400 errors when creating a TXT record (https://github.com/ansible-collections/community.general/issues/1237).
|
||||
- docker_container - allow IPv6 zones (RFC 4007) in bind IPs (https://github.com/ansible-collections/community.docker/pull/66).
|
||||
- docker_image - fix crash on loading images with versions of Docker SDK for
|
||||
Python before 2.5.0 (https://github.com/ansible-collections/community.docker/issues/72,
|
||||
https://github.com/ansible-collections/community.docker/pull/73).
|
||||
- homebrew - add default search path for ``brew`` on Apple silicon hardware
|
||||
(https://github.com/ansible-collections/community.general/pull/1679).
|
||||
- homebrew_cask - add default search path for ``brew`` on Apple silicon hardware
|
||||
(https://github.com/ansible-collections/community.general/pull/1679).
|
||||
- homebrew_tap - add default search path for ``brew`` on Apple silicon hardware
|
||||
(https://github.com/ansible-collections/community.general/pull/1679).
|
||||
- lldp - use ``get_bin_path`` to locate the ``lldpctl`` executable (https://github.com/ansible-collections/community.general/pull/1643).
|
||||
- onepassword lookup plugin - updated to support password items, which place
|
||||
the password field directly in the payload's ``details`` attribute (https://github.com/ansible-collections/community.general/pull/1610).
|
||||
- passwordstore lookup plugin - fix compatibility with gopass when used with
|
||||
``create=true``. While pass returns 1 on a non-existent password, gopass returns
|
||||
10, or 11, depending on whether a similar named password was stored. We now
|
||||
just check standard output and that the return code is not zero (https://github.com/ansible-collections/community.general/pull/1589).
|
||||
- terraform - improve result code checking when executing terraform commands
|
||||
(https://github.com/ansible-collections/community.general/pull/1632).
|
||||
release_summary: Regular bugfix release.
|
||||
fragments:
|
||||
- 1.3.5.yml
|
||||
- 1589-passwordstore-fix-passwordstore.py-to-be-compatible-with-gopass.yaml
|
||||
- 1610-bugfix-onepassword-lookup-plugin.yaml
|
||||
- 1632-using_check_rc_in_terraform.yml
|
||||
- 1654-dnsmadeeasy-http-400-fixes.yaml
|
||||
- 1679-homebrew_search_path.yml
|
||||
- community.docker-66-ipv6-zones.yml
|
||||
- community.docker-73-docker_image-fix-old-docker-py-version.yml
|
||||
- lldp-use-get_bin_path-to-locate-the-lldpctl-executable.yaml
|
||||
release_date: '2021-01-26'
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
namespace: community
|
||||
name: general
|
||||
version: 1.3.1
|
||||
version: 1.3.5
|
||||
readme: README.md
|
||||
authors:
|
||||
- Ansible (https://github.com/ansible)
|
||||
|
||||
@@ -50,7 +50,7 @@ def my_represent_scalar(self, tag, value, style=None):
|
||||
# ...no trailing space
|
||||
value = value.rstrip()
|
||||
# ...and non-printable characters
|
||||
value = ''.join(x for x in value if x in string.printable)
|
||||
value = ''.join(x for x in value if x in string.printable or ord(x) >= 0xA0)
|
||||
# ...tabs prevent blocks from expanding
|
||||
value = value.expandtabs()
|
||||
# ...and odd bits of whitespace
|
||||
|
||||
@@ -2,18 +2,16 @@
|
||||
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||
|
||||
from __future__ import (absolute_import, division, print_function)
|
||||
|
||||
|
||||
__metaclass__ = type
|
||||
|
||||
DOCUMENTATION = '''
|
||||
DOCUMENTATION = r'''
|
||||
name: online
|
||||
plugin_type: inventory
|
||||
author:
|
||||
- Remy Leone (@sieben)
|
||||
short_description: Online inventory source
|
||||
short_description: Scaleway (previously Online SAS or Online.net) inventory source
|
||||
description:
|
||||
- Get inventory hosts from Online
|
||||
- Get inventory hosts from Scaleway (previously Online SAS or Online.net).
|
||||
options:
|
||||
plugin:
|
||||
description: token that ensures this is a source file for the 'online' plugin.
|
||||
@@ -45,7 +43,7 @@ DOCUMENTATION = '''
|
||||
- rpn
|
||||
'''
|
||||
|
||||
EXAMPLES = '''
|
||||
EXAMPLES = r'''
|
||||
# online_inventory.yml file in YAML format
|
||||
# Example command line: ansible-inventory --list -i online_inventory.yml
|
||||
|
||||
|
||||
@@ -187,8 +187,63 @@ class OnePass(object):
|
||||
return rc, out, err
|
||||
|
||||
def _parse_field(self, data_json, field_name, section_title=None):
|
||||
"""
|
||||
Retrieves the desired field from the `op` response payload
|
||||
|
||||
When the item is a `password` type, the password is a key within the `details` key:
|
||||
|
||||
$ op get item 'test item' | jq
|
||||
{
|
||||
[...]
|
||||
"templateUuid": "005",
|
||||
"details": {
|
||||
"notesPlain": "",
|
||||
"password": "foobar",
|
||||
"passwordHistory": [],
|
||||
"sections": [
|
||||
{
|
||||
"name": "linked items",
|
||||
"title": "Related Items"
|
||||
}
|
||||
]
|
||||
},
|
||||
[...]
|
||||
}
|
||||
|
||||
However, when the item is a `login` type, the password is within a fields array:
|
||||
|
||||
$ op get item 'test item' | jq
|
||||
{
|
||||
[...]
|
||||
"details": {
|
||||
"fields": [
|
||||
{
|
||||
"designation": "username",
|
||||
"name": "username",
|
||||
"type": "T",
|
||||
"value": "foo"
|
||||
},
|
||||
{
|
||||
"designation": "password",
|
||||
"name": "password",
|
||||
"type": "P",
|
||||
"value": "bar"
|
||||
}
|
||||
],
|
||||
[...]
|
||||
},
|
||||
[...]
|
||||
"""
|
||||
data = json.loads(data_json)
|
||||
if section_title is None:
|
||||
# https://github.com/ansible-collections/community.general/pull/1610:
|
||||
# check the details dictionary for `field_name` and return it immediately if it exists
|
||||
# when the entry is a "password" instead of a "login" item, the password field is a key
|
||||
# in the `details` dictionary:
|
||||
if field_name in data['details']:
|
||||
return data['details'][field_name]
|
||||
|
||||
# when the field is not found above, iterate through the fields list in the object details
|
||||
for field_data in data['details'].get('fields', []):
|
||||
if field_data.get('name', '').lower() == field_name.lower():
|
||||
return field_data.get('value', '')
|
||||
|
||||
@@ -204,7 +204,7 @@ class LookupModule(LookupBase):
|
||||
def check_pass(self):
|
||||
try:
|
||||
self.passoutput = to_text(
|
||||
check_output2(["pass", self.passname], env=self.env),
|
||||
check_output2(["pass", "show", self.passname], env=self.env),
|
||||
errors='surrogate_or_strict'
|
||||
).splitlines()
|
||||
self.password = self.passoutput[0]
|
||||
@@ -214,7 +214,7 @@ class LookupModule(LookupBase):
|
||||
name, value = line.split(':', 1)
|
||||
self.passdict[name.strip()] = value.strip()
|
||||
except (subprocess.CalledProcessError) as e:
|
||||
if e.returncode == 1 and 'not in the password store' in e.output:
|
||||
if e.returncode != 0 and 'not in the password store' in e.output:
|
||||
# if pass returns 1 and return string contains 'is not in the password store.'
|
||||
# We need to determine if this is valid or Error.
|
||||
if not self.paramvals['create']:
|
||||
|
||||
@@ -75,7 +75,13 @@ EXAMPLES = r"""
|
||||
vars:
|
||||
secret: "{{ lookup('community.general.tss', 1) }}"
|
||||
tasks:
|
||||
- ansible.builtin.debug: msg="the password is {{ (secret['items'] | items2dict(key_name='slug', value_name='itemValue'))['password'] }}"
|
||||
- ansible.builtin.debug:
|
||||
msg: >
|
||||
the password is {{
|
||||
(secret['items']
|
||||
| items2dict(key_name='slug',
|
||||
value_name='itemValue'))['password']
|
||||
}}
|
||||
"""
|
||||
|
||||
from ansible.errors import AnsibleError, AnsibleOptionsError
|
||||
|
||||
@@ -144,7 +144,7 @@ def member_normalize(member_spec):
|
||||
'pre_provisioning', 'network_setting', 'v6_network_setting',
|
||||
'ha_port_setting', 'lan_port_setting', 'lan2_physical_setting',
|
||||
'lan_ha_port_setting', 'mgmt_network_setting', 'v6_mgmt_network_setting']
|
||||
for key in member_spec.keys():
|
||||
for key in list(member_spec.keys()):
|
||||
if key in member_elements and member_spec[key] is not None:
|
||||
member_spec[key] = member_spec[key][0]
|
||||
if isinstance(member_spec[key], dict):
|
||||
|
||||
@@ -1721,7 +1721,7 @@ class TaskParameters(DockerBaseClass):
|
||||
elif p_len == 3:
|
||||
# We only allow IPv4 and IPv6 addresses for the bind address
|
||||
ipaddr = parts[0]
|
||||
if not re.match(r'^[0-9]+\.[0-9]+\.[0-9]+\.[0-9]+$', parts[0]) and not re.match(r'^\[[0-9a-fA-F:]+\]$', ipaddr):
|
||||
if not re.match(r'^[0-9]+\.[0-9]+\.[0-9]+\.[0-9]+$', parts[0]) and not re.match(r'^\[[0-9a-fA-F:]+(?:|%[^\]/]+)\]$', ipaddr):
|
||||
self.fail(('Bind addresses for published ports must be IPv4 or IPv6 addresses, not hostnames. '
|
||||
'Use the dig lookup to resolve hostnames. (Found hostname: {0})').format(ipaddr))
|
||||
if re.match(r'^\[[0-9a-fA-F:]+\]$', ipaddr):
|
||||
|
||||
@@ -698,8 +698,8 @@ class ImageManager(DockerBaseClass):
|
||||
if image and image['Id'] == self.results['image']['Id']:
|
||||
self.results['changed'] = False
|
||||
|
||||
if push:
|
||||
self.push_image(repo, repo_tag)
|
||||
if push:
|
||||
self.push_image(repo, repo_tag)
|
||||
|
||||
def build_image(self):
|
||||
'''
|
||||
@@ -749,7 +749,7 @@ class ImageManager(DockerBaseClass):
|
||||
# line = json.loads(line)
|
||||
self.log(line, pretty_print=True)
|
||||
if "stream" in line or "status" in line:
|
||||
build_line = line.get("stream") or line.get("status")
|
||||
build_line = line.get("stream") or line.get("status") or ''
|
||||
build_output.append(build_line)
|
||||
|
||||
if line.get('error'):
|
||||
@@ -774,17 +774,73 @@ class ImageManager(DockerBaseClass):
|
||||
|
||||
:return: image dict
|
||||
'''
|
||||
# Load image(s) from file
|
||||
load_output = []
|
||||
has_output = False
|
||||
try:
|
||||
self.log("Opening image %s" % self.load_path)
|
||||
with open(self.load_path, 'rb') as image_tar:
|
||||
self.log("Loading image from %s" % self.load_path)
|
||||
self.client.load_image(image_tar)
|
||||
output = self.client.load_image(image_tar)
|
||||
if output is not None:
|
||||
# Old versions of Docker SDK of Python (before version 2.5.0) do not return anything.
|
||||
# (See https://github.com/docker/docker-py/commit/7139e2d8f1ea82340417add02090bfaf7794f159)
|
||||
# Note that before that commit, something else than None was returned, but that was also
|
||||
# only introduced in a commit that first appeared in 2.5.0 (see
|
||||
# https://github.com/docker/docker-py/commit/9e793806ff79559c3bc591d8c52a3bbe3cdb7350).
|
||||
# So the above check works for every released version of Docker SDK for Python.
|
||||
has_output = True
|
||||
for line in output:
|
||||
self.log(line, pretty_print=True)
|
||||
if "stream" in line or "status" in line:
|
||||
load_line = line.get("stream") or line.get("status") or ''
|
||||
load_output.append(load_line)
|
||||
else:
|
||||
if LooseVersion(docker_version) < LooseVersion('2.5.0'):
|
||||
self.client.module.warn(
|
||||
'The installed version of the Docker SDK for Python does not return the loading results'
|
||||
' from the Docker daemon. Therefore, we cannot verify whether the expected image was'
|
||||
' loaded, whether multiple images where loaded, or whether the load actually succeeded.'
|
||||
' If you are not stuck with Python 2.6, *please* upgrade to a version newer than 2.5.0'
|
||||
' (2.5.0 was released in August 2017).'
|
||||
)
|
||||
else:
|
||||
self.client.module.warn(
|
||||
'The API version of your Docker daemon is < 1.23, which does not return the image'
|
||||
' loading result from the Docker daemon. Therefore, we cannot verify whether the'
|
||||
' expected image was loaded, whether multiple images where loaded, or whether the load'
|
||||
' actually succeeded. You should consider upgrading your Docker daemon.'
|
||||
)
|
||||
except EnvironmentError as exc:
|
||||
if exc.errno == errno.ENOENT:
|
||||
self.fail("Error opening image %s - %s" % (self.load_path, str(exc)))
|
||||
self.fail("Error loading image %s - %s" % (self.name, str(exc)))
|
||||
self.client.fail("Error opening image %s - %s" % (self.load_path, str(exc)))
|
||||
self.client.fail("Error loading image %s - %s" % (self.name, str(exc)), stdout='\n'.join(load_output))
|
||||
except Exception as exc:
|
||||
self.fail("Error loading image %s - %s" % (self.name, str(exc)))
|
||||
self.client.fail("Error loading image %s - %s" % (self.name, str(exc)), stdout='\n'.join(load_output))
|
||||
|
||||
# Collect loaded images
|
||||
if has_output:
|
||||
# We can only do this when we actually got some output from Docker daemon
|
||||
loaded_images = set()
|
||||
for line in load_output:
|
||||
if line.startswith('Loaded image:'):
|
||||
loaded_images.add(line[len('Loaded image:'):].strip())
|
||||
|
||||
if not loaded_images:
|
||||
self.client.fail("Detected no loaded images. Archive potentially corrupt?", stdout='\n'.join(load_output))
|
||||
|
||||
expected_image = '%s:%s' % (self.name, self.tag)
|
||||
if expected_image not in loaded_images:
|
||||
self.client.fail(
|
||||
"The archive did not contain image '%s'. Instead, found %s." % (
|
||||
expected_image, ', '.join(["'%s'" % image for image in sorted(loaded_images)])),
|
||||
stdout='\n'.join(load_output))
|
||||
loaded_images.remove(expected_image)
|
||||
|
||||
if loaded_images:
|
||||
self.client.module.warn(
|
||||
"The archive contained more images than specified: %s" % (
|
||||
', '.join(["'%s'" % image for image in sorted(loaded_images)]), ))
|
||||
|
||||
return self.client.find_image(self.name, self.tag)
|
||||
|
||||
|
||||
@@ -1,19 +1,19 @@
|
||||
#!/usr/bin/python
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
# (c) 2014, Kevin Carter <kevin.carter@rackspace.com>
|
||||
# Copyright: (c) 2014, Kevin Carter <kevin.carter@rackspace.com>
|
||||
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||
|
||||
from __future__ import absolute_import, division, print_function
|
||||
__metaclass__ = type
|
||||
|
||||
|
||||
DOCUMENTATION = '''
|
||||
DOCUMENTATION = r'''
|
||||
---
|
||||
module: lxc_container
|
||||
short_description: Manage LXC Containers
|
||||
description:
|
||||
- Management of LXC containers
|
||||
- Management of LXC containers.
|
||||
author: "Kevin Carter (@cloudnull)"
|
||||
options:
|
||||
name:
|
||||
@@ -53,7 +53,7 @@ options:
|
||||
type: str
|
||||
vg_name:
|
||||
description:
|
||||
- If Backend store is lvm, specify the name of the volume group.
|
||||
- If backend store is lvm, specify the name of the volume group.
|
||||
type: str
|
||||
default: lxc
|
||||
thinpool:
|
||||
@@ -84,7 +84,7 @@ options:
|
||||
type: str
|
||||
lxc_path:
|
||||
description:
|
||||
- Place container under PATH
|
||||
- Place container under PATH.
|
||||
type: path
|
||||
container_log:
|
||||
description:
|
||||
@@ -109,26 +109,26 @@ options:
|
||||
default: INFO
|
||||
clone_name:
|
||||
description:
|
||||
- Name of the new cloned server. This is only used when state is
|
||||
clone.
|
||||
- Name of the new cloned server.
|
||||
- This is only used when state is clone.
|
||||
type: str
|
||||
clone_snapshot:
|
||||
description:
|
||||
- Create a snapshot a container when cloning. This is not supported
|
||||
by all container storage backends. Enabling this may fail if the
|
||||
backing store does not support snapshots.
|
||||
- Create a snapshot a container when cloning.
|
||||
- This is not supported by all container storage backends.
|
||||
- Enabling this may fail if the backing store does not support snapshots.
|
||||
type: bool
|
||||
default: 'no'
|
||||
archive:
|
||||
description:
|
||||
- Create an archive of a container. This will create a tarball of the
|
||||
running container.
|
||||
- Create an archive of a container.
|
||||
- This will create a tarball of the running container.
|
||||
type: bool
|
||||
default: 'no'
|
||||
archive_path:
|
||||
description:
|
||||
- Path the save the archived container. If the path does not exist
|
||||
the archive method will attempt to create it.
|
||||
- Path the save the archived container.
|
||||
- If the path does not exist the archive method will attempt to create it.
|
||||
type: path
|
||||
archive_compression:
|
||||
choices:
|
||||
@@ -149,16 +149,17 @@ options:
|
||||
- frozen
|
||||
- clone
|
||||
description:
|
||||
- Define the state of a container. If you clone a container using
|
||||
`clone_name` the newly cloned container created in a stopped state.
|
||||
The running container will be stopped while the clone operation is
|
||||
- Define the state of a container.
|
||||
- If you clone a container using I(clone_name) the newly cloned
|
||||
container created in a stopped state.
|
||||
- The running container will be stopped while the clone operation is
|
||||
happening and upon completion of the clone the original container
|
||||
state will be restored.
|
||||
type: str
|
||||
default: started
|
||||
container_config:
|
||||
description:
|
||||
- list of 'key=value' options to use when configuring a container.
|
||||
- A list of C(key=value) options to use when configuring a container.
|
||||
type: list
|
||||
elements: str
|
||||
requirements:
|
||||
@@ -188,7 +189,7 @@ notes:
|
||||
name lxc-python2.
|
||||
'''
|
||||
|
||||
EXAMPLES = """
|
||||
EXAMPLES = r"""
|
||||
- name: Create a started container
|
||||
community.general.lxc_container:
|
||||
name: test-container-started
|
||||
@@ -371,7 +372,7 @@ EXAMPLES = """
|
||||
- test-container-new-archive-destroyed-clone
|
||||
"""
|
||||
|
||||
RETURN = """
|
||||
RETURN = r"""
|
||||
lxc_container:
|
||||
description: container information
|
||||
returned: success
|
||||
@@ -928,8 +929,7 @@ class LxcContainerManagement(object):
|
||||
|
||||
if self._container_exists(container_name=self.container_name, lxc_path=self.lxc_path):
|
||||
return str(self.container.state).lower()
|
||||
else:
|
||||
return str('absent')
|
||||
return str('absent')
|
||||
|
||||
def _execute_command(self):
|
||||
"""Execute a shell command."""
|
||||
|
||||
@@ -194,9 +194,7 @@ def preflight_validation(bin_path, project_path, variables_args=None, plan_file=
|
||||
if not os.path.isdir(project_path):
|
||||
module.fail_json(msg="Path for Terraform project '{0}' doesn't exist on this host - check the path and try again please.".format(project_path))
|
||||
|
||||
rc, out, err = module.run_command([bin_path, 'validate'] + variables_args, cwd=project_path, use_unsafe_shell=True)
|
||||
if rc != 0:
|
||||
module.fail_json(msg="Failed to validate Terraform configuration files:\r\n{0}".format(err))
|
||||
rc, out, err = module.run_command([bin_path, 'validate'] + variables_args, check_rc=True, cwd=project_path, use_unsafe_shell=True)
|
||||
|
||||
|
||||
def _state_args(state_file):
|
||||
@@ -219,10 +217,8 @@ def init_plugins(bin_path, project_path, backend_config, backend_config_files, i
|
||||
for f in backend_config_files:
|
||||
command.extend(['-backend-config', f])
|
||||
if init_reconfigure:
|
||||
command.extend('-reconfigure')
|
||||
rc, out, err = module.run_command(command, cwd=project_path)
|
||||
if rc != 0:
|
||||
module.fail_json(msg="Failed to initialize Terraform modules:\r\n{0}".format(err))
|
||||
command.extend(['-reconfigure'])
|
||||
rc, out, err = module.run_command(command, check_rc=True, cwd=project_path)
|
||||
|
||||
|
||||
def get_workspace_context(bin_path, project_path):
|
||||
@@ -244,9 +240,7 @@ def get_workspace_context(bin_path, project_path):
|
||||
|
||||
def _workspace_cmd(bin_path, project_path, action, workspace):
|
||||
command = [bin_path, 'workspace', action, workspace, '-no-color']
|
||||
rc, out, err = module.run_command(command, cwd=project_path)
|
||||
if rc != 0:
|
||||
module.fail_json(msg="Failed to {0} workspace:\r\n{1}".format(action, err))
|
||||
rc, out, err = module.run_command(command, check_rc=True, cwd=project_path)
|
||||
return rc, out, err
|
||||
|
||||
|
||||
@@ -388,15 +382,10 @@ def main():
|
||||
command.append(plan_file)
|
||||
|
||||
if needs_application and not module.check_mode and not state == 'planned':
|
||||
rc, out, err = module.run_command(command, cwd=project_path)
|
||||
rc, out, err = module.run_command(command, check_rc=True, cwd=project_path)
|
||||
# checks out to decide if changes were made during execution
|
||||
if ' 0 added, 0 changed' not in out and not state == "absent" or ' 0 destroyed' not in out:
|
||||
changed = True
|
||||
if rc != 0:
|
||||
module.fail_json(
|
||||
msg="Failure when executing Terraform command. Exited {0}.\nstdout: {1}\nstderr: {2}".format(rc, out, err),
|
||||
command=' '.join(command)
|
||||
)
|
||||
|
||||
outputs_command = [command[0], 'output', '-no-color', '-json'] + _state_args(state_file)
|
||||
rc, outputs_text, outputs_err = module.run_command(outputs_command, cwd=project_path)
|
||||
|
||||
@@ -62,6 +62,9 @@ STATE_COMMAND_MAP = {
|
||||
'restarted': 'restart'
|
||||
}
|
||||
|
||||
MONIT_SERVICES = ['Process', 'File', 'Fifo', 'Filesystem', 'Directory', 'Remote host', 'System', 'Program',
|
||||
'Network']
|
||||
|
||||
|
||||
@python_2_unicode_compatible
|
||||
class StatusValue(namedtuple("Status", "value, is_pending")):
|
||||
@@ -151,7 +154,9 @@ class Monit(object):
|
||||
return self._parse_status(out, err)
|
||||
|
||||
def _parse_status(self, output, err):
|
||||
if "Process '%s'" % self.process_name not in output:
|
||||
escaped_monit_services = '|'.join([re.escape(x) for x in MONIT_SERVICES])
|
||||
pattern = "(%s) '%s'" % (escaped_monit_services, re.escape(self.process_name))
|
||||
if not re.search(pattern, output, re.IGNORECASE):
|
||||
return Status.MISSING
|
||||
|
||||
status_val = re.findall(r"^\s*status\s*([\w\- ]+)", output, re.MULTILINE)
|
||||
|
||||
@@ -19,6 +19,7 @@ module: nagios
|
||||
short_description: Perform common tasks in Nagios related to downtime and notifications.
|
||||
description:
|
||||
- "The C(nagios) module has two basic functions: scheduling downtime and toggling alerts for services or hosts."
|
||||
- The C(nagios) module is not idempotent.
|
||||
- All actions require the I(host) parameter to be given explicitly. In playbooks you can use the C({{inventory_hostname}}) variable to refer
|
||||
to the host the playbook is currently running on.
|
||||
- You can specify multiple services at once by separating them with commas, .e.g., C(services=httpd,nfs,puppet).
|
||||
@@ -26,7 +27,6 @@ description:
|
||||
e.g., C(service=host). This keyword may not be given with other services at the same time.
|
||||
I(Setting alerts/downtime/acknowledge for a host does not affect alerts/downtime/acknowledge for any of the services running on it.)
|
||||
To schedule downtime for all services on particular host use keyword "all", e.g., C(service=all).
|
||||
- When using the C(nagios) module you will need to specify your Nagios server using the C(delegate_to) parameter.
|
||||
options:
|
||||
action:
|
||||
description:
|
||||
|
||||
@@ -467,6 +467,9 @@ class DME2(object):
|
||||
for result in self.all_records:
|
||||
if record_type == "MX":
|
||||
value = record_value.split(" ")[1]
|
||||
# Note that TXT records are surrounded by quotes in the API response.
|
||||
elif record_type == "TXT":
|
||||
value = '"{0}"'.format(record_value)
|
||||
elif record_type == "SRV":
|
||||
value = record_value.split(" ")[3]
|
||||
else:
|
||||
@@ -651,7 +654,9 @@ def main():
|
||||
record_changed = False
|
||||
if current_record:
|
||||
for i in new_record:
|
||||
if str(current_record[i]) != str(new_record[i]):
|
||||
# Remove leading and trailing quote character from values because TXT records
|
||||
# are surrounded by quotes.
|
||||
if str(current_record[i]).strip('"') != str(new_record[i]):
|
||||
record_changed = True
|
||||
new_record['id'] = str(current_record['id'])
|
||||
|
||||
@@ -673,8 +678,11 @@ def main():
|
||||
# create record and monitor as the record does not exist
|
||||
if not current_record:
|
||||
record = DME.createRecord(DME.prepareRecord(new_record))
|
||||
monitor = DME.updateMonitor(record['id'], DME.prepareMonitor(new_monitor))
|
||||
module.exit_json(changed=True, result=dict(record=record, monitor=monitor))
|
||||
if new_monitor.get('monitor') and record_type == "A":
|
||||
monitor = DME.updateMonitor(record['id'], DME.prepareMonitor(new_monitor))
|
||||
module.exit_json(changed=True, result=dict(record=record, monitor=monitor))
|
||||
else:
|
||||
module.exit_json(changed=True, result=dict(record=record, monitor=current_monitor))
|
||||
|
||||
# update the record
|
||||
updated = False
|
||||
|
||||
@@ -112,7 +112,7 @@ def main():
|
||||
except Exception as exception:
|
||||
module.fail_json(msg="Attribute action failed.", details=to_native(exception))
|
||||
|
||||
module.exit_json(changed=True)
|
||||
module.exit_json(changed=False)
|
||||
|
||||
|
||||
def _extract_entry(dn, attrs):
|
||||
@@ -144,24 +144,20 @@ class LdapSearch(LdapGeneric):
|
||||
self.attrsonly = 0
|
||||
|
||||
def _load_scope(self):
|
||||
scope = self.module.params['scope']
|
||||
if scope == 'base':
|
||||
self.scope = ldap.SCOPE_BASE
|
||||
elif scope == 'onelevel':
|
||||
self.scope = ldap.SCOPE_ONELEVEL
|
||||
elif scope == 'subordinate':
|
||||
self.scope = ldap.SCOPE_SUBORDINATE
|
||||
elif scope == 'children':
|
||||
self.scope = ldap.SCOPE_SUBTREE
|
||||
else:
|
||||
raise AssertionError('Implementation error')
|
||||
spec = dict(
|
||||
base=ldap.SCOPE_BASE,
|
||||
onelevel=ldap.SCOPE_ONELEVEL,
|
||||
subordinate=ldap.SCOPE_SUBORDINATE,
|
||||
children=ldap.SCOPE_SUBTREE,
|
||||
)
|
||||
self.scope = spec[self.module.params['scope']]
|
||||
|
||||
def _load_attrs(self):
|
||||
self.attrlist = self.module.params['attrs'] or None
|
||||
|
||||
def main(self):
|
||||
results = self.perform_search()
|
||||
self.module.exit_json(changed=True, results=results)
|
||||
self.module.exit_json(changed=False, results=results)
|
||||
|
||||
def perform_search(self):
|
||||
try:
|
||||
|
||||
@@ -41,7 +41,7 @@ from ansible.module_utils.basic import AnsibleModule
|
||||
|
||||
|
||||
def gather_lldp(module):
|
||||
cmd = ['lldpctl', '-f', 'keyvalue']
|
||||
cmd = [module.get_bin_path('lldpctl'), '-f', 'keyvalue']
|
||||
rc, output, err = module.run_command(cmd)
|
||||
if output:
|
||||
output_dict = {}
|
||||
|
||||
@@ -739,7 +739,6 @@ class Nmcli(object):
|
||||
return self.type in (
|
||||
'bond',
|
||||
'bridge',
|
||||
'bridge-slave',
|
||||
'ethernet',
|
||||
'generic',
|
||||
'team',
|
||||
|
||||
@@ -21,51 +21,51 @@ requirements:
|
||||
options:
|
||||
host:
|
||||
description:
|
||||
- Set to target snmp server (normally C({{ inventory_hostname }})).
|
||||
- Set to target SNMP server (normally C({{ inventory_hostname }})).
|
||||
type: str
|
||||
required: true
|
||||
version:
|
||||
description:
|
||||
- SNMP Version to use, v2/v2c or v3.
|
||||
- SNMP Version to use, C(v2), C(v2c) or C(v3).
|
||||
type: str
|
||||
required: true
|
||||
choices: [ v2, v2c, v3 ]
|
||||
community:
|
||||
description:
|
||||
- The SNMP community string, required if version is v2/v2c.
|
||||
- The SNMP community string, required if I(version) is C(v2) or C(v2c).
|
||||
type: str
|
||||
level:
|
||||
description:
|
||||
- Authentication level.
|
||||
- Required if version is v3.
|
||||
- Required if I(version) is C(v3).
|
||||
type: str
|
||||
choices: [ authNoPriv, authPriv ]
|
||||
username:
|
||||
description:
|
||||
- Username for SNMPv3.
|
||||
- Required if version is v3.
|
||||
- Required if I(version) is C(v3).
|
||||
type: str
|
||||
integrity:
|
||||
description:
|
||||
- Hashing algorithm.
|
||||
- Required if version is v3.
|
||||
- Required if I(version) is C(v3).
|
||||
type: str
|
||||
choices: [ md5, sha ]
|
||||
authkey:
|
||||
description:
|
||||
- Authentication key.
|
||||
- Required if version is v3.
|
||||
- Required I(version) is C(v3).
|
||||
type: str
|
||||
privacy:
|
||||
description:
|
||||
- Encryption algorithm.
|
||||
- Required if level is authPriv.
|
||||
- Required if I(level) is C(authPriv).
|
||||
type: str
|
||||
choices: [ aes, des ]
|
||||
privkey:
|
||||
description:
|
||||
- Encryption key.
|
||||
- Required if version is authPriv.
|
||||
- Required if I(level) is C(authPriv).
|
||||
type: str
|
||||
'''
|
||||
|
||||
@@ -174,10 +174,10 @@ PYSNMP_IMP_ERR = None
|
||||
try:
|
||||
from pysnmp.entity.rfc3413.oneliner import cmdgen
|
||||
from pysnmp.proto.rfc1905 import EndOfMibView
|
||||
has_pysnmp = True
|
||||
HAS_PYSNMP = True
|
||||
except Exception:
|
||||
PYSNMP_IMP_ERR = traceback.format_exc()
|
||||
has_pysnmp = False
|
||||
HAS_PYSNMP = False
|
||||
|
||||
from ansible.module_utils.basic import AnsibleModule, missing_required_lib
|
||||
from ansible.module_utils._text import to_text
|
||||
@@ -221,8 +221,7 @@ def decode_hex(hexstring):
|
||||
return hexstring
|
||||
if hexstring[:2] == "0x":
|
||||
return to_text(binascii.unhexlify(hexstring[2:]))
|
||||
else:
|
||||
return hexstring
|
||||
return hexstring
|
||||
|
||||
|
||||
def decode_mac(hexstring):
|
||||
@@ -231,8 +230,7 @@ def decode_mac(hexstring):
|
||||
return hexstring
|
||||
if hexstring[:2] == "0x":
|
||||
return hexstring[2:]
|
||||
else:
|
||||
return hexstring
|
||||
return hexstring
|
||||
|
||||
|
||||
def lookup_adminstatus(int_adminstatus):
|
||||
@@ -243,8 +241,7 @@ def lookup_adminstatus(int_adminstatus):
|
||||
}
|
||||
if int_adminstatus in adminstatus_options:
|
||||
return adminstatus_options[int_adminstatus]
|
||||
else:
|
||||
return ""
|
||||
return ""
|
||||
|
||||
|
||||
def lookup_operstatus(int_operstatus):
|
||||
@@ -259,8 +256,7 @@ def lookup_operstatus(int_operstatus):
|
||||
}
|
||||
if int_operstatus in operstatus_options:
|
||||
return operstatus_options[int_operstatus]
|
||||
else:
|
||||
return ""
|
||||
return ""
|
||||
|
||||
|
||||
def main():
|
||||
@@ -273,8 +269,8 @@ def main():
|
||||
level=dict(type='str', choices=['authNoPriv', 'authPriv']),
|
||||
integrity=dict(type='str', choices=['md5', 'sha']),
|
||||
privacy=dict(type='str', choices=['aes', 'des']),
|
||||
authkey=dict(type='str'),
|
||||
privkey=dict(type='str'),
|
||||
authkey=dict(type='str', no_log=True),
|
||||
privkey=dict(type='str', no_log=True),
|
||||
),
|
||||
required_together=(
|
||||
['username', 'level', 'integrity', 'authkey'],
|
||||
@@ -285,13 +281,13 @@ def main():
|
||||
|
||||
m_args = module.params
|
||||
|
||||
if not has_pysnmp:
|
||||
if not HAS_PYSNMP:
|
||||
module.fail_json(msg=missing_required_lib('pysnmp'), exception=PYSNMP_IMP_ERR)
|
||||
|
||||
cmdGen = cmdgen.CommandGenerator()
|
||||
|
||||
# Verify that we receive a community when using snmp v2
|
||||
if m_args['version'] == "v2" or m_args['version'] == "v2c":
|
||||
if m_args['version'] in ("v2", "v2c"):
|
||||
if m_args['community'] is None:
|
||||
module.fail_json(msg='Community not set when using snmp version 2')
|
||||
|
||||
@@ -313,7 +309,7 @@ def main():
|
||||
privacy_proto = cmdgen.usmDESPrivProtocol
|
||||
|
||||
# Use SNMP Version 2
|
||||
if m_args['version'] == "v2" or m_args['version'] == "v2c":
|
||||
if m_args['version'] in ("v2", "v2c"):
|
||||
snmp_auth = cmdgen.CommunityData(m_args['community'])
|
||||
|
||||
# Use SNMP Version 3 with authNoPriv
|
||||
|
||||
@@ -1,14 +1,14 @@
|
||||
#!/usr/bin/python
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
# (c) 2015, Matt Makai <matthew.makai@gmail.com>
|
||||
# Copyright: (c) 2015, Matt Makai <matthew.makai@gmail.com>
|
||||
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||
|
||||
from __future__ import absolute_import, division, print_function
|
||||
__metaclass__ = type
|
||||
|
||||
|
||||
DOCUMENTATION = '''
|
||||
DOCUMENTATION = r'''
|
||||
---
|
||||
module: sendgrid
|
||||
short_description: Sends an email with the SendGrid API
|
||||
@@ -23,73 +23,73 @@ notes:
|
||||
account."
|
||||
- "In order to use api_key, cc, bcc, attachments, from_name, html_body, headers
|
||||
you must pip install sendgrid"
|
||||
- "since 2.2 username and password are not required if you supply an api_key"
|
||||
- "since 2.2 I(username) and I(password) are not required if you supply an I(api_key)"
|
||||
requirements:
|
||||
- sendgrid python library
|
||||
- sendgrid Python library 1.6.22 or lower (Sendgrid API V2 supported)
|
||||
options:
|
||||
username:
|
||||
type: str
|
||||
description:
|
||||
- username for logging into the SendGrid account.
|
||||
- Since 2.2 it is only required if api_key is not supplied.
|
||||
- Username for logging into the SendGrid account.
|
||||
- Since 2.2 it is only required if I(api_key) is not supplied.
|
||||
password:
|
||||
type: str
|
||||
description:
|
||||
- password that corresponds to the username
|
||||
- Since 2.2 it is only required if api_key is not supplied.
|
||||
- Password that corresponds to the username.
|
||||
- Since 2.2 it is only required if I(api_key) is not supplied.
|
||||
from_address:
|
||||
type: str
|
||||
description:
|
||||
- the address in the "from" field for the email
|
||||
- The address in the "from" field for the email.
|
||||
required: true
|
||||
to_addresses:
|
||||
type: list
|
||||
description:
|
||||
- a list with one or more recipient email addresses
|
||||
- A list with one or more recipient email addresses.
|
||||
required: true
|
||||
subject:
|
||||
type: str
|
||||
description:
|
||||
- the desired subject for the email
|
||||
- The desired subject for the email.
|
||||
required: true
|
||||
api_key:
|
||||
type: str
|
||||
description:
|
||||
- sendgrid API key to use instead of username/password
|
||||
- Sendgrid API key to use instead of username/password.
|
||||
cc:
|
||||
type: list
|
||||
description:
|
||||
- a list of email addresses to cc
|
||||
- A list of email addresses to cc.
|
||||
bcc:
|
||||
type: list
|
||||
description:
|
||||
- a list of email addresses to bcc
|
||||
- A list of email addresses to bcc.
|
||||
attachments:
|
||||
type: list
|
||||
description:
|
||||
- a list of relative or explicit paths of files you want to attach (7MB limit as per SendGrid docs)
|
||||
- A list of relative or explicit paths of files you want to attach (7MB limit as per SendGrid docs).
|
||||
from_name:
|
||||
type: str
|
||||
description:
|
||||
- the name you want to appear in the from field, i.e 'John Doe'
|
||||
- The name you want to appear in the from field, i.e 'John Doe'.
|
||||
html_body:
|
||||
description:
|
||||
- whether the body is html content that should be rendered
|
||||
- Whether the body is html content that should be rendered.
|
||||
type: bool
|
||||
default: 'no'
|
||||
headers:
|
||||
type: dict
|
||||
description:
|
||||
- a dict to pass on as headers
|
||||
- A dict to pass on as headers.
|
||||
body:
|
||||
type: str
|
||||
description:
|
||||
- the e-mail body content
|
||||
- The e-mail body content.
|
||||
required: yes
|
||||
author: "Matt Makai (@makaimc)"
|
||||
'''
|
||||
|
||||
EXAMPLES = '''
|
||||
EXAMPLES = r'''
|
||||
- name: Send an email to a single recipient that the deployment was successful
|
||||
community.general.sendgrid:
|
||||
username: "{{ sendgrid_username }}"
|
||||
@@ -120,6 +120,8 @@ EXAMPLES = '''
|
||||
import os
|
||||
import traceback
|
||||
|
||||
from distutils.version import LooseVersion
|
||||
|
||||
SENDGRID_IMP_ERR = None
|
||||
try:
|
||||
import sendgrid
|
||||
@@ -155,6 +157,9 @@ def post_sendgrid_api(module, username, password, from_address, to_addresses,
|
||||
'Accept': 'application/json'}
|
||||
return fetch_url(module, SENDGRID_URI, data=encoded_data, headers=headers, method='POST')
|
||||
else:
|
||||
# Remove this check when adding Sendgrid API v3 support
|
||||
if LooseVersion(sendgrid.version.__version__) > LooseVersion("1.6.22"):
|
||||
module.fail_json(msg="Please install sendgrid==1.6.22 or lower since module uses Sendgrid V2 APIs.")
|
||||
|
||||
if api_key:
|
||||
sg = sendgrid.SendGridClient(api_key)
|
||||
|
||||
@@ -5,7 +5,7 @@
|
||||
from __future__ import absolute_import, division, print_function
|
||||
__metaclass__ = type
|
||||
|
||||
DOCUMENTATION = '''
|
||||
DOCUMENTATION = r'''
|
||||
---
|
||||
module: syslogger
|
||||
short_description: Log messages in the syslog
|
||||
@@ -33,7 +33,7 @@ options:
|
||||
default: "daemon"
|
||||
log_pid:
|
||||
description:
|
||||
- Log the pid in brackets.
|
||||
- Log the PID in brackets.
|
||||
type: bool
|
||||
default: False
|
||||
ident:
|
||||
@@ -83,7 +83,7 @@ facility:
|
||||
type: str
|
||||
sample: "info"
|
||||
log_pid:
|
||||
description: Log pid status
|
||||
description: Log PID status
|
||||
returned: always
|
||||
type: bool
|
||||
sample: True
|
||||
@@ -94,11 +94,14 @@ msg:
|
||||
sample: "Hello from Ansible"
|
||||
'''
|
||||
|
||||
from ansible.module_utils.basic import AnsibleModule
|
||||
import syslog
|
||||
import traceback
|
||||
|
||||
from ansible.module_utils.basic import AnsibleModule
|
||||
from ansible.module_utils._text import to_native
|
||||
|
||||
|
||||
def get_facility(x):
|
||||
def get_facility(facility):
|
||||
return {
|
||||
'kern': syslog.LOG_KERN,
|
||||
'user': syslog.LOG_USER,
|
||||
@@ -118,10 +121,10 @@ def get_facility(x):
|
||||
'local5': syslog.LOG_LOCAL5,
|
||||
'local6': syslog.LOG_LOCAL6,
|
||||
'local7': syslog.LOG_LOCAL7
|
||||
}.get(x, syslog.LOG_DAEMON)
|
||||
}.get(facility, syslog.LOG_DAEMON)
|
||||
|
||||
|
||||
def get_priority(x):
|
||||
def get_priority(priority):
|
||||
return {
|
||||
'emerg': syslog.LOG_EMERG,
|
||||
'alert': syslog.LOG_ALERT,
|
||||
@@ -131,7 +134,7 @@ def get_priority(x):
|
||||
'notice': syslog.LOG_NOTICE,
|
||||
'info': syslog.LOG_INFO,
|
||||
'debug': syslog.LOG_DEBUG
|
||||
}.get(x, syslog.LOG_INFO)
|
||||
}.get(priority, syslog.LOG_INFO)
|
||||
|
||||
|
||||
def main():
|
||||
@@ -168,20 +171,16 @@ def main():
|
||||
|
||||
# do the logging
|
||||
try:
|
||||
if module.params['log_pid']:
|
||||
syslog.openlog(module.params['ident'],
|
||||
logoption=syslog.LOG_PID,
|
||||
facility=get_facility(module.params['facility']))
|
||||
else:
|
||||
syslog.openlog(module.params['ident'],
|
||||
facility=get_facility(module.params['facility']))
|
||||
syslog.openlog(module.params['ident'],
|
||||
syslog.LOG_PID if module.params['log_pid'] else 0,
|
||||
get_facility(module.params['facility']))
|
||||
syslog.syslog(get_priority(module.params['priority']),
|
||||
module.params['msg'])
|
||||
syslog.closelog()
|
||||
result['changed'] = True
|
||||
|
||||
except Exception:
|
||||
module.fail_json(error='Failed to write to syslog', **result)
|
||||
except Exception as exc:
|
||||
module.fail_json(error='Failed to write to syslog %s' % to_native(exc), exception=traceback.format_exc(), **result)
|
||||
|
||||
module.exit_json(**result)
|
||||
|
||||
|
||||
@@ -7,39 +7,39 @@ from __future__ import absolute_import, division, print_function
|
||||
__metaclass__ = type
|
||||
|
||||
|
||||
DOCUMENTATION = '''
|
||||
DOCUMENTATION = r'''
|
||||
---
|
||||
module: npm
|
||||
short_description: Manage node.js packages with npm
|
||||
description:
|
||||
- Manage node.js packages with Node Package Manager (npm)
|
||||
- Manage node.js packages with Node Package Manager (npm).
|
||||
author: "Chris Hoffman (@chrishoffman)"
|
||||
options:
|
||||
name:
|
||||
description:
|
||||
- The name of a node.js library to install
|
||||
- The name of a node.js library to install.
|
||||
type: str
|
||||
required: false
|
||||
path:
|
||||
description:
|
||||
- The base path where to install the node.js libraries
|
||||
- The base path where to install the node.js libraries.
|
||||
type: path
|
||||
required: false
|
||||
version:
|
||||
description:
|
||||
- The version to be installed
|
||||
- The version to be installed.
|
||||
type: str
|
||||
required: false
|
||||
global:
|
||||
description:
|
||||
- Install the node.js library globally
|
||||
- Install the node.js library globally.
|
||||
required: false
|
||||
default: no
|
||||
type: bool
|
||||
executable:
|
||||
description:
|
||||
- The executable location for npm.
|
||||
- This is useful if you are using a version manager, such as nvm
|
||||
- This is useful if you are using a version manager, such as nvm.
|
||||
type: path
|
||||
required: false
|
||||
ignore_scripts:
|
||||
@@ -55,12 +55,12 @@ options:
|
||||
default: no
|
||||
ci:
|
||||
description:
|
||||
- Install packages based on package-lock file, same as running npm ci
|
||||
- Install packages based on package-lock file, same as running C(npm ci).
|
||||
type: bool
|
||||
default: no
|
||||
production:
|
||||
description:
|
||||
- Install dependencies in production mode, excluding devDependencies
|
||||
- Install dependencies in production mode, excluding devDependencies.
|
||||
required: false
|
||||
type: bool
|
||||
default: no
|
||||
@@ -71,7 +71,7 @@ options:
|
||||
type: str
|
||||
state:
|
||||
description:
|
||||
- The state of the node.js library
|
||||
- The state of the node.js library.
|
||||
required: false
|
||||
type: str
|
||||
default: present
|
||||
@@ -80,7 +80,7 @@ requirements:
|
||||
- npm installed in bin path (recommended /usr/local/bin)
|
||||
'''
|
||||
|
||||
EXAMPLES = '''
|
||||
EXAMPLES = r'''
|
||||
- name: Install "coffee-script" node.js package.
|
||||
community.general.npm:
|
||||
name: coffee-script
|
||||
@@ -124,12 +124,12 @@ EXAMPLES = '''
|
||||
state: present
|
||||
'''
|
||||
|
||||
import json
|
||||
import os
|
||||
import re
|
||||
|
||||
from ansible.module_utils.basic import AnsibleModule
|
||||
|
||||
import json
|
||||
from ansible.module_utils._text import to_native
|
||||
|
||||
|
||||
class Npm(object):
|
||||
@@ -155,7 +155,7 @@ class Npm(object):
|
||||
else:
|
||||
self.name_version = self.name
|
||||
|
||||
def _exec(self, args, run_in_check_mode=False, check_rc=True):
|
||||
def _exec(self, args, run_in_check_mode=False, check_rc=True, add_package_name=True):
|
||||
if not self.module.check_mode or (self.module.check_mode and run_in_check_mode):
|
||||
cmd = self.executable + args
|
||||
|
||||
@@ -167,7 +167,7 @@ class Npm(object):
|
||||
cmd.append('--ignore-scripts')
|
||||
if self.unsafe_perm:
|
||||
cmd.append('--unsafe-perm')
|
||||
if self.name:
|
||||
if self.name and add_package_name:
|
||||
cmd.append(self.name_version)
|
||||
if self.registry:
|
||||
cmd.append('--registry')
|
||||
@@ -191,7 +191,11 @@ class Npm(object):
|
||||
|
||||
installed = list()
|
||||
missing = list()
|
||||
data = json.loads(self._exec(cmd, True, False))
|
||||
data = {}
|
||||
try:
|
||||
data = json.loads(self._exec(cmd, True, False, False) or '{}')
|
||||
except (getattr(json, 'JSONDecodeError', ValueError)) as e:
|
||||
self.module.fail_json(msg="Failed to parse NPM output with error %s" % to_native(e))
|
||||
if 'dependencies' in data:
|
||||
for dep in data['dependencies']:
|
||||
if 'missing' in data['dependencies'][dep] and data['dependencies'][dep]['missing']:
|
||||
|
||||
@@ -38,7 +38,7 @@ options:
|
||||
- "A ':' separated list of paths to search for 'brew' executable.
|
||||
Since a package (I(formula) in homebrew parlance) location is prefixed relative to the actual path of I(brew) command,
|
||||
providing an alternative I(brew) path enables managing different set of packages in an alternative location in the system."
|
||||
default: '/usr/local/bin'
|
||||
default: '/usr/local/bin:/opt/homebrew/bin'
|
||||
type: path
|
||||
state:
|
||||
description:
|
||||
@@ -76,7 +76,7 @@ notes:
|
||||
'''
|
||||
|
||||
EXAMPLES = '''
|
||||
# Install formula foo with 'brew' in default path (C(/usr/local/bin))
|
||||
# Install formula foo with 'brew' in default path
|
||||
- community.general.homebrew:
|
||||
name: foo
|
||||
state: present
|
||||
@@ -871,7 +871,7 @@ def main():
|
||||
elements='str',
|
||||
),
|
||||
path=dict(
|
||||
default="/usr/local/bin",
|
||||
default="/usr/local/bin:/opt/homebrew/bin",
|
||||
required=False,
|
||||
type='path',
|
||||
),
|
||||
|
||||
@@ -32,7 +32,7 @@ options:
|
||||
path:
|
||||
description:
|
||||
- "':' separated list of paths to search for 'brew' executable."
|
||||
default: '/usr/local/bin'
|
||||
default: '/usr/local/bin:/opt/homebrew/bin'
|
||||
type: path
|
||||
state:
|
||||
description:
|
||||
@@ -139,6 +139,7 @@ EXAMPLES = '''
|
||||
import os
|
||||
import re
|
||||
import tempfile
|
||||
from distutils import version
|
||||
|
||||
from ansible.module_utils._text import to_bytes
|
||||
from ansible.module_utils.basic import AnsibleModule
|
||||
@@ -356,6 +357,18 @@ class HomebrewCask(object):
|
||||
else:
|
||||
self._current_cask = cask
|
||||
return cask
|
||||
|
||||
@property
|
||||
def brew_version(self):
|
||||
try:
|
||||
return self._brew_version
|
||||
except AttributeError:
|
||||
return None
|
||||
|
||||
@brew_version.setter
|
||||
def brew_version(self, brew_version):
|
||||
self._brew_version = brew_version
|
||||
|
||||
# /class properties -------------------------------------------- }}}
|
||||
|
||||
def __init__(self, module, path=path, casks=None, state=None,
|
||||
@@ -434,15 +447,12 @@ class HomebrewCask(object):
|
||||
if not self.valid_cask(self.current_cask):
|
||||
return False
|
||||
|
||||
cask_is_outdated_command = (
|
||||
[
|
||||
self.brew_path,
|
||||
'cask',
|
||||
'outdated',
|
||||
]
|
||||
+ (['--greedy'] if self.greedy else [])
|
||||
+ [self.current_cask]
|
||||
)
|
||||
if self._brew_cask_command_is_deprecated():
|
||||
base_opts = [self.brew_path, 'outdated', '--cask']
|
||||
else:
|
||||
base_opts = [self.brew_path, 'cask', 'outdated']
|
||||
|
||||
cask_is_outdated_command = base_opts + (['--greedy'] if self.greedy else []) + [self.current_cask]
|
||||
|
||||
rc, out, err = self.module.run_command(cask_is_outdated_command)
|
||||
|
||||
@@ -454,18 +464,35 @@ class HomebrewCask(object):
|
||||
self.message = 'Invalid cask: {0}.'.format(self.current_cask)
|
||||
raise HomebrewCaskException(self.message)
|
||||
|
||||
cmd = [
|
||||
"{brew_path}".format(brew_path=self.brew_path),
|
||||
"cask",
|
||||
"list",
|
||||
self.current_cask
|
||||
]
|
||||
if self._brew_cask_command_is_deprecated():
|
||||
base_opts = [self.brew_path, "list", "--cask"]
|
||||
else:
|
||||
base_opts = [self.brew_path, "cask", "list"]
|
||||
|
||||
cmd = base_opts + [self.current_cask]
|
||||
rc, out, err = self.module.run_command(cmd)
|
||||
|
||||
if rc == 0:
|
||||
return True
|
||||
else:
|
||||
return False
|
||||
|
||||
def _get_brew_version(self):
|
||||
if self.brew_version:
|
||||
return self.brew_version
|
||||
|
||||
cmd = [self.brew_path, '--version']
|
||||
|
||||
rc, out, err = self.module.run_command(cmd, check_rc=True)
|
||||
|
||||
# get version string from first line of "brew --version" output
|
||||
version = out.split('\n')[0].split(' ')[1]
|
||||
self.brew_version = version
|
||||
return self.brew_version
|
||||
|
||||
def _brew_cask_command_is_deprecated(self):
|
||||
# The `brew cask` replacements were fully available in 2.6.0 (https://brew.sh/2020/12/01/homebrew-2.6.0/)
|
||||
return version.LooseVersion(self._get_brew_version()) >= version.LooseVersion('2.6.0')
|
||||
# /checks ------------------------------------------------------ }}}
|
||||
|
||||
# commands ----------------------------------------------------- {{{
|
||||
@@ -537,11 +564,10 @@ class HomebrewCask(object):
|
||||
self.message = 'Casks would be upgraded.'
|
||||
raise HomebrewCaskException(self.message)
|
||||
|
||||
opts = (
|
||||
[self.brew_path, 'cask', 'upgrade']
|
||||
)
|
||||
|
||||
cmd = [opt for opt in opts if opt]
|
||||
if self._brew_cask_command_is_deprecated():
|
||||
cmd = [self.brew_path, 'upgrade', '--cask']
|
||||
else:
|
||||
cmd = [self.brew_path, 'cask', 'upgrade']
|
||||
|
||||
rc, out, err = '', '', ''
|
||||
|
||||
@@ -586,10 +612,12 @@ class HomebrewCask(object):
|
||||
)
|
||||
raise HomebrewCaskException(self.message)
|
||||
|
||||
opts = (
|
||||
[self.brew_path, 'cask', 'install', self.current_cask]
|
||||
+ self.install_options
|
||||
)
|
||||
if self._brew_cask_command_is_deprecated():
|
||||
base_opts = [self.brew_path, 'install', '--cask']
|
||||
else:
|
||||
base_opts = [self.brew_path, 'cask', 'install']
|
||||
|
||||
opts = base_opts + [self.current_cask] + self.install_options
|
||||
|
||||
cmd = [opt for opt in opts if opt]
|
||||
|
||||
@@ -650,11 +678,13 @@ class HomebrewCask(object):
|
||||
)
|
||||
raise HomebrewCaskException(self.message)
|
||||
|
||||
opts = (
|
||||
[self.brew_path, 'cask', command]
|
||||
+ self.install_options
|
||||
+ [self.current_cask]
|
||||
)
|
||||
if self._brew_cask_command_is_deprecated():
|
||||
base_opts = [self.brew_path, command, '--cask']
|
||||
else:
|
||||
base_opts = [self.brew_path, 'cask', command]
|
||||
|
||||
opts = base_opts + self.install_options + [self.current_cask]
|
||||
|
||||
cmd = [opt for opt in opts if opt]
|
||||
|
||||
rc, out, err = '', '', ''
|
||||
@@ -703,10 +733,12 @@ class HomebrewCask(object):
|
||||
)
|
||||
raise HomebrewCaskException(self.message)
|
||||
|
||||
opts = (
|
||||
[self.brew_path, 'cask', 'uninstall', self.current_cask]
|
||||
+ self.install_options
|
||||
)
|
||||
if self._brew_cask_command_is_deprecated():
|
||||
base_opts = [self.brew_path, 'uninstall', '--cask']
|
||||
else:
|
||||
base_opts = [self.brew_path, 'cask', 'uninstall']
|
||||
|
||||
opts = base_opts + [self.current_cask] + self.install_options
|
||||
|
||||
cmd = [opt for opt in opts if opt]
|
||||
|
||||
@@ -747,7 +779,7 @@ def main():
|
||||
elements='str',
|
||||
),
|
||||
path=dict(
|
||||
default="/usr/local/bin",
|
||||
default="/usr/local/bin:/opt/homebrew/bin",
|
||||
required=False,
|
||||
type='path',
|
||||
),
|
||||
|
||||
@@ -218,7 +218,7 @@ def main():
|
||||
brew_path = module.get_bin_path(
|
||||
'brew',
|
||||
required=True,
|
||||
opt_dirs=['/usr/local/bin']
|
||||
opt_dirs=['/usr/local/bin', '/opt/homebrew/bin']
|
||||
)
|
||||
|
||||
taps = module.params['name']
|
||||
|
||||
@@ -135,7 +135,7 @@ def main():
|
||||
try: # Python 2.7.9 and newer
|
||||
ssl_context = ssl.create_unverified_context()
|
||||
except AttributeError: # Legacy Python that doesn't verify HTTPS certificates by default
|
||||
ssl._create_default_context = ssl._create_unverified_context
|
||||
ssl_context = ssl._create_unverified_context()
|
||||
else: # Python 2.7.8 and older
|
||||
ssl._create_default_https_context = ssl._create_unverified_https_context
|
||||
|
||||
|
||||
@@ -85,7 +85,7 @@ EXAMPLES = r'''
|
||||
|
||||
RETURN = r''' # '''
|
||||
|
||||
from ansible.module_utils.basic import AnsibleModule
|
||||
from ansible.module_utils.basic import AnsibleModule, _load_params
|
||||
from ansible_collections.community.general.plugins.module_utils.source_control.bitbucket import BitbucketHelper
|
||||
|
||||
error_messages = {
|
||||
@@ -211,6 +211,14 @@ def delete_pipeline_variable(module, bitbucket, variable_uuid):
|
||||
))
|
||||
|
||||
|
||||
class BitBucketPipelineVariable(AnsibleModule):
|
||||
def __init__(self, *args, **kwargs):
|
||||
params = _load_params() or {}
|
||||
if params.get('secured'):
|
||||
kwargs['argument_spec']['value'].update({'no_log': True})
|
||||
super(BitBucketPipelineVariable, self).__init__(*args, **kwargs)
|
||||
|
||||
|
||||
def main():
|
||||
argument_spec = BitbucketHelper.bitbucket_argument_spec()
|
||||
argument_spec.update(
|
||||
@@ -221,7 +229,7 @@ def main():
|
||||
secured=dict(type='bool', default=False),
|
||||
state=dict(type='str', choices=['present', 'absent'], required=True),
|
||||
)
|
||||
module = AnsibleModule(
|
||||
module = BitBucketPipelineVariable(
|
||||
argument_spec=argument_spec,
|
||||
supports_check_mode=True,
|
||||
)
|
||||
|
||||
@@ -114,6 +114,7 @@ from abc import ABCMeta, abstractmethod
|
||||
from time import sleep
|
||||
|
||||
from ansible.module_utils.basic import AnsibleModule
|
||||
from ansible.module_utils._text import to_native
|
||||
|
||||
|
||||
class ServiceState:
|
||||
@@ -142,6 +143,9 @@ class Plist:
|
||||
|
||||
state, pid, dummy, dummy = LaunchCtlList(module, service).run()
|
||||
|
||||
# Check if readPlist is available or not
|
||||
self.old_plistlib = hasattr(plistlib, 'readPlist')
|
||||
|
||||
self.__file = self.__find_service_plist(service)
|
||||
if self.__file is None:
|
||||
msg = 'Unable to infer the path of %s service plist file' % service
|
||||
@@ -150,7 +154,8 @@ class Plist:
|
||||
module.fail_json(msg=msg)
|
||||
self.__update(module)
|
||||
|
||||
def __find_service_plist(self, service_name):
|
||||
@staticmethod
|
||||
def __find_service_plist(service_name):
|
||||
"""Finds the plist file associated with a service"""
|
||||
|
||||
launchd_paths = [
|
||||
@@ -176,9 +181,38 @@ class Plist:
|
||||
self.__handle_param_enabled(module)
|
||||
self.__handle_param_force_stop(module)
|
||||
|
||||
def __read_plist_file(self, module):
|
||||
service_plist = {}
|
||||
if self.old_plistlib:
|
||||
return plistlib.readPlist(self.__file)
|
||||
|
||||
# readPlist is deprecated in Python 3 and onwards
|
||||
try:
|
||||
with open(self.__file, 'rb') as plist_fp:
|
||||
service_plist = plistlib.load(plist_fp)
|
||||
except Exception as e:
|
||||
module.fail_json(msg="Failed to read plist file "
|
||||
"%s due to %s" % (self.__file, to_native(e)))
|
||||
return service_plist
|
||||
|
||||
def __write_plist_file(self, module, service_plist=None):
|
||||
if not service_plist:
|
||||
service_plist = {}
|
||||
|
||||
if self.old_plistlib:
|
||||
plistlib.writePlist(service_plist, self.__file)
|
||||
return
|
||||
# writePlist is deprecated in Python 3 and onwards
|
||||
try:
|
||||
with open(self.__file, 'wb') as plist_fp:
|
||||
plistlib.dump(service_plist, plist_fp)
|
||||
except Exception as e:
|
||||
module.fail_json(msg="Failed to write to plist file "
|
||||
" %s due to %s" % (self.__file, to_native(e)))
|
||||
|
||||
def __handle_param_enabled(self, module):
|
||||
if module.params['enabled'] is not None:
|
||||
service_plist = plistlib.readPlist(self.__file)
|
||||
service_plist = self.__read_plist_file(module)
|
||||
|
||||
# Enable/disable service startup at boot if requested
|
||||
# Launchctl does not expose functionality to set the RunAtLoad
|
||||
@@ -191,12 +225,12 @@ class Plist:
|
||||
|
||||
# Update the plist with one of the changes done.
|
||||
if not module.check_mode:
|
||||
plistlib.writePlist(service_plist, self.__file)
|
||||
self.__write_plist_file(module, service_plist)
|
||||
self.__changed = True
|
||||
|
||||
def __handle_param_force_stop(self, module):
|
||||
if module.params['force_stop'] is not None:
|
||||
service_plist = plistlib.readPlist(self.__file)
|
||||
service_plist = self.__read_plist_file(module)
|
||||
|
||||
# Set KeepAlive to false in case force_stop is defined to avoid
|
||||
# that the service gets restarted when stopping was requested.
|
||||
@@ -207,7 +241,7 @@ class Plist:
|
||||
|
||||
# Update the plist with one of the changes done.
|
||||
if not module.check_mode:
|
||||
plistlib.writePlist(service_plist, self.__file)
|
||||
self.__write_plist_file(module, service_plist)
|
||||
self.__changed = True
|
||||
|
||||
def is_changed(self):
|
||||
@@ -325,7 +359,7 @@ class LaunchCtlStart(LaunchCtlTask):
|
||||
def runCommand(self):
|
||||
state, dummy, dummy, dummy = self.get_state()
|
||||
|
||||
if state == ServiceState.STOPPED or state == ServiceState.LOADED:
|
||||
if state in (ServiceState.STOPPED, ServiceState.LOADED):
|
||||
self.reload()
|
||||
self.start()
|
||||
elif state == ServiceState.STARTED:
|
||||
@@ -361,7 +395,7 @@ class LaunchCtlStop(LaunchCtlTask):
|
||||
if self._plist.is_changed():
|
||||
self.reload()
|
||||
self.stop()
|
||||
elif state == ServiceState.STARTED or state == ServiceState.LOADED:
|
||||
elif state in (ServiceState.STARTED, ServiceState.LOADED):
|
||||
if self._plist.is_changed():
|
||||
self.reload()
|
||||
self.stop()
|
||||
|
||||
@@ -136,7 +136,7 @@ def main():
|
||||
base_command.extend(make_parameters)
|
||||
|
||||
# Check if the target is already up to date
|
||||
rc, out, err = run_command(base_command + ['--question'], module, check_rc=False)
|
||||
rc, out, err = run_command(base_command + ['-q'], module, check_rc=False)
|
||||
if module.check_mode:
|
||||
# If we've been asked to do a dry run, we only need
|
||||
# to report whether or not the target is up to date
|
||||
|
||||
@@ -287,7 +287,7 @@ class PamdLine(object):
|
||||
|
||||
@property
|
||||
def is_valid(self):
|
||||
if self.line == '':
|
||||
if self.line.strip() == '':
|
||||
return True
|
||||
return False
|
||||
|
||||
@@ -304,6 +304,10 @@ class PamdLine(object):
|
||||
return str(self.line)
|
||||
|
||||
|
||||
class PamdEmptyLine(PamdLine):
|
||||
pass
|
||||
|
||||
|
||||
class PamdComment(PamdLine):
|
||||
|
||||
def __init__(self, line):
|
||||
@@ -445,8 +449,8 @@ class PamdService(object):
|
||||
pamd_line = PamdComment(line)
|
||||
elif line.lstrip().startswith('@include'):
|
||||
pamd_line = PamdInclude(line)
|
||||
elif line == '':
|
||||
pamd_line = PamdLine(line)
|
||||
elif line.strip() == '':
|
||||
pamd_line = PamdEmptyLine(line)
|
||||
else:
|
||||
pamd_line = PamdRule.rule_from_string(line)
|
||||
|
||||
@@ -545,7 +549,7 @@ class PamdService(object):
|
||||
|
||||
# Next we may have to loop backwards if the previous line is a comment. If it
|
||||
# is, we'll get the previous "rule's" previous.
|
||||
while previous_rule is not None and isinstance(previous_rule, PamdComment):
|
||||
while previous_rule is not None and isinstance(previous_rule, (PamdComment, PamdEmptyLine)):
|
||||
previous_rule = previous_rule.prev
|
||||
# Next we'll see if the previous rule matches what we are trying to insert.
|
||||
if previous_rule is not None and not previous_rule.matches(new_type, new_control, new_path):
|
||||
@@ -589,7 +593,7 @@ class PamdService(object):
|
||||
next_rule = current_rule.next
|
||||
# Next we may have to loop forwards if the next line is a comment. If it
|
||||
# is, we'll get the next "rule's" next.
|
||||
while next_rule is not None and isinstance(next_rule, PamdComment):
|
||||
while next_rule is not None and isinstance(next_rule, (PamdComment, PamdEmptyLine)):
|
||||
next_rule = next_rule.next
|
||||
|
||||
# First we create a new rule
|
||||
@@ -780,13 +784,8 @@ def main():
|
||||
required_if=[
|
||||
("state", "args_present", ["module_arguments"]),
|
||||
("state", "args_absent", ["module_arguments"]),
|
||||
("state", "before", ["new_control"]),
|
||||
("state", "before", ["new_type"]),
|
||||
("state", "before", ["new_module_path"]),
|
||||
("state", "after", ["new_control"]),
|
||||
("state", "after", ["new_type"]),
|
||||
("state", "after", ["new_module_path"]),
|
||||
|
||||
("state", "before", ["new_control", "new_type", "new_module_path"]),
|
||||
("state", "after", ["new_control", "new_type", "new_module_path"]),
|
||||
],
|
||||
)
|
||||
content = str()
|
||||
@@ -798,9 +797,7 @@ def main():
|
||||
content = service_file_obj.read()
|
||||
except IOError as e:
|
||||
# If unable to read the file, fail out
|
||||
module.fail_json(msg='Unable to open/read PAM module \
|
||||
file %s with error %s.' %
|
||||
(fname, str(e)))
|
||||
module.fail_json(msg='Unable to open/read PAM module file %s with error %s.' % (fname, str(e)))
|
||||
|
||||
# Assuming we didn't fail, create the service
|
||||
service = PamdService(content)
|
||||
|
||||
@@ -217,10 +217,11 @@ EXAMPLES = r'''
|
||||
|
||||
- name: Extend an existing partition to fill all available space
|
||||
community.general.parted:
|
||||
decice: /dev/sdb
|
||||
device: /dev/sdb
|
||||
number: "{{ sdb_info.partitions | length }}"
|
||||
part_end: "100%"
|
||||
resize: true
|
||||
state: present
|
||||
'''
|
||||
|
||||
|
||||
|
||||
@@ -292,15 +292,16 @@ EXAMPLES = r"""
|
||||
operation: transition
|
||||
status: Done
|
||||
args:
|
||||
fields:
|
||||
customfield_14321: [ {'set': {'value': 'Value of Select' }} ]
|
||||
comment: [ { 'add': { 'body' : 'Test' } }]
|
||||
fields:
|
||||
customfield_14321: [ {'set': {'value': 'Value of Select' }} ]
|
||||
comment: [ { 'add': { 'body' : 'Test' } }]
|
||||
|
||||
"""
|
||||
|
||||
import base64
|
||||
import json
|
||||
import sys
|
||||
import traceback
|
||||
|
||||
from ansible.module_utils.six.moves.urllib.request import pathname2url
|
||||
|
||||
@@ -331,7 +332,7 @@ def request(url, user, passwd, timeout, data=None, method=None):
|
||||
try:
|
||||
error = json.loads(info['body'])
|
||||
except Exception:
|
||||
module.fail_json(msg=to_native(info['body']))
|
||||
module.fail_json(msg=to_native(info['body']), exception=traceback.format_exc())
|
||||
if error:
|
||||
msg = []
|
||||
for key in ('errorMessages', 'errors'):
|
||||
@@ -379,27 +380,25 @@ def create(restbase, user, passwd, params):
|
||||
|
||||
url = restbase + '/issue/'
|
||||
|
||||
return post(url, user, passwd, params['timeout'], data)
|
||||
return True, post(url, user, passwd, params['timeout'], data)
|
||||
|
||||
|
||||
def comment(restbase, user, passwd, params):
|
||||
data = {
|
||||
'body': params['comment']
|
||||
}
|
||||
|
||||
url = restbase + '/issue/' + params['issue'] + '/comment'
|
||||
|
||||
return post(url, user, passwd, params['timeout'], data)
|
||||
return True, post(url, user, passwd, params['timeout'], data)
|
||||
|
||||
|
||||
def edit(restbase, user, passwd, params):
|
||||
data = {
|
||||
'fields': params['fields']
|
||||
}
|
||||
|
||||
url = restbase + '/issue/' + params['issue']
|
||||
|
||||
return put(url, user, passwd, params['timeout'], data)
|
||||
return True, put(url, user, passwd, params['timeout'], data)
|
||||
|
||||
|
||||
def update(restbase, user, passwd, params):
|
||||
@@ -408,13 +407,12 @@ def update(restbase, user, passwd, params):
|
||||
}
|
||||
url = restbase + '/issue/' + params['issue']
|
||||
|
||||
return put(url, user, passwd, params['timeout'], data)
|
||||
return True, put(url, user, passwd, params['timeout'], data)
|
||||
|
||||
|
||||
def fetch(restbase, user, passwd, params):
|
||||
url = restbase + '/issue/' + params['issue']
|
||||
ret = get(url, user, passwd, params['timeout'])
|
||||
return ret
|
||||
return False, get(url, user, passwd, params['timeout'])
|
||||
|
||||
|
||||
def search(restbase, user, passwd, params):
|
||||
@@ -424,7 +422,7 @@ def search(restbase, user, passwd, params):
|
||||
url = url + '&fields=' + '&fields='.join([pathname2url(f) for f in fields])
|
||||
if params['maxresults']:
|
||||
url = url + '&maxResults=' + str(params['maxresults'])
|
||||
return get(url, user, passwd, params['timeout'])
|
||||
return False, get(url, user, passwd, params['timeout'])
|
||||
|
||||
|
||||
def transition(restbase, user, passwd, params):
|
||||
@@ -447,7 +445,7 @@ def transition(restbase, user, passwd, params):
|
||||
data = {'transition': {"id": tid},
|
||||
'update': params['fields']}
|
||||
|
||||
return post(url, user, passwd, params['timeout'], data)
|
||||
return True, post(url, user, passwd, params['timeout'], data)
|
||||
|
||||
|
||||
def link(restbase, user, passwd, params):
|
||||
@@ -459,18 +457,7 @@ def link(restbase, user, passwd, params):
|
||||
|
||||
url = restbase + '/issueLink/'
|
||||
|
||||
return post(url, user, passwd, params['timeout'], data)
|
||||
|
||||
|
||||
# Some parameters are required depending on the operation:
|
||||
OP_REQUIRED = dict(create=['project', 'issuetype', 'summary'],
|
||||
comment=['issue', 'comment'],
|
||||
edit=[],
|
||||
update=[],
|
||||
fetch=['issue'],
|
||||
transition=['status'],
|
||||
link=['linktype', 'inwardissue', 'outwardissue'],
|
||||
search=['jql'])
|
||||
return True, post(url, user, passwd, params['timeout'], data)
|
||||
|
||||
|
||||
def main():
|
||||
@@ -500,19 +487,19 @@ def main():
|
||||
timeout=dict(type='float', default=10),
|
||||
validate_certs=dict(default=True, type='bool'),
|
||||
),
|
||||
required_if=(
|
||||
('operation', 'create', ['project', 'issuetype', 'summary']),
|
||||
('operation', 'comment', ['issue', 'comment']),
|
||||
('operation', 'fetch', ['issue']),
|
||||
('operation', 'transition', ['issue', 'status']),
|
||||
('operation', 'link', ['linktype', 'inwardissue', 'outwardissue']),
|
||||
('operation', 'search', ['jql']),
|
||||
),
|
||||
supports_check_mode=False
|
||||
)
|
||||
|
||||
op = module.params['operation']
|
||||
|
||||
# Check we have the necessary per-operation parameters
|
||||
missing = []
|
||||
for parm in OP_REQUIRED[op]:
|
||||
if not module.params[parm]:
|
||||
missing.append(parm)
|
||||
if missing:
|
||||
module.fail_json(msg="Operation %s require the following missing parameters: %s" % (op, ",".join(missing)))
|
||||
|
||||
# Handle rest of parameters
|
||||
uri = module.params['uri']
|
||||
user = module.params['username']
|
||||
@@ -532,12 +519,12 @@ def main():
|
||||
thismod = sys.modules[__name__]
|
||||
method = getattr(thismod, op)
|
||||
|
||||
ret = method(restbase, user, passwd, module.params)
|
||||
changed, ret = method(restbase, user, passwd, module.params)
|
||||
|
||||
except Exception as e:
|
||||
return module.fail_json(msg=to_native(e))
|
||||
return module.fail_json(msg=to_native(e), exception=traceback.format_exc())
|
||||
|
||||
module.exit_json(changed=True, meta=ret)
|
||||
module.exit_json(changed=changed, meta=ret)
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
|
||||
129
shippable.yml
129
shippable.yml
@@ -24,135 +24,6 @@ matrix:
|
||||
- env: T=2.9/sanity/3
|
||||
- env: T=2.9/sanity/4
|
||||
|
||||
- env: T=devel/units/2.6/1
|
||||
- env: T=devel/units/2.7/1
|
||||
- env: T=devel/units/3.5/1
|
||||
- env: T=devel/units/3.6/1
|
||||
- env: T=devel/units/3.7/1
|
||||
- env: T=devel/units/3.8/1
|
||||
- env: T=devel/units/3.9/1
|
||||
|
||||
- env: T=2.10/units/2.6/1
|
||||
- env: T=2.10/units/2.7/1
|
||||
- env: T=2.10/units/3.5/1
|
||||
- env: T=2.10/units/3.6/1
|
||||
- env: T=2.10/units/3.7/1
|
||||
- env: T=2.10/units/3.8/1
|
||||
- env: T=2.10/units/3.9/1
|
||||
|
||||
- env: T=2.9/units/2.6/1
|
||||
- env: T=2.9/units/2.7/1
|
||||
- env: T=2.9/units/3.5/1
|
||||
- env: T=2.9/units/3.6/1
|
||||
- env: T=2.9/units/3.7/1
|
||||
- env: T=2.9/units/3.8/1
|
||||
|
||||
#- env: T=devel/aix/7.2/1
|
||||
- env: T=devel/osx/10.11/1
|
||||
- env: T=devel/macos/10.15/1
|
||||
- env: T=devel/rhel/7.8/1
|
||||
- env: T=devel/rhel/8.2/1
|
||||
- env: T=devel/freebsd/11.1/1
|
||||
- env: T=devel/freebsd/12.1/1
|
||||
- env: T=devel/linux/centos6/1
|
||||
- env: T=devel/linux/centos7/1
|
||||
- env: T=devel/linux/centos8/1
|
||||
- env: T=devel/linux/fedora31/1
|
||||
- env: T=devel/linux/fedora32/1
|
||||
- env: T=devel/linux/opensuse15py2/1
|
||||
- env: T=devel/linux/opensuse15/1
|
||||
- env: T=devel/linux/ubuntu1604/1
|
||||
- env: T=devel/linux/ubuntu1804/1
|
||||
|
||||
#- env: T=devel/aix/7.2/2
|
||||
- env: T=devel/osx/10.11/2
|
||||
- env: T=devel/macos/10.15/2
|
||||
- env: T=devel/rhel/7.8/2
|
||||
- env: T=devel/rhel/8.2/2
|
||||
- env: T=devel/freebsd/11.1/2
|
||||
- env: T=devel/freebsd/12.1/2
|
||||
- env: T=devel/linux/centos6/2
|
||||
- env: T=devel/linux/centos7/2
|
||||
- env: T=devel/linux/centos8/2
|
||||
- env: T=devel/linux/fedora31/2
|
||||
- env: T=devel/linux/fedora32/2
|
||||
- env: T=devel/linux/opensuse15py2/2
|
||||
- env: T=devel/linux/opensuse15/2
|
||||
- env: T=devel/linux/ubuntu1604/2
|
||||
- env: T=devel/linux/ubuntu1804/2
|
||||
|
||||
#- env: T=devel/aix/7.2/3
|
||||
- env: T=devel/osx/10.11/3
|
||||
- env: T=devel/macos/10.15/3
|
||||
- env: T=devel/rhel/7.8/3
|
||||
- env: T=devel/rhel/8.2/3
|
||||
- env: T=devel/freebsd/11.1/3
|
||||
- env: T=devel/freebsd/12.1/3
|
||||
- env: T=devel/linux/centos6/3
|
||||
- env: T=devel/linux/centos7/3
|
||||
- env: T=devel/linux/centos8/3
|
||||
- env: T=devel/linux/fedora31/3
|
||||
- env: T=devel/linux/fedora32/3
|
||||
- env: T=devel/linux/opensuse15py2/3
|
||||
- env: T=devel/linux/opensuse15/3
|
||||
- env: T=devel/linux/ubuntu1604/3
|
||||
- env: T=devel/linux/ubuntu1804/3
|
||||
|
||||
#- env: T=devel/aix/7.2/4
|
||||
- env: T=devel/osx/10.11/4
|
||||
- env: T=devel/macos/10.15/4
|
||||
- env: T=devel/rhel/7.8/4
|
||||
- env: T=devel/rhel/8.2/4
|
||||
- env: T=devel/freebsd/11.1/4
|
||||
- env: T=devel/freebsd/12.1/4
|
||||
- env: T=devel/linux/centos6/4
|
||||
- env: T=devel/linux/centos7/4
|
||||
- env: T=devel/linux/centos8/4
|
||||
- env: T=devel/linux/fedora31/4
|
||||
- env: T=devel/linux/fedora32/4
|
||||
- env: T=devel/linux/opensuse15py2/4
|
||||
- env: T=devel/linux/opensuse15/4
|
||||
- env: T=devel/linux/ubuntu1604/4
|
||||
- env: T=devel/linux/ubuntu1804/4
|
||||
|
||||
#- env: T=devel/aix/7.2/5
|
||||
- env: T=devel/osx/10.11/5
|
||||
- env: T=devel/macos/10.15/5
|
||||
- env: T=devel/rhel/7.8/5
|
||||
- env: T=devel/rhel/8.2/5
|
||||
- env: T=devel/freebsd/11.1/5
|
||||
- env: T=devel/freebsd/12.1/5
|
||||
- env: T=devel/linux/centos6/5
|
||||
- env: T=devel/linux/centos7/5
|
||||
- env: T=devel/linux/centos8/5
|
||||
- env: T=devel/linux/fedora31/5
|
||||
- env: T=devel/linux/fedora32/5
|
||||
- env: T=devel/linux/opensuse15py2/5
|
||||
- env: T=devel/linux/opensuse15/5
|
||||
- env: T=devel/linux/ubuntu1604/5
|
||||
- env: T=devel/linux/ubuntu1804/5
|
||||
|
||||
- env: T=devel/cloud/2.7/1
|
||||
- env: T=devel/cloud/3.6/1
|
||||
|
||||
- env: T=2.10/osx/10.11/1
|
||||
- env: T=2.10/rhel/8.2/1
|
||||
- env: T=2.10/freebsd/12.1/2
|
||||
- env: T=2.10/linux/centos8/2
|
||||
- env: T=2.10/linux/fedora32/3
|
||||
- env: T=2.10/linux/opensuse15/3
|
||||
- env: T=2.10/linux/ubuntu1804/4
|
||||
- env: T=2.10/cloud/3.6/1
|
||||
|
||||
#- env: T=2.9/osx/10.11/1
|
||||
- env: T=2.9/rhel/8.2/1
|
||||
- env: T=2.9/freebsd/12.0/2
|
||||
- env: T=2.9/linux/centos8/2
|
||||
- env: T=2.9/linux/fedora31/3
|
||||
- env: T=2.9/linux/opensuse15/3
|
||||
- env: T=2.9/linux/ubuntu1804/4
|
||||
- env: T=2.9/cloud/3.6/1
|
||||
|
||||
branches:
|
||||
except:
|
||||
- "*-patch-*"
|
||||
|
||||
96
tests/integration/targets/callback/tasks/main.yml
Normal file
96
tests/integration/targets/callback/tasks/main.yml
Normal file
@@ -0,0 +1,96 @@
|
||||
---
|
||||
####################################################################
|
||||
# WARNING: These are designed specifically for Ansible tests #
|
||||
# and should not be used as examples of how to write Ansible roles #
|
||||
####################################################################
|
||||
|
||||
- block:
|
||||
- name: Create temporary playbook files
|
||||
tempfile:
|
||||
state: file
|
||||
suffix: temp
|
||||
loop: "{{ tests }}"
|
||||
loop_control:
|
||||
loop_var: test
|
||||
label: "{{ test.name }}"
|
||||
register: temporary_playbook_files
|
||||
|
||||
- name: Set temporary playbook file content
|
||||
copy:
|
||||
content: "{{ test.playbook }}"
|
||||
dest: "{{ temporary_playbook_files.results[test_idx].path }}"
|
||||
loop: "{{ tests }}"
|
||||
loop_control:
|
||||
loop_var: test
|
||||
index_var: test_idx
|
||||
label: "{{ test.name }}"
|
||||
|
||||
- name: Collect outputs
|
||||
command: "ansible-playbook -i {{ inventory }} {{ playbook }}"
|
||||
environment: "{{ test.environment }}"
|
||||
loop: "{{ tests }}"
|
||||
loop_control:
|
||||
loop_var: test
|
||||
label: "{{ test.name }}"
|
||||
register: outputs
|
||||
changed_when: false
|
||||
vars:
|
||||
inventory: "{{ role_path }}/inventory.yml"
|
||||
playbook: "
|
||||
{%- for result in temporary_playbook_files.results -%}
|
||||
{%- if result.test.name == test.name -%}
|
||||
{{- result.path -}}
|
||||
{%- endif -%}
|
||||
{%- endfor -%}"
|
||||
|
||||
- name: Assert test output equals expected output
|
||||
assert:
|
||||
that: result.output.differences | length == 0
|
||||
loop: "{{ results }}"
|
||||
loop_control:
|
||||
loop_var: result
|
||||
label: "{{ result.name }}"
|
||||
register: assertions
|
||||
vars:
|
||||
results: >-
|
||||
{%- set results = [] -%}
|
||||
{%- for result in outputs.results -%}
|
||||
{%- set differences = [] -%}
|
||||
{%- for i in range([result.test.expected_output | count, result.stdout_lines | count] | max) -%}
|
||||
{%- set line = "line_%s" | format(i+1) -%}
|
||||
{%- set test_line = result.stdout_lines[i] | default(none) -%}
|
||||
{%- set expected_lines = result.test.expected_output[i] | default(none) -%}
|
||||
{%- if expected_lines is not string and expected_lines is not none -%}
|
||||
{%- if test_line not in expected_lines -%}
|
||||
{{- differences.append({
|
||||
line: {
|
||||
'expected_one_of': expected_lines,
|
||||
'got': test_line }}) -}}
|
||||
{%- endif -%}
|
||||
{%- else -%}
|
||||
{%- if expected_lines != test_line -%}
|
||||
{{- differences.append({
|
||||
line: {
|
||||
'expected': expected_lines,
|
||||
'got': test_line }}) -}}
|
||||
{%- endif -%}
|
||||
{%- endif -%}
|
||||
{%- endfor -%}
|
||||
{{- results.append({
|
||||
'name': result.test.name,
|
||||
'output': {
|
||||
'differences': differences,
|
||||
'expected': result.test.expected_output,
|
||||
'got': result.stdout_lines }}) -}}
|
||||
{%- endfor -%}
|
||||
{{- results -}}
|
||||
|
||||
always:
|
||||
- name: Remove temporary playbooks
|
||||
file:
|
||||
path: "{{ temporary_file.path }}"
|
||||
state: absent
|
||||
loop: "{{ temporary_playbook_files.results }}"
|
||||
loop_control:
|
||||
loop_var: temporary_file
|
||||
label: "{{ temporary_file.test.name }}: {{ temporary_file.path }}"
|
||||
@@ -1,2 +1,2 @@
|
||||
shippable/posix/group3
|
||||
skip/aix
|
||||
needs/target/callback
|
||||
|
||||
@@ -1,9 +0,0 @@
|
||||
|
||||
- name: Remove temporary playbooks
|
||||
file:
|
||||
path: "{{ temporary_file.path }}"
|
||||
state: absent
|
||||
loop: "{{ temporary_playbook_files.results }}"
|
||||
loop_control:
|
||||
loop_var: temporary_file
|
||||
label: "{{ temporary_file.test.name }}: {{ temporary_file.path }}"
|
||||
@@ -3,13 +3,16 @@
|
||||
# and should not be used as examples of how to write Ansible roles #
|
||||
####################################################################
|
||||
|
||||
- name: Set tests
|
||||
set_fact:
|
||||
- name: Run tests
|
||||
include_role:
|
||||
name: callback
|
||||
vars:
|
||||
tests:
|
||||
- name: Not using diy callback options
|
||||
environment: >-
|
||||
ANSIBLE_FORCE_COLOR=True
|
||||
ANSIBLE_STDOUT_CALLBACK=community.general.diy
|
||||
environment:
|
||||
ANSIBLE_NOCOLOR: 'true'
|
||||
ANSIBLE_FORCE_COLOR: 'false'
|
||||
ANSIBLE_STDOUT_CALLBACK: community.general.diy
|
||||
playbook: |
|
||||
- hosts: testhost
|
||||
gather_facts: false
|
||||
@@ -22,19 +25,20 @@
|
||||
"PLAY [testhost] ****************************************************************",
|
||||
"",
|
||||
"TASK [Sample task name] ********************************************************",
|
||||
"\u001b[0;32mok: [testhost] => {\u001b[0m",
|
||||
"\u001b[0;32m \"msg\": \"sample debug msg\"\u001b[0m",
|
||||
"\u001b[0;32m}\u001b[0m",
|
||||
"ok: [testhost] => {",
|
||||
" \"msg\": \"sample debug msg\"",
|
||||
"}",
|
||||
"",
|
||||
"PLAY RECAP *********************************************************************",
|
||||
"\u001b[0;32mtesthost\u001b[0m : \u001b[0;32mok=1 \u001b[0m changed=0 unreachable=0 failed=0 skipped=0 rescued=0 ignored=0 "
|
||||
"testhost : ok=1 changed=0 unreachable=0 failed=0 skipped=0 rescued=0 ignored=0 "
|
||||
]
|
||||
|
||||
- name: Set playbook_on_start_msg callback using environment variable
|
||||
environment: >-
|
||||
ANSIBLE_FORCE_COLOR=True
|
||||
ANSIBLE_STDOUT_CALLBACK=community.general.diy
|
||||
ANSIBLE_CALLBACK_DIY_PLAYBOOK_ON_START_MSG="Sample output Sample playbook message"
|
||||
environment:
|
||||
ANSIBLE_NOCOLOR: 'true'
|
||||
ANSIBLE_FORCE_COLOR: 'false'
|
||||
ANSIBLE_STDOUT_CALLBACK: community.general.diy
|
||||
ANSIBLE_CALLBACK_DIY_PLAYBOOK_ON_START_MSG: "Sample output Sample playbook message"
|
||||
playbook: |
|
||||
- hosts: testhost
|
||||
gather_facts: false
|
||||
@@ -48,24 +52,25 @@
|
||||
"PLAY [testhost] ****************************************************************",
|
||||
"",
|
||||
"TASK [Sample task name] ********************************************************",
|
||||
"\u001b[0;32mok: [testhost] => {\u001b[0m",
|
||||
"\u001b[0;32m \"msg\": \"sample debug msg\"\u001b[0m",
|
||||
"\u001b[0;32m}\u001b[0m",
|
||||
"ok: [testhost] => {",
|
||||
" \"msg\": \"sample debug msg\"",
|
||||
"}",
|
||||
"",
|
||||
"PLAY RECAP *********************************************************************",
|
||||
"\u001b[0;32mtesthost\u001b[0m : \u001b[0;32mok=1 \u001b[0m changed=0 unreachable=0 failed=0 skipped=0 rescued=0 ignored=0 "
|
||||
"testhost : ok=1 changed=0 unreachable=0 failed=0 skipped=0 rescued=0 ignored=0 "
|
||||
]
|
||||
|
||||
- name: Set playbook_on_play_start_msg callback using play variable
|
||||
environment: >-
|
||||
ANSIBLE_FORCE_COLOR=True
|
||||
ANSIBLE_STDOUT_CALLBACK=community.general.diy
|
||||
playbook: |
|
||||
environment:
|
||||
ANSIBLE_NOCOLOR: 'true'
|
||||
ANSIBLE_FORCE_COLOR: 'false'
|
||||
ANSIBLE_STDOUT_CALLBACK: community.general.diy
|
||||
playbook: !unsafe |
|
||||
- name: Sample play name
|
||||
hosts: testhost
|
||||
gather_facts: false
|
||||
vars:
|
||||
ansible_callback_diy_playbook_on_play_start_msg: Sample output \{\{ ansible_callback_diy.play.name \}\}
|
||||
ansible_callback_diy_playbook_on_play_start_msg: Sample output {{ ansible_callback_diy.play.name }}
|
||||
tasks:
|
||||
- name: Sample task name
|
||||
debug:
|
||||
@@ -74,23 +79,24 @@
|
||||
"Sample output Sample play name",
|
||||
"",
|
||||
"TASK [Sample task name] ********************************************************",
|
||||
"\u001b[0;32mok: [testhost] => {\u001b[0m",
|
||||
"\u001b[0;32m \"msg\": \"sample debug msg\"\u001b[0m",
|
||||
"\u001b[0;32m}\u001b[0m",
|
||||
"ok: [testhost] => {",
|
||||
" \"msg\": \"sample debug msg\"",
|
||||
"}",
|
||||
"",
|
||||
"PLAY RECAP *********************************************************************",
|
||||
"\u001b[0;32mtesthost\u001b[0m : \u001b[0;32mok=1 \u001b[0m changed=0 unreachable=0 failed=0 skipped=0 rescued=0 ignored=0 "
|
||||
"testhost : ok=1 changed=0 unreachable=0 failed=0 skipped=0 rescued=0 ignored=0 "
|
||||
]
|
||||
|
||||
- name: Set playbook_on_task_start_msg callback using play variable
|
||||
environment: >-
|
||||
ANSIBLE_FORCE_COLOR=True
|
||||
ANSIBLE_STDOUT_CALLBACK=community.general.diy
|
||||
playbook: |
|
||||
environment:
|
||||
ANSIBLE_NOCOLOR: 'true'
|
||||
ANSIBLE_FORCE_COLOR: 'false'
|
||||
ANSIBLE_STDOUT_CALLBACK: community.general.diy
|
||||
playbook: !unsafe |
|
||||
- hosts: testhost
|
||||
gather_facts: false
|
||||
vars:
|
||||
ansible_callback_diy_playbook_on_task_start_msg: Sample output \{\{ ansible_callback_diy.task.name \}\}
|
||||
ansible_callback_diy_playbook_on_task_start_msg: Sample output {{ ansible_callback_diy.task.name }}
|
||||
tasks:
|
||||
- name: Sample task name
|
||||
debug:
|
||||
@@ -99,19 +105,20 @@
|
||||
"",
|
||||
"PLAY [testhost] ****************************************************************",
|
||||
"Sample output Sample task name",
|
||||
"\u001b[0;32mok: [testhost] => {\u001b[0m",
|
||||
"\u001b[0;32m \"msg\": \"sample debug msg\"\u001b[0m",
|
||||
"\u001b[0;32m}\u001b[0m",
|
||||
"ok: [testhost] => {",
|
||||
" \"msg\": \"sample debug msg\"",
|
||||
"}",
|
||||
"",
|
||||
"PLAY RECAP *********************************************************************",
|
||||
"\u001b[0;32mtesthost\u001b[0m : \u001b[0;32mok=1 \u001b[0m changed=0 unreachable=0 failed=0 skipped=0 rescued=0 ignored=0 "
|
||||
"testhost : ok=1 changed=0 unreachable=0 failed=0 skipped=0 rescued=0 ignored=0 "
|
||||
]
|
||||
|
||||
- name: Set playbook_on_task_start_msg callback using task variable
|
||||
environment: >-
|
||||
ANSIBLE_FORCE_COLOR=True
|
||||
ANSIBLE_STDOUT_CALLBACK=community.general.diy
|
||||
playbook: |
|
||||
environment:
|
||||
ANSIBLE_NOCOLOR: 'true'
|
||||
ANSIBLE_FORCE_COLOR: 'false'
|
||||
ANSIBLE_STDOUT_CALLBACK: community.general.diy
|
||||
playbook: !unsafe |
|
||||
- hosts: testhost
|
||||
gather_facts: false
|
||||
tasks:
|
||||
@@ -119,24 +126,25 @@
|
||||
debug:
|
||||
msg: sample debug msg
|
||||
vars:
|
||||
ansible_callback_diy_playbook_on_task_start_msg: Sample output \{\{ ansible_callback_diy.task.name \}\}
|
||||
ansible_callback_diy_playbook_on_task_start_msg: Sample output {{ ansible_callback_diy.task.name }}
|
||||
expected_output: [
|
||||
"",
|
||||
"PLAY [testhost] ****************************************************************",
|
||||
"Sample output Sample task name",
|
||||
"\u001b[0;32mok: [testhost] => {\u001b[0m",
|
||||
"\u001b[0;32m \"msg\": \"sample debug msg\"\u001b[0m",
|
||||
"\u001b[0;32m}\u001b[0m",
|
||||
"ok: [testhost] => {",
|
||||
" \"msg\": \"sample debug msg\"",
|
||||
"}",
|
||||
"",
|
||||
"PLAY RECAP *********************************************************************",
|
||||
"\u001b[0;32mtesthost\u001b[0m : \u001b[0;32mok=1 \u001b[0m changed=0 unreachable=0 failed=0 skipped=0 rescued=0 ignored=0 "
|
||||
"testhost : ok=1 changed=0 unreachable=0 failed=0 skipped=0 rescued=0 ignored=0 "
|
||||
]
|
||||
|
||||
- name: Set runner_on_ok_msg callback using task variable
|
||||
environment: >-
|
||||
ANSIBLE_FORCE_COLOR=True
|
||||
ANSIBLE_STDOUT_CALLBACK=community.general.diy
|
||||
playbook: |
|
||||
environment:
|
||||
ANSIBLE_NOCOLOR: 'true'
|
||||
ANSIBLE_FORCE_COLOR: 'false'
|
||||
ANSIBLE_STDOUT_CALLBACK: community.general.diy
|
||||
playbook: !unsafe |
|
||||
- hosts: testhost
|
||||
gather_facts: false
|
||||
tasks:
|
||||
@@ -144,7 +152,7 @@
|
||||
debug:
|
||||
msg: sample debug msg
|
||||
vars:
|
||||
ansible_callback_diy_runner_on_ok_msg: Sample output \{\{ ansible_callback_diy.result.output.msg \}\}
|
||||
ansible_callback_diy_runner_on_ok_msg: Sample output {{ ansible_callback_diy.result.output.msg }}
|
||||
expected_output: [
|
||||
"",
|
||||
"PLAY [testhost] ****************************************************************",
|
||||
@@ -153,13 +161,14 @@
|
||||
"Sample output sample debug msg",
|
||||
"",
|
||||
"PLAY RECAP *********************************************************************",
|
||||
"\u001b[0;32mtesthost\u001b[0m : \u001b[0;32mok=1 \u001b[0m changed=0 unreachable=0 failed=0 skipped=0 rescued=0 ignored=0 "
|
||||
"testhost : ok=1 changed=0 unreachable=0 failed=0 skipped=0 rescued=0 ignored=0 "
|
||||
]
|
||||
|
||||
- name: Set runner_on_failed_msg callback using task variable
|
||||
environment: >-
|
||||
ANSIBLE_FORCE_COLOR=True
|
||||
ANSIBLE_STDOUT_CALLBACK=community.general.diy
|
||||
environment:
|
||||
ANSIBLE_NOCOLOR: 'true'
|
||||
ANSIBLE_FORCE_COLOR: 'false'
|
||||
ANSIBLE_STDOUT_CALLBACK: community.general.diy
|
||||
playbook: |
|
||||
- hosts: testhost
|
||||
gather_facts: false
|
||||
@@ -179,14 +188,15 @@
|
||||
"Sample output Sample failure message",
|
||||
"",
|
||||
"PLAY RECAP *********************************************************************",
|
||||
"\u001b[0;32mtesthost\u001b[0m : \u001b[0;32mok=1 \u001b[0m changed=0 unreachable=0 failed=0 skipped=0 rescued=0 \u001b[1;35mignored=1 \u001b[0m"
|
||||
"testhost : ok=1 changed=0 unreachable=0 failed=0 skipped=0 rescued=0 ignored=1 "
|
||||
]
|
||||
|
||||
- name: Set runner_on_skipped_msg callback using task variable
|
||||
environment: >-
|
||||
ANSIBLE_FORCE_COLOR=True
|
||||
ANSIBLE_STDOUT_CALLBACK=community.general.diy
|
||||
playbook: |
|
||||
environment:
|
||||
ANSIBLE_NOCOLOR: 'true'
|
||||
ANSIBLE_FORCE_COLOR: 'false'
|
||||
ANSIBLE_STDOUT_CALLBACK: community.general.diy
|
||||
playbook: !unsafe |
|
||||
- hosts: testhost
|
||||
gather_facts: false
|
||||
tasks:
|
||||
@@ -195,7 +205,7 @@
|
||||
msg: sample debug msg
|
||||
when: false
|
||||
vars:
|
||||
ansible_callback_diy_runner_on_skipped_msg: Sample output Skipped \{\{ ansible_callback_diy.task.name \}\}
|
||||
ansible_callback_diy_runner_on_skipped_msg: Sample output Skipped {{ ansible_callback_diy.task.name }}
|
||||
expected_output: [
|
||||
"",
|
||||
"PLAY [testhost] ****************************************************************",
|
||||
@@ -204,26 +214,27 @@
|
||||
"Sample output Skipped Sample task name",
|
||||
"",
|
||||
"PLAY RECAP *********************************************************************",
|
||||
"\u001b[0;32mtesthost\u001b[0m : ok=0 changed=0 unreachable=0 failed=0 \u001b[0;36mskipped=1 \u001b[0m rescued=0 ignored=0 "
|
||||
"testhost : ok=0 changed=0 unreachable=0 failed=0 skipped=1 rescued=0 ignored=0 "
|
||||
]
|
||||
|
||||
- name: Set runner_item_on_ok_msg callback using task variable
|
||||
environment: >-
|
||||
ANSIBLE_FORCE_COLOR=True
|
||||
ANSIBLE_STDOUT_CALLBACK=community.general.diy
|
||||
playbook: |
|
||||
environment:
|
||||
ANSIBLE_NOCOLOR: 'true'
|
||||
ANSIBLE_FORCE_COLOR: 'false'
|
||||
ANSIBLE_STDOUT_CALLBACK: community.general.diy
|
||||
playbook: !unsafe |
|
||||
- hosts: testhost
|
||||
gather_facts: false
|
||||
tasks:
|
||||
- name: Sample task name
|
||||
debug:
|
||||
msg: sample debug msg \{\{ item \}\}
|
||||
msg: sample debug msg {{ item }}
|
||||
loop:
|
||||
- sample item 1
|
||||
- sample item 2
|
||||
- sample item 3
|
||||
vars:
|
||||
ansible_callback_diy_runner_item_on_ok_msg: Sample output Looping \{\{ ansible_callback_diy.result.output.msg \}\}
|
||||
ansible_callback_diy_runner_item_on_ok_msg: Sample output Looping {{ ansible_callback_diy.result.output.msg }}
|
||||
expected_output: [
|
||||
"",
|
||||
"PLAY [testhost] ****************************************************************",
|
||||
@@ -234,20 +245,21 @@
|
||||
"Sample output Looping sample debug msg sample item 3",
|
||||
"",
|
||||
"PLAY RECAP *********************************************************************",
|
||||
"\u001b[0;32mtesthost\u001b[0m : \u001b[0;32mok=1 \u001b[0m changed=0 unreachable=0 failed=0 skipped=0 rescued=0 ignored=0 "
|
||||
"testhost : ok=1 changed=0 unreachable=0 failed=0 skipped=0 rescued=0 ignored=0 "
|
||||
]
|
||||
|
||||
- name: Set runner_item_on_failed_msg callback using task variable
|
||||
environment: >-
|
||||
ANSIBLE_FORCE_COLOR=True
|
||||
ANSIBLE_STDOUT_CALLBACK=community.general.diy
|
||||
playbook: |
|
||||
environment:
|
||||
ANSIBLE_NOCOLOR: 'true'
|
||||
ANSIBLE_FORCE_COLOR: 'false'
|
||||
ANSIBLE_STDOUT_CALLBACK: community.general.diy
|
||||
playbook: !unsafe |
|
||||
- hosts: testhost
|
||||
gather_facts: false
|
||||
tasks:
|
||||
- name: Sample task name
|
||||
debug:
|
||||
msg: sample debug msg \{\{ item \}\}
|
||||
msg: sample debug msg {{ item }}
|
||||
loop:
|
||||
- sample item 1
|
||||
- sample item 2
|
||||
@@ -261,93 +273,95 @@
|
||||
"PLAY [testhost] ****************************************************************",
|
||||
"",
|
||||
"TASK [Sample task name] ********************************************************",
|
||||
"\u001b[0;32mok: [testhost] => (item=sample item 1) => {\u001b[0m",
|
||||
"\u001b[0;32m \"msg\": \"sample debug msg sample item 1\"\u001b[0m",
|
||||
"\u001b[0;32m}\u001b[0m",
|
||||
"ok: [testhost] => (item=sample item 1) => {",
|
||||
" \"msg\": \"sample debug msg sample item 1\"",
|
||||
"}",
|
||||
"Sample output Looping sample failure message",
|
||||
"\u001b[0;32mok: [testhost] => (item=sample item 3) => {\u001b[0m",
|
||||
"\u001b[0;32m \"msg\": \"sample debug msg sample item 3\"\u001b[0m",
|
||||
"\u001b[0;32m}\u001b[0m",
|
||||
"ok: [testhost] => (item=sample item 3) => {",
|
||||
" \"msg\": \"sample debug msg sample item 3\"",
|
||||
"}",
|
||||
[
|
||||
# Apparently a bug was fixed in Ansible, as before it ran through with "All items completed"
|
||||
"\u001b[0;31mfatal: [testhost]: FAILED! => {\"msg\": \"All items completed\"}\u001b[0m",
|
||||
"\u001b[0;31mfatal: [testhost]: FAILED! => {\"msg\": \"One or more items failed\"}\u001b[0m",
|
||||
"fatal: [testhost]: FAILED! => {\"msg\": \"All items completed\"}",
|
||||
"fatal: [testhost]: FAILED! => {\"msg\": \"One or more items failed\"}",
|
||||
],
|
||||
"\u001b[0;36m...ignoring\u001b[0m",
|
||||
"...ignoring",
|
||||
"",
|
||||
"PLAY RECAP *********************************************************************",
|
||||
"\u001b[0;32mtesthost\u001b[0m : \u001b[0;32mok=1 \u001b[0m changed=0 unreachable=0 failed=0 skipped=0 rescued=0 \u001b[1;35mignored=1 \u001b[0m"
|
||||
"testhost : ok=1 changed=0 unreachable=0 failed=0 skipped=0 rescued=0 ignored=1 "
|
||||
]
|
||||
|
||||
- name: Set runner_item_on_skipped_msg callback using task variable
|
||||
environment: >-
|
||||
ANSIBLE_FORCE_COLOR=True
|
||||
ANSIBLE_STDOUT_CALLBACK=community.general.diy
|
||||
playbook: |
|
||||
environment:
|
||||
ANSIBLE_NOCOLOR: 'true'
|
||||
ANSIBLE_FORCE_COLOR: 'false'
|
||||
ANSIBLE_STDOUT_CALLBACK: community.general.diy
|
||||
playbook: !unsafe |
|
||||
- hosts: testhost
|
||||
gather_facts: false
|
||||
tasks:
|
||||
- name: Sample task name
|
||||
debug:
|
||||
msg: sample debug msg \{\{ item \}\}
|
||||
msg: sample debug msg {{ item }}
|
||||
loop:
|
||||
- sample item 1
|
||||
- sample item 2
|
||||
- sample item 3
|
||||
when: item != 'sample item 2'
|
||||
vars:
|
||||
ansible_callback_diy_runner_item_on_skipped_msg: Sample output Looping Skipped \{\{ ansible_callback_diy.result.output.item \}\}
|
||||
ansible_callback_diy_runner_item_on_skipped_msg: Sample output Looping Skipped {{ ansible_callback_diy.result.output.item }}
|
||||
expected_output: [
|
||||
"",
|
||||
"PLAY [testhost] ****************************************************************",
|
||||
"",
|
||||
"TASK [Sample task name] ********************************************************",
|
||||
"\u001b[0;32mok: [testhost] => (item=sample item 1) => {\u001b[0m",
|
||||
"\u001b[0;32m \"msg\": \"sample debug msg sample item 1\"\u001b[0m",
|
||||
"\u001b[0;32m}\u001b[0m",
|
||||
"ok: [testhost] => (item=sample item 1) => {",
|
||||
" \"msg\": \"sample debug msg sample item 1\"",
|
||||
"}",
|
||||
"Sample output Looping Skipped sample item 2",
|
||||
"\u001b[0;32mok: [testhost] => (item=sample item 3) => {\u001b[0m",
|
||||
"\u001b[0;32m \"msg\": \"sample debug msg sample item 3\"\u001b[0m",
|
||||
"\u001b[0;32m}\u001b[0m",
|
||||
"ok: [testhost] => (item=sample item 3) => {",
|
||||
" \"msg\": \"sample debug msg sample item 3\"",
|
||||
"}",
|
||||
"",
|
||||
"PLAY RECAP *********************************************************************",
|
||||
"\u001b[0;32mtesthost\u001b[0m : \u001b[0;32mok=1 \u001b[0m changed=0 unreachable=0 failed=0 skipped=0 rescued=0 ignored=0 "
|
||||
"testhost : ok=1 changed=0 unreachable=0 failed=0 skipped=0 rescued=0 ignored=0 "
|
||||
]
|
||||
|
||||
- name: Set playbook_on_stats_msg callback using play variable
|
||||
environment: >-
|
||||
ANSIBLE_FORCE_COLOR=True
|
||||
ANSIBLE_STDOUT_CALLBACK=community.general.diy
|
||||
playbook: |
|
||||
environment:
|
||||
ANSIBLE_NOCOLOR: 'true'
|
||||
ANSIBLE_FORCE_COLOR: 'false'
|
||||
ANSIBLE_STDOUT_CALLBACK: community.general.diy
|
||||
playbook: !unsafe |
|
||||
- hosts: testhost
|
||||
gather_facts: false
|
||||
vars:
|
||||
ansible_callback_diy_playbook_on_stats_msg: |+2
|
||||
Sample output stats
|
||||
===============================
|
||||
{\% for key in ansible_callback_diy.stats | sort %\}
|
||||
{\% set color_one = "" %\}
|
||||
{\% set color_two = "" %\}
|
||||
{\% if ansible_callback_diy.stats[key] %\}
|
||||
{\% if key == 'ok' %\}
|
||||
{\% set prefix = ' ' %\}
|
||||
{\% set suffix = ' ' %\}
|
||||
{\% elif key == 'changed' %\}
|
||||
{\% set prefix = ' ' %\}
|
||||
{\% set suffix = ' ' %\}
|
||||
{\% elif key == 'processed' %\}
|
||||
{\% set prefix = ' ' %\}
|
||||
{\% set suffix = ' ' %\}
|
||||
{\% elif key == 'skipped' %\}
|
||||
{\% set prefix = ' ' %\}
|
||||
{\% set suffix = ' ' %\}
|
||||
{\% else %\}
|
||||
{\% set prefix = "" %\}
|
||||
{\% set suffix = "" %\}
|
||||
{\% endif %\}
|
||||
\{\{ color_one \}\}\{\{ "%s%s%s" | format(prefix,key,suffix) \}\}\{\{ color_two \}\}: \{\{ ansible_callback_diy.stats[key] | to_nice_yaml \}\}
|
||||
{\% endif %\}
|
||||
{\% endfor %\}
|
||||
{% for key in ansible_callback_diy.stats | sort %}
|
||||
{% set color_one = "" %}
|
||||
{% set color_two = "" %}
|
||||
{% if ansible_callback_diy.stats[key] %}
|
||||
{% if key == 'ok' %}
|
||||
{% set prefix = ' ' %}
|
||||
{% set suffix = ' ' %}
|
||||
{% elif key == 'changed' %}
|
||||
{% set prefix = ' ' %}
|
||||
{% set suffix = ' ' %}
|
||||
{% elif key == 'processed' %}
|
||||
{% set prefix = ' ' %}
|
||||
{% set suffix = ' ' %}
|
||||
{% elif key == 'skipped' %}
|
||||
{% set prefix = ' ' %}
|
||||
{% set suffix = ' ' %}
|
||||
{% else %}
|
||||
{% set prefix = "" %}
|
||||
{% set suffix = "" %}
|
||||
{% endif %}
|
||||
{{ color_one }}{{ "%s%s%s" | format(prefix,key,suffix) }}{{ color_two }}: {{ ansible_callback_diy.stats[key] | to_nice_yaml }}
|
||||
{% endif %}
|
||||
{% endfor %}
|
||||
tasks:
|
||||
- name: Sample task name
|
||||
debug:
|
||||
@@ -357,9 +371,9 @@
|
||||
"PLAY [testhost] ****************************************************************",
|
||||
"",
|
||||
"TASK [Sample task name] ********************************************************",
|
||||
"\u001b[0;32mok: [testhost] => {\u001b[0m",
|
||||
"\u001b[0;32m \"msg\": \"sample debug msg\"\u001b[0m",
|
||||
"\u001b[0;32m}\u001b[0m",
|
||||
"ok: [testhost] => {",
|
||||
" \"msg\": \"sample debug msg\"",
|
||||
"}",
|
||||
" Sample output stats",
|
||||
"===============================",
|
||||
" ok : testhost: 1",
|
||||
@@ -368,9 +382,10 @@
|
||||
]
|
||||
|
||||
- name: Suppress output on playbook_on_task_start_msg callback using task variable
|
||||
environment: >-
|
||||
ANSIBLE_FORCE_COLOR=True
|
||||
ANSIBLE_STDOUT_CALLBACK=community.general.diy
|
||||
environment:
|
||||
ANSIBLE_NOCOLOR: 'true'
|
||||
ANSIBLE_FORCE_COLOR: 'false'
|
||||
ANSIBLE_STDOUT_CALLBACK: community.general.diy
|
||||
playbook: |
|
||||
- hosts: testhost
|
||||
gather_facts: false
|
||||
@@ -383,18 +398,19 @@
|
||||
expected_output: [
|
||||
"",
|
||||
"PLAY [testhost] ****************************************************************",
|
||||
"\u001b[0;32mok: [testhost] => {\u001b[0m",
|
||||
"\u001b[0;32m \"msg\": \"sample debug msg\"\u001b[0m",
|
||||
"\u001b[0;32m}\u001b[0m",
|
||||
"ok: [testhost] => {",
|
||||
" \"msg\": \"sample debug msg\"",
|
||||
"}",
|
||||
"",
|
||||
"PLAY RECAP *********************************************************************",
|
||||
"\u001b[0;32mtesthost\u001b[0m : \u001b[0;32mok=1 \u001b[0m changed=0 unreachable=0 failed=0 skipped=0 rescued=0 ignored=0 "
|
||||
"testhost : ok=1 changed=0 unreachable=0 failed=0 skipped=0 rescued=0 ignored=0 "
|
||||
]
|
||||
|
||||
- name: Suppress output on runner_on_ok_msg callback using task variable
|
||||
environment: >-
|
||||
ANSIBLE_FORCE_COLOR=True
|
||||
ANSIBLE_STDOUT_CALLBACK=community.general.diy
|
||||
environment:
|
||||
ANSIBLE_NOCOLOR: 'true'
|
||||
ANSIBLE_FORCE_COLOR: 'false'
|
||||
ANSIBLE_STDOUT_CALLBACK: community.general.diy
|
||||
playbook: |
|
||||
- hosts: testhost
|
||||
gather_facts: false
|
||||
@@ -411,14 +427,15 @@
|
||||
"TASK [Sample task name] ********************************************************",
|
||||
"",
|
||||
"PLAY RECAP *********************************************************************",
|
||||
"\u001b[0;32mtesthost\u001b[0m : \u001b[0;32mok=1 \u001b[0m changed=0 unreachable=0 failed=0 skipped=0 rescued=0 ignored=0 "
|
||||
"testhost : ok=1 changed=0 unreachable=0 failed=0 skipped=0 rescued=0 ignored=0 "
|
||||
]
|
||||
|
||||
- name: Set runner_on_ok_msg_color using task variable
|
||||
environment: >-
|
||||
ANSIBLE_FORCE_COLOR=True
|
||||
ANSIBLE_STDOUT_CALLBACK=community.general.diy
|
||||
playbook: |
|
||||
environment:
|
||||
ANSIBLE_NOCOLOR: 'true'
|
||||
ANSIBLE_FORCE_COLOR: 'false'
|
||||
ANSIBLE_STDOUT_CALLBACK: community.general.diy
|
||||
playbook: !unsafe |
|
||||
- hosts: testhost
|
||||
gather_facts: false
|
||||
tasks:
|
||||
@@ -426,116 +443,15 @@
|
||||
debug:
|
||||
msg: sample debug msg
|
||||
vars:
|
||||
ansible_callback_diy_runner_on_ok_msg: Sample output \{\{ ansible_callback_diy.result.output.msg \}\}
|
||||
ansible_callback_diy_runner_on_ok_msg: Sample output {{ ansible_callback_diy.result.output.msg }}
|
||||
ansible_callback_diy_runner_on_ok_msg_color: blue
|
||||
expected_output: [
|
||||
"",
|
||||
"PLAY [testhost] ****************************************************************",
|
||||
"",
|
||||
"TASK [Sample task name] ********************************************************",
|
||||
"\u001b[0;34mSample output sample debug msg\u001b[0m",
|
||||
"Sample output sample debug msg",
|
||||
"",
|
||||
"PLAY RECAP *********************************************************************",
|
||||
"\u001b[0;32mtesthost\u001b[0m : \u001b[0;32mok=1 \u001b[0m changed=0 unreachable=0 failed=0 skipped=0 rescued=0 ignored=0 "
|
||||
"testhost : ok=1 changed=0 unreachable=0 failed=0 skipped=0 rescued=0 ignored=0 "
|
||||
]
|
||||
|
||||
- when: ansible_distribution == 'FreeBSD' and ansible_distribution_version in ['11.1', '12.1']
|
||||
name: Create temporary playbooks
|
||||
block:
|
||||
- name: Create temporary playbook files
|
||||
tempfile:
|
||||
state: file
|
||||
suffix: temp
|
||||
loop: "{{ tests }}"
|
||||
loop_control:
|
||||
loop_var: test
|
||||
label: "{{ test.name }}"
|
||||
register: temporary_playbook_files
|
||||
notify: Remove temporary playbooks
|
||||
|
||||
- name: Set temporary playbook file content
|
||||
copy:
|
||||
content: "{{ playbook }}"
|
||||
dest: "{{ playbook_file }}"
|
||||
loop: "{{ tests }}"
|
||||
loop_control:
|
||||
loop_var: test
|
||||
label: "{{ test.name }}"
|
||||
vars:
|
||||
playbook_file: "{{ (temporary_playbook_files.results | selectattr('test.name', 'equalto', test.name) | list())[0].path }}"
|
||||
playbook: "{{
|
||||
test.playbook
|
||||
| regex_replace('(\\\\{){2}(.*?)(\\\\}){2}', '{{\\2}}')
|
||||
| regex_replace('({\\\\%)(.*?)(%\\\\})', '{%\\2%}')
|
||||
}}"
|
||||
|
||||
- name: Collect outputs
|
||||
shell: "{{ env }} ansible-playbook -i {{ inventory }} {{ playbook }}"
|
||||
loop: "{{ tests }}"
|
||||
loop_control:
|
||||
loop_var: test
|
||||
label: "{{ test.name }}"
|
||||
register: outputs
|
||||
changed_when: false
|
||||
vars:
|
||||
env: "{{ test.environment | regex_replace('(\\\\{){2}(.*?)(\\\\}){2}', '{{\\2}}') }}"
|
||||
inventory: "{{ role_path }}/inventory.yml"
|
||||
playbook: "
|
||||
{%- if temporary_playbook_files is changed -%}
|
||||
{%- for result in temporary_playbook_files.results -%}
|
||||
{%- if result.test.name == test.name -%}
|
||||
{{- result.path -}}
|
||||
{%- endif -%}
|
||||
{%- endfor -%}
|
||||
{%- else -%}
|
||||
{{- [
|
||||
'/dev/fd/0 <<EOF',
|
||||
(
|
||||
test.playbook
|
||||
| regex_replace('(\\\\{){2}(.*?)(\\\\}){2}', '{{\\2}}')
|
||||
| regex_replace('({\\\\%)(.*?)(%\\\\})', '{%\\2%}')
|
||||
)
|
||||
] | join('\n') -}}
|
||||
{%- endif -%}"
|
||||
|
||||
- name: Assert test output equals expected output
|
||||
assert:
|
||||
that: result.output.differences | length == 0
|
||||
loop: "{{ results }}"
|
||||
loop_control:
|
||||
loop_var: result
|
||||
label: "{{ result.name }}"
|
||||
register: assertions
|
||||
vars:
|
||||
results: >-
|
||||
{%- set results = [] -%}
|
||||
{%- for result in outputs.results -%}
|
||||
{%- set differences = [] -%}
|
||||
{%- for i in range([result.test.expected_output | count, result.stdout_lines | count] | max) -%}
|
||||
{%- set line = "line_%s" | format(i+1) -%}
|
||||
{%- set test_line = result.stdout_lines[i] | default(none) -%}
|
||||
{%- set expected_lines = result.test.expected_output[i] | default(none) -%}
|
||||
{%- if expected_lines is not string -%}
|
||||
{%- if test_line not in expected_lines -%}
|
||||
{{- differences.append({
|
||||
line: {
|
||||
'expected_one_of': expected_lines,
|
||||
'got': test_line }}) -}}
|
||||
{%- endif -%}
|
||||
{%- else -%}
|
||||
{%- if expected_lines != test_line -%}
|
||||
{{- differences.append({
|
||||
line: {
|
||||
'expected': expected_line,
|
||||
'got': test_line }}) -}}
|
||||
{%- endif -%}
|
||||
{%- endif -%}
|
||||
{%- endfor -%}
|
||||
{{- results.append({
|
||||
'name': result.test.name,
|
||||
'output': {
|
||||
'differences': differences,
|
||||
'expected': result.test.expected_output,
|
||||
'got': result.stdout_lines }}) -}}
|
||||
{%- endfor -%}
|
||||
{{- results -}}
|
||||
|
||||
2
tests/integration/targets/callback_yaml/aliases
Normal file
2
tests/integration/targets/callback_yaml/aliases
Normal file
@@ -0,0 +1,2 @@
|
||||
shippable/posix/group1
|
||||
needs/target/callback
|
||||
60
tests/integration/targets/callback_yaml/tasks/main.yml
Normal file
60
tests/integration/targets/callback_yaml/tasks/main.yml
Normal file
@@ -0,0 +1,60 @@
|
||||
---
|
||||
####################################################################
|
||||
# WARNING: These are designed specifically for Ansible tests #
|
||||
# and should not be used as examples of how to write Ansible roles #
|
||||
####################################################################
|
||||
|
||||
- name: Run tests
|
||||
include_role:
|
||||
name: callback
|
||||
vars:
|
||||
tests:
|
||||
- name: Basic run
|
||||
environment:
|
||||
ANSIBLE_NOCOLOR: 'true'
|
||||
ANSIBLE_FORCE_COLOR: 'false'
|
||||
ANSIBLE_STDOUT_CALLBACK: community.general.yaml
|
||||
playbook: |
|
||||
- hosts: testhost
|
||||
gather_facts: false
|
||||
tasks:
|
||||
- name: Sample task name
|
||||
debug:
|
||||
msg: sample debug msg
|
||||
expected_output: [
|
||||
"",
|
||||
"PLAY [testhost] ****************************************************************",
|
||||
"",
|
||||
"TASK [Sample task name] ********************************************************",
|
||||
"ok: [testhost] => ",
|
||||
" msg: sample debug msg",
|
||||
"",
|
||||
"PLAY RECAP *********************************************************************",
|
||||
"testhost : ok=1 changed=0 unreachable=0 failed=0 skipped=0 rescued=0 ignored=0 "
|
||||
]
|
||||
- name: Test umlauts in multiline
|
||||
environment:
|
||||
ANSIBLE_NOCOLOR: 'true'
|
||||
ANSIBLE_FORCE_COLOR: 'false'
|
||||
ANSIBLE_STDOUT_CALLBACK: community.general.yaml
|
||||
playbook: |
|
||||
- hosts: testhost
|
||||
gather_facts: false
|
||||
tasks:
|
||||
- name: Umlaut output
|
||||
debug:
|
||||
msg: "äöü\néêè\nßï☺"
|
||||
expected_output: [
|
||||
"",
|
||||
"PLAY [testhost] ****************************************************************",
|
||||
"",
|
||||
"TASK [Umlaut output] ***********************************************************",
|
||||
"ok: [testhost] => ",
|
||||
" msg: |-",
|
||||
" äöü",
|
||||
" éêè",
|
||||
" ßï☺",
|
||||
"",
|
||||
"PLAY RECAP *********************************************************************",
|
||||
"testhost : ok=1 changed=0 unreachable=0 failed=0 skipped=0 rescued=0 ignored=0 "
|
||||
]
|
||||
@@ -1,3 +1,3 @@
|
||||
needs/root
|
||||
shippable/posix/group3
|
||||
skip/macos # FIXME
|
||||
skip/macos # Skipped due to limitation of macOS 10.15 SIP, please read https://github.com/ansible-collections/community.general/issues/1017#issuecomment-755088895
|
||||
|
||||
@@ -2914,9 +2914,22 @@ avoid such warnings, please quote the value.' in log_options_2.warnings"
|
||||
published_ports:
|
||||
- '127.0.0.1:9002:9002/tcp'
|
||||
- '[::1]:9003:9003/tcp'
|
||||
- '[fe80::1%test]:90:90/tcp'
|
||||
force_kill: yes
|
||||
register: published_ports_5
|
||||
|
||||
- name: published_ports (ports with IP addresses, idempotent)
|
||||
docker_container:
|
||||
image: "{{ docker_test_image_alpine }}"
|
||||
command: '/bin/sh -c "sleep 10m"'
|
||||
name: "{{ cname }}"
|
||||
state: started
|
||||
published_ports:
|
||||
- '127.0.0.1:9002:9002/tcp'
|
||||
- '[::1]:9003:9003/tcp'
|
||||
- '[fe80::1%test]:90:90/tcp'
|
||||
register: published_ports_6
|
||||
|
||||
- name: published_ports (no published ports)
|
||||
docker_container:
|
||||
image: "{{ docker_test_image_alpine }}"
|
||||
@@ -2927,7 +2940,7 @@ avoid such warnings, please quote the value.' in log_options_2.warnings"
|
||||
comparisons:
|
||||
published_ports: strict
|
||||
force_kill: yes
|
||||
register: published_ports_6
|
||||
register: published_ports_7
|
||||
|
||||
- name: cleanup
|
||||
docker_container:
|
||||
@@ -2943,7 +2956,8 @@ avoid such warnings, please quote the value.' in log_options_2.warnings"
|
||||
- published_ports_3 is not changed
|
||||
- published_ports_4 is changed
|
||||
- published_ports_5 is changed
|
||||
- published_ports_6 is changed
|
||||
- published_ports_6 is not changed
|
||||
- published_ports_7 is changed
|
||||
|
||||
####################################################################
|
||||
## pull ############################################################
|
||||
|
||||
@@ -10,7 +10,7 @@
|
||||
|
||||
- name: Registering image name
|
||||
set_fact:
|
||||
inames: "{{ inames + [iname, test_image_base ~ ':latest', hello_world_image_base ~ ':latest'] }}"
|
||||
inames: "{{ inames + [iname, test_image_base ~ ':latest', hello_world_image_base ~ ':latest', hello_world_image_base ~ ':newtag', hello_world_image_base ~ ':newtag2'] }}"
|
||||
|
||||
####################################################################
|
||||
## interact with test registry #####################################
|
||||
@@ -101,6 +101,82 @@
|
||||
- facts_2.images | length == 0
|
||||
- facts_3.images | length == 1
|
||||
|
||||
- name: Tag different image with new tag
|
||||
docker_image:
|
||||
name: "{{ docker_test_image_alpine_different }}"
|
||||
repository: "{{ hello_world_image_base }}:newtag"
|
||||
push: no
|
||||
source: pull
|
||||
|
||||
- name: Push different image with new tag
|
||||
docker_image:
|
||||
name: "{{ hello_world_image_base }}"
|
||||
repository: "{{ hello_world_image_base }}"
|
||||
tag: newtag
|
||||
push: yes
|
||||
source: local
|
||||
register: push_1_different
|
||||
|
||||
- name: Push different image with new tag (idempotent)
|
||||
docker_image:
|
||||
name: "{{ hello_world_image_base }}"
|
||||
repository: "{{ hello_world_image_base }}"
|
||||
tag: newtag
|
||||
push: yes
|
||||
source: local
|
||||
register: push_2_different
|
||||
|
||||
- assert:
|
||||
that:
|
||||
- push_1_different is changed
|
||||
- push_2_different is not changed
|
||||
|
||||
- name: Tag same image with new tag
|
||||
docker_image:
|
||||
name: "{{ docker_test_image_alpine_different }}"
|
||||
repository: "{{ hello_world_image_base }}:newtag2"
|
||||
push: no
|
||||
source: pull
|
||||
|
||||
- name: Push same image with new tag
|
||||
docker_image:
|
||||
name: "{{ hello_world_image_base }}"
|
||||
repository: "{{ hello_world_image_base }}"
|
||||
tag: newtag2
|
||||
push: yes
|
||||
source: local
|
||||
register: push_1_same
|
||||
|
||||
- name: Push same image with new tag (idempotent)
|
||||
docker_image:
|
||||
name: "{{ hello_world_image_base }}"
|
||||
repository: "{{ hello_world_image_base }}"
|
||||
tag: newtag2
|
||||
push: yes
|
||||
source: local
|
||||
register: push_2_same
|
||||
|
||||
- assert:
|
||||
that:
|
||||
# NOTE: This should be:
|
||||
# - push_1_same is changed
|
||||
# Unfortunately docker does *NOT* report whether the tag already existed or not.
|
||||
# Here are the logs returned by client.push() for both tasks (which are exactly the same):
|
||||
# push_1_same:
|
||||
# {"status": "The push refers to repository [localhost:32796/test/hello-world]"},
|
||||
# {"id": "3fc64803ca2d", "progressDetail": {}, "status": "Preparing"},
|
||||
# {"id": "3fc64803ca2d", "progressDetail": {}, "status": "Layer already exists"},
|
||||
# {"status": "newtag2: digest: sha256:92251458088c638061cda8fd8b403b76d661a4dc6b7ee71b6affcf1872557b2b size: 528"},
|
||||
# {"aux": {"Digest": "sha256:92251458088c638061cda8fd8b403b76d661a4dc6b7ee71b6affcf1872557b2b", "Size": 528, "Tag": "newtag2"}, "progressDetail": {}}
|
||||
# push_2_same:
|
||||
# {"status": "The push refers to repository [localhost:32796/test/hello-world]"},
|
||||
# {"id": "3fc64803ca2d", "progressDetail": {}, "status": "Preparing"},
|
||||
# {"id": "3fc64803ca2d", "progressDetail": {}, "status": "Layer already exists"},
|
||||
# {"status": "newtag2: digest: sha256:92251458088c638061cda8fd8b403b76d661a4dc6b7ee71b6affcf1872557b2b size: 528"},
|
||||
# {"aux": {"Digest": "sha256:92251458088c638061cda8fd8b403b76d661a4dc6b7ee71b6affcf1872557b2b", "Size": 528, "Tag": "newtag2"}, "progressDetail": {}}
|
||||
- push_1_same is not changed
|
||||
- push_2_same is not changed
|
||||
|
||||
####################################################################
|
||||
## repository ######################################################
|
||||
####################################################################
|
||||
|
||||
@@ -189,6 +189,11 @@
|
||||
source: pull
|
||||
register: archive_image
|
||||
|
||||
- name: Create invalid archive
|
||||
copy:
|
||||
dest: "{{ output_dir }}/image-invalid.tar"
|
||||
content: "this is not a valid image"
|
||||
|
||||
- name: remove image
|
||||
docker_image:
|
||||
name: "{{ docker_test_image_hello_world }}"
|
||||
@@ -209,11 +214,42 @@
|
||||
source: load
|
||||
register: load_image_1
|
||||
|
||||
- name: load image (wrong name)
|
||||
docker_image:
|
||||
name: foo:bar
|
||||
load_path: "{{ output_dir }}/image.tar"
|
||||
source: load
|
||||
register: load_image_2
|
||||
ignore_errors: true
|
||||
|
||||
- name: load image (invalid image)
|
||||
docker_image:
|
||||
name: foo:bar
|
||||
load_path: "{{ output_dir }}/image-invalid.tar"
|
||||
source: load
|
||||
register: load_image_3
|
||||
ignore_errors: true
|
||||
|
||||
- name: load image (invalid image, old API version)
|
||||
docker_image:
|
||||
name: foo:bar
|
||||
load_path: "{{ output_dir }}/image-invalid.tar"
|
||||
source: load
|
||||
api_version: "1.22"
|
||||
register: load_image_4
|
||||
|
||||
- assert:
|
||||
that:
|
||||
- load_image is changed
|
||||
- load_image_1 is not changed
|
||||
- archive_image['image']['Id'] == load_image['image']['Id']
|
||||
- load_image_1 is not changed
|
||||
- load_image_2 is failed
|
||||
- >-
|
||||
"The archive did not contain image 'foo:bar'. Instead, found '" ~ docker_test_image_hello_world ~ "'." == load_image_2.msg
|
||||
- load_image_3 is failed
|
||||
- '"Detected no loaded images. Archive potentially corrupt?" == load_image_3.msg'
|
||||
- load_image_4 is changed
|
||||
- "'The API version of your Docker daemon is < 1.23, which does not return the image loading result from the Docker daemon. Therefore, we cannot verify whether the expected image was loaded, whether multiple images where loaded, or whether the load actually succeeded. You should consider upgrading your Docker daemon.' in load_image_4.warnings"
|
||||
|
||||
####################################################################
|
||||
## path ############################################################
|
||||
|
||||
@@ -1,4 +1,3 @@
|
||||
shippable/posix/group2
|
||||
skip/python3
|
||||
skip/aix
|
||||
disabled # tests use bitbucket, which dropped mercurial support on 2020-08-26 (https://bitbucket.org/blog/sunsetting-mercurial-support-in-bitbucket)
|
||||
|
||||
@@ -4,22 +4,9 @@
|
||||
####################################################################
|
||||
|
||||
# test code for the hg module
|
||||
# (c) 2014, James Tanner <tanner.jc@gmail.com>
|
||||
|
||||
# This file is part of Ansible
|
||||
# Copyright: (c) 2014, James Tanner <tanner.jc@gmail.com>
|
||||
#
|
||||
# Ansible is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation, either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# Ansible is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
|
||||
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||
|
||||
- name: determine if mercurial is already installed
|
||||
command: which hg
|
||||
|
||||
@@ -1,27 +1,16 @@
|
||||
# test code for the hg module
|
||||
# (c) 2018, Ansible Project
|
||||
|
||||
# This file is part of Ansible
|
||||
# Copyright: (c) 2018, Ansible Project
|
||||
#
|
||||
# Ansible is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation, either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# Ansible is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
|
||||
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||
|
||||
|
||||
- name: set where to extract the repo
|
||||
set_fact: checkout_dir={{ output_dir }}/epdb
|
||||
set_fact:
|
||||
checkout_dir: "{{ output_dir }}/hg_project_test"
|
||||
|
||||
- name: set what repo to use
|
||||
set_fact: repo=https://bitbucket.org/rpathsync/epdb
|
||||
set_fact:
|
||||
repo: "http://hg.pf.osdn.net/view/a/ak/akasurde/hg_project_test"
|
||||
|
||||
- name: clean out the output_dir
|
||||
shell: rm -rf {{ output_dir }}/*
|
||||
@@ -30,7 +19,9 @@
|
||||
shell: which hg
|
||||
|
||||
- name: initial checkout
|
||||
hg: repo={{ repo }} dest={{ checkout_dir }}
|
||||
hg:
|
||||
repo: "{{ repo }}"
|
||||
dest: "{{ checkout_dir }}"
|
||||
register: hg_result
|
||||
|
||||
- debug: var=hg_result
|
||||
@@ -46,17 +37,21 @@
|
||||
- "hg_result.changed"
|
||||
|
||||
- name: repeated checkout
|
||||
hg: repo={{ repo }} dest={{ checkout_dir }}
|
||||
hg:
|
||||
repo: "{{ repo }}"
|
||||
dest: "{{ checkout_dir }}"
|
||||
register: hg_result2
|
||||
|
||||
- debug: var=hg_result2
|
||||
|
||||
- name: check for tags
|
||||
stat: path={{ checkout_dir }}/.hgtags
|
||||
stat:
|
||||
path: "{{ checkout_dir }}/.hgtags"
|
||||
register: tags
|
||||
|
||||
- name: check for remotes
|
||||
stat: path={{ checkout_dir }}/.hg/branch
|
||||
stat:
|
||||
path: "{{ checkout_dir }}/.hg/branch"
|
||||
register: branches
|
||||
|
||||
- debug: var=tags
|
||||
@@ -68,13 +63,16 @@
|
||||
- "tags.stat.isreg"
|
||||
- "branches.stat.isreg"
|
||||
|
||||
- name: verify on a reclone things are marked unchanged
|
||||
- name: verify on a re-clone things are marked unchanged
|
||||
assert:
|
||||
that:
|
||||
- "not hg_result2.changed"
|
||||
|
||||
- name: Checkout non-existent repo clone
|
||||
hg: repo=https://bitbucket.org/pyro46/pythonscript_1 clone=no update=no
|
||||
hg:
|
||||
repo: "http://hg.pf.osdn.net/view/a/ak/akasurde/hg_project_test_1"
|
||||
clone: no
|
||||
update: no
|
||||
register: hg_result3
|
||||
ignore_errors: true
|
||||
|
||||
|
||||
@@ -1 +1,7 @@
|
||||
unsupported
|
||||
destructive
|
||||
shippable/posix/group3
|
||||
skip/aix
|
||||
skip/osx
|
||||
skip/macos
|
||||
skip/freebsd
|
||||
needs/root
|
||||
|
||||
2
tests/integration/targets/java_cert/meta/main.yml
Normal file
2
tests/integration/targets/java_cert/meta/main.yml
Normal file
@@ -0,0 +1,2 @@
|
||||
dependencies:
|
||||
- setup_java_keytool
|
||||
@@ -3,56 +3,58 @@
|
||||
# WARNING: These are designed specifically for Ansible tests #
|
||||
# and should not be used as examples of how to write Ansible roles #
|
||||
####################################################################
|
||||
- when: has_java_keytool
|
||||
block:
|
||||
|
||||
- name: prep pkcs12 file
|
||||
copy: src="{{ test_pkcs12_path }}" dest="{{output_dir}}/{{ test_pkcs12_path }}"
|
||||
- name: prep pkcs12 file
|
||||
copy: src="{{ test_pkcs12_path }}" dest="{{output_dir}}/{{ test_pkcs12_path }}"
|
||||
|
||||
- name: import pkcs12
|
||||
java_cert:
|
||||
pkcs12_path: "{{output_dir}}/{{ test_pkcs12_path }}"
|
||||
pkcs12_password: changeit
|
||||
pkcs12_alias: default
|
||||
cert_alias: default
|
||||
keystore_path: "{{output_dir}}/{{ test_keystore_path }}"
|
||||
keystore_pass: changeme_keystore
|
||||
keystore_create: yes
|
||||
state: present
|
||||
register: result_success
|
||||
- name: verify success
|
||||
assert:
|
||||
that:
|
||||
- result_success is successful
|
||||
- name: import pkcs12
|
||||
java_cert:
|
||||
pkcs12_path: "{{output_dir}}/{{ test_pkcs12_path }}"
|
||||
pkcs12_password: changeit
|
||||
pkcs12_alias: default
|
||||
cert_alias: default
|
||||
keystore_path: "{{output_dir}}/{{ test_keystore_path }}"
|
||||
keystore_pass: changeme_keystore
|
||||
keystore_create: yes
|
||||
state: present
|
||||
register: result_success
|
||||
- name: verify success
|
||||
assert:
|
||||
that:
|
||||
- result_success is successful
|
||||
|
||||
- name: import pkcs12 with wrong password
|
||||
java_cert:
|
||||
pkcs12_path: "{{output_dir}}/{{ test_pkcs12_path }}"
|
||||
pkcs12_password: wrong_pass
|
||||
pkcs12_alias: default
|
||||
cert_alias: default_new
|
||||
keystore_path: "{{output_dir}}/{{ test_keystore_path }}"
|
||||
keystore_pass: changeme_keystore
|
||||
keystore_create: yes
|
||||
state: present
|
||||
ignore_errors: true
|
||||
register: result_wrong_pass
|
||||
- name: import pkcs12 with wrong password
|
||||
java_cert:
|
||||
pkcs12_path: "{{output_dir}}/{{ test_pkcs12_path }}"
|
||||
pkcs12_password: wrong_pass
|
||||
pkcs12_alias: default
|
||||
cert_alias: default_new
|
||||
keystore_path: "{{output_dir}}/{{ test_keystore_path }}"
|
||||
keystore_pass: changeme_keystore
|
||||
keystore_create: yes
|
||||
state: present
|
||||
ignore_errors: true
|
||||
register: result_wrong_pass
|
||||
|
||||
- name: verify fail with wrong import password
|
||||
assert:
|
||||
that:
|
||||
- result_wrong_pass is failed
|
||||
- name: verify fail with wrong import password
|
||||
assert:
|
||||
that:
|
||||
- result_wrong_pass is failed
|
||||
|
||||
- name: test fail on mutually exclusive params
|
||||
java_cert:
|
||||
cert_path: ca.crt
|
||||
pkcs12_path: "{{output_dir}}/{{ test_pkcs12_path }}"
|
||||
cert_alias: default
|
||||
keystore_path: "{{output_dir}}/{{ test_keystore_path }}"
|
||||
keystore_pass: changeme_keystore
|
||||
keystore_create: yes
|
||||
state: present
|
||||
ignore_errors: true
|
||||
register: result_excl_params
|
||||
- name: verify failed exclusive params
|
||||
assert:
|
||||
that:
|
||||
- result_excl_params is failed
|
||||
- name: test fail on mutually exclusive params
|
||||
java_cert:
|
||||
cert_path: ca.crt
|
||||
pkcs12_path: "{{output_dir}}/{{ test_pkcs12_path }}"
|
||||
cert_alias: default
|
||||
keystore_path: "{{output_dir}}/{{ test_keystore_path }}"
|
||||
keystore_pass: changeme_keystore
|
||||
keystore_create: yes
|
||||
state: present
|
||||
ignore_errors: true
|
||||
register: result_excl_params
|
||||
- name: verify failed exclusive params
|
||||
assert:
|
||||
that:
|
||||
- result_excl_params is failed
|
||||
|
||||
7
tests/integration/targets/java_keystore/aliases
Normal file
7
tests/integration/targets/java_keystore/aliases
Normal file
@@ -0,0 +1,7 @@
|
||||
destructive
|
||||
shippable/posix/group3
|
||||
skip/aix
|
||||
skip/osx
|
||||
skip/macos
|
||||
skip/freebsd
|
||||
needs/root
|
||||
3
tests/integration/targets/java_keystore/meta/main.yml
Normal file
3
tests/integration/targets/java_keystore/meta/main.yml
Normal file
@@ -0,0 +1,3 @@
|
||||
dependencies:
|
||||
- setup_java_keytool
|
||||
- setup_openssl
|
||||
137
tests/integration/targets/java_keystore/tasks/main.yml
Normal file
137
tests/integration/targets/java_keystore/tasks/main.yml
Normal file
@@ -0,0 +1,137 @@
|
||||
---
|
||||
####################################################################
|
||||
# WARNING: These are designed specifically for Ansible tests #
|
||||
# and should not be used as examples of how to write Ansible roles #
|
||||
####################################################################
|
||||
- when: has_java_keytool
|
||||
block:
|
||||
- name: Create private keys
|
||||
community.crypto.openssl_privatekey:
|
||||
path: "{{ output_dir ~ '/' ~ (item.keyname | default(item.name)) ~ '.key' }}"
|
||||
size: 2048 # this should work everywhere
|
||||
# The following is more efficient, but might not work everywhere:
|
||||
# type: ECC
|
||||
# curve: secp384r1
|
||||
cipher: "{{ 'auto' if item.passphrase is defined else omit }}"
|
||||
passphrase: "{{ item.passphrase | default(omit) }}"
|
||||
loop:
|
||||
- name: cert
|
||||
- name: cert-pw
|
||||
passphrase: hunter2
|
||||
|
||||
- name: Create CSRs
|
||||
community.crypto.openssl_csr:
|
||||
path: "{{ output_dir ~ '/' ~ item.name ~ '.csr' }}"
|
||||
privatekey_path: "{{ output_dir ~ '/' ~ (item.keyname | default(item.name)) ~ '.key' }}"
|
||||
privatekey_passphrase: "{{ item.passphrase | default(omit) }}"
|
||||
commonName: "{{ item.commonName }}"
|
||||
loop:
|
||||
- name: cert
|
||||
commonName: example.com
|
||||
- name: cert-pw
|
||||
passphrase: hunter2
|
||||
commonName: example.com
|
||||
- name: cert2
|
||||
keyname: cert
|
||||
commonName: example.org
|
||||
- name: cert2-pw
|
||||
keyname: cert-pw
|
||||
passphrase: hunter2
|
||||
commonName: example.org
|
||||
|
||||
- name: Create certificates
|
||||
community.crypto.x509_certificate:
|
||||
path: "{{ output_dir ~ '/' ~ item.name ~ '.pem' }}"
|
||||
csr_path: "{{ output_dir ~ '/' ~ item.name ~ '.csr' }}"
|
||||
privatekey_path: "{{ output_dir ~ '/' ~ (item.keyname | default(item.name)) ~ '.key' }}"
|
||||
privatekey_passphrase: "{{ item.passphrase | default(omit) }}"
|
||||
provider: selfsigned
|
||||
loop:
|
||||
- name: cert
|
||||
commonName: example.com
|
||||
- name: cert-pw
|
||||
passphrase: hunter2
|
||||
commonName: example.com
|
||||
- name: cert2
|
||||
keyname: cert
|
||||
commonName: example.org
|
||||
- name: cert2-pw
|
||||
keyname: cert-pw
|
||||
passphrase: hunter2
|
||||
commonName: example.org
|
||||
|
||||
- name: Create a Java key store for the given certificates (check mode)
|
||||
community.general.java_keystore: &create_key_store_data
|
||||
name: example
|
||||
certificate: "{{lookup('file', output_dir ~ '/' ~ item.name ~ '.pem') }}"
|
||||
private_key: "{{lookup('file', output_dir ~ '/' ~ (item.keyname | default(item.name)) ~ '.key') }}"
|
||||
private_key_passphrase: "{{ item.passphrase | default(omit) }}"
|
||||
password: changeit
|
||||
dest: "{{ output_dir ~ '/' ~ item.name ~ '.jks' }}"
|
||||
loop: &create_key_store_loop
|
||||
- name: cert
|
||||
- name: cert-pw
|
||||
passphrase: hunter2
|
||||
check_mode: yes
|
||||
register: result_check
|
||||
|
||||
- name: Create a Java key store for the given certificates
|
||||
community.general.java_keystore: *create_key_store_data
|
||||
loop: *create_key_store_loop
|
||||
register: result
|
||||
|
||||
- name: Create a Java key store for the given certificates (idempotency, check mode)
|
||||
community.general.java_keystore: *create_key_store_data
|
||||
loop: *create_key_store_loop
|
||||
check_mode: yes
|
||||
register: result_idem_check
|
||||
|
||||
- name: Create a Java key store for the given certificates (idempotency)
|
||||
community.general.java_keystore: *create_key_store_data
|
||||
loop: *create_key_store_loop
|
||||
register: result_idem
|
||||
|
||||
- name: Create a Java key store for the given certificates (certificate changed, check mode)
|
||||
community.general.java_keystore: *create_key_store_data
|
||||
loop: &create_key_store_loop_new_certs
|
||||
- name: cert2
|
||||
keyname: cert
|
||||
- name: cert2-pw
|
||||
keyname: cert-pw
|
||||
passphrase: hunter2
|
||||
check_mode: yes
|
||||
register: result_change_check
|
||||
|
||||
- name: Create a Java key store for the given certificates (certificate changed)
|
||||
community.general.java_keystore: *create_key_store_data
|
||||
loop: *create_key_store_loop_new_certs
|
||||
register: result_change
|
||||
|
||||
- name: Create a Java key store for the given certificates (password changed, check mode)
|
||||
community.general.java_keystore:
|
||||
<<: *create_key_store_data
|
||||
password: hunter2
|
||||
loop: *create_key_store_loop_new_certs
|
||||
check_mode: yes
|
||||
register: result_pw_change_check
|
||||
when: false # FIXME: module currently crashes
|
||||
|
||||
- name: Create a Java key store for the given certificates (password changed)
|
||||
community.general.java_keystore:
|
||||
<<: *create_key_store_data
|
||||
password: hunter2
|
||||
loop: *create_key_store_loop_new_certs
|
||||
register: result_pw_change
|
||||
when: false # FIXME: module currently crashes
|
||||
|
||||
- name: Validate results
|
||||
assert:
|
||||
that:
|
||||
- result is changed
|
||||
- result_check is changed
|
||||
- result_idem is not changed
|
||||
- result_idem_check is not changed
|
||||
- result_change is changed
|
||||
- result_change_check is changed
|
||||
# - result_pw_change is changed # FIXME: module currently crashes
|
||||
# - result_pw_change_check is changed # FIXME: module currently crashes
|
||||
@@ -6,3 +6,4 @@ skip/macos
|
||||
skip/freebsd
|
||||
skip/aix
|
||||
skip/python2.6 # python-daemon package used in integration tests requires >=2.7
|
||||
skip/rhel # FIXME
|
||||
|
||||
@@ -4,4 +4,3 @@ destructive
|
||||
skip/aix
|
||||
skip/centos6
|
||||
skip/freebsd
|
||||
disabled # FIXME
|
||||
|
||||
@@ -56,6 +56,15 @@
|
||||
content: "{{ lookup('file', 'job.hcl') }}"
|
||||
register: job_deployed_idempotence
|
||||
|
||||
- name: get list of nomad jobs
|
||||
nomad_job_info:
|
||||
host: localhost
|
||||
use_ssl: false
|
||||
register: list_nomad_jobs
|
||||
|
||||
- debug:
|
||||
msg: "{{ list_nomad_jobs }}"
|
||||
|
||||
- name: run check delete nomad job
|
||||
nomad_job:
|
||||
host: localhost
|
||||
@@ -80,6 +89,15 @@
|
||||
name: example
|
||||
register: get_job_delete
|
||||
|
||||
- name: get list of nomad jobs
|
||||
nomad_job_info:
|
||||
host: localhost
|
||||
use_ssl: false
|
||||
register: list_nomad_jobs
|
||||
|
||||
- debug:
|
||||
msg: "{{ list_nomad_jobs }}"
|
||||
|
||||
- name: assert idempotence
|
||||
assert:
|
||||
that:
|
||||
|
||||
@@ -0,0 +1,3 @@
|
||||
dependencies:
|
||||
- setup_remote_constraints
|
||||
- setup_pkg_mgr
|
||||
21
tests/integration/targets/setup_java_keytool/tasks/main.yml
Normal file
21
tests/integration/targets/setup_java_keytool/tasks/main.yml
Normal file
@@ -0,0 +1,21 @@
|
||||
---
|
||||
####################################################################
|
||||
# WARNING: These are designed specifically for Ansible tests #
|
||||
# and should not be used as examples of how to write Ansible roles #
|
||||
####################################################################
|
||||
- set_fact:
|
||||
has_java_keytool: >-
|
||||
{{
|
||||
ansible_os_family not in ['Darwin', 'FreeBSD']
|
||||
and not (ansible_distribution == "CentOS" and ansible_distribution_version is version("7.0", "<"))
|
||||
}}
|
||||
|
||||
- name: Include OS-specific variables
|
||||
include_vars: '{{ ansible_os_family }}.yml'
|
||||
when: has_java_keytool
|
||||
|
||||
- name: Install keytool
|
||||
package:
|
||||
name: '{{ keytool_package_name }}'
|
||||
become: true
|
||||
when: has_java_keytool
|
||||
@@ -0,0 +1,2 @@
|
||||
---
|
||||
keytool_package_name: ca-certificates-java
|
||||
@@ -0,0 +1,2 @@
|
||||
---
|
||||
keytool_package_name: java-11-openjdk-headless
|
||||
@@ -0,0 +1,2 @@
|
||||
---
|
||||
keytool_package_name: java-11-openjdk-headless
|
||||
@@ -12,7 +12,7 @@ from ansible_collections.community.general.tests.unit.plugins.modules.utils impo
|
||||
|
||||
|
||||
TEST_OUTPUT = """
|
||||
Process '%s'
|
||||
%s '%s'
|
||||
status %s
|
||||
monitoring status Not monitored
|
||||
monitoring mode active
|
||||
@@ -106,28 +106,45 @@ def test_status_value(status_name):
|
||||
|
||||
|
||||
BASIC_OUTPUT_CASES = [
|
||||
(TEST_OUTPUT % ('processX', name), getattr(monit.Status, name.upper()))
|
||||
(TEST_OUTPUT % ('Process', 'processX', name), getattr(monit.Status, name.upper()))
|
||||
for name in monit.StatusValue.ALL_STATUS
|
||||
]
|
||||
|
||||
|
||||
@pytest.mark.parametrize('output, expected', BASIC_OUTPUT_CASES + [
|
||||
('', monit.Status.MISSING),
|
||||
(TEST_OUTPUT % ('processY', 'OK'), monit.Status.MISSING),
|
||||
(TEST_OUTPUT % ('processX', 'Not Monitored - start pending'), monit.Status.OK),
|
||||
(TEST_OUTPUT % ('processX', 'Monitored - stop pending'), monit.Status.NOT_MONITORED),
|
||||
(TEST_OUTPUT % ('processX', 'Monitored - restart pending'), monit.Status.OK),
|
||||
(TEST_OUTPUT % ('processX', 'Not Monitored - monitor pending'), monit.Status.OK),
|
||||
(TEST_OUTPUT % ('processX', 'Does not exist'), monit.Status.DOES_NOT_EXIST),
|
||||
(TEST_OUTPUT % ('processX', 'Not monitored'), monit.Status.NOT_MONITORED),
|
||||
(TEST_OUTPUT % ('processX', 'Running'), monit.Status.OK),
|
||||
(TEST_OUTPUT % ('processX', 'Execution failed | Does not exist'), monit.Status.EXECUTION_FAILED),
|
||||
(TEST_OUTPUT % ('Process', 'processY', 'OK'), monit.Status.MISSING),
|
||||
(TEST_OUTPUT % ('Process', 'processX', 'Not Monitored - start pending'), monit.Status.OK),
|
||||
(TEST_OUTPUT % ('Process', 'processX', 'Monitored - stop pending'), monit.Status.NOT_MONITORED),
|
||||
(TEST_OUTPUT % ('Process', 'processX', 'Monitored - restart pending'), monit.Status.OK),
|
||||
(TEST_OUTPUT % ('Process', 'processX', 'Not Monitored - monitor pending'), monit.Status.OK),
|
||||
(TEST_OUTPUT % ('Process', 'processX', 'Does not exist'), monit.Status.DOES_NOT_EXIST),
|
||||
(TEST_OUTPUT % ('Process', 'processX', 'Not monitored'), monit.Status.NOT_MONITORED),
|
||||
(TEST_OUTPUT % ('Process', 'processX', 'Running'), monit.Status.OK),
|
||||
(TEST_OUTPUT % ('Process', 'processX', 'Execution failed | Does not exist'), monit.Status.EXECUTION_FAILED),
|
||||
])
|
||||
def test_parse_status(output, expected):
|
||||
status = monit.Monit(None, '', 'processX', 0)._parse_status(output, '')
|
||||
assert status == expected
|
||||
|
||||
|
||||
@pytest.mark.parametrize('output, expected', BASIC_OUTPUT_CASES + [
|
||||
(TEST_OUTPUT % ('Process', 'processX', 'OK'), monit.Status.OK),
|
||||
(TEST_OUTPUT % ('File', 'processX', 'OK'), monit.Status.OK),
|
||||
(TEST_OUTPUT % ('Fifo', 'processX', 'OK'), monit.Status.OK),
|
||||
(TEST_OUTPUT % ('Filesystem', 'processX', 'OK'), monit.Status.OK),
|
||||
(TEST_OUTPUT % ('Directory', 'processX', 'OK'), monit.Status.OK),
|
||||
(TEST_OUTPUT % ('Remote host', 'processX', 'OK'), monit.Status.OK),
|
||||
(TEST_OUTPUT % ('System', 'processX', 'OK'), monit.Status.OK),
|
||||
(TEST_OUTPUT % ('Program', 'processX', 'OK'), monit.Status.OK),
|
||||
(TEST_OUTPUT % ('Network', 'processX', 'OK'), monit.Status.OK),
|
||||
(TEST_OUTPUT % ('Unsupported', 'processX', 'OK'), monit.Status.MISSING),
|
||||
])
|
||||
def test_parse_status_supports_all_services(output, expected):
|
||||
status = monit.Monit(None, '', 'processX', 0)._parse_status(output, '')
|
||||
assert status == expected
|
||||
|
||||
|
||||
@pytest.mark.parametrize('output, expected', [
|
||||
('This is monit version 5.18.1', '5.18.1'),
|
||||
('This is monit version 12.18', '12.18'),
|
||||
|
||||
70
tests/unit/plugins/modules/packaging/language/test_npm.py
Normal file
70
tests/unit/plugins/modules/packaging/language/test_npm.py
Normal file
@@ -0,0 +1,70 @@
|
||||
#
|
||||
# Copyright: (c) 2021, Abhijeet Kasurde <akasurde@redhat.com>
|
||||
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||
|
||||
from __future__ import (absolute_import, division, print_function)
|
||||
__metaclass__ = type
|
||||
|
||||
from ansible_collections.community.general.tests.unit.compat.mock import call, patch
|
||||
from ansible_collections.community.general.plugins.modules.packaging.language import npm
|
||||
from ansible_collections.community.general.tests.unit.plugins.modules.utils import (
|
||||
AnsibleExitJson, AnsibleFailJson, ModuleTestCase, set_module_args)
|
||||
|
||||
|
||||
class NPMModuleTestCase(ModuleTestCase):
|
||||
module = npm
|
||||
|
||||
def setUp(self):
|
||||
super(NPMModuleTestCase, self).setUp()
|
||||
ansible_module_path = "ansible_collections.community.general.plugins.modules.packaging.language.npm.AnsibleModule"
|
||||
self.mock_run_command = patch('%s.run_command' % ansible_module_path)
|
||||
self.module_main_command = self.mock_run_command.start()
|
||||
self.mock_get_bin_path = patch('%s.get_bin_path' % ansible_module_path)
|
||||
self.get_bin_path = self.mock_get_bin_path.start()
|
||||
self.get_bin_path.return_value = '/testbin/npm'
|
||||
|
||||
def tearDown(self):
|
||||
self.mock_run_command.stop()
|
||||
self.mock_get_bin_path.stop()
|
||||
super(NPMModuleTestCase, self).tearDown()
|
||||
|
||||
def module_main(self, exit_exc):
|
||||
with self.assertRaises(exit_exc) as exc:
|
||||
self.module.main()
|
||||
return exc.exception.args[0]
|
||||
|
||||
def test_present(self):
|
||||
set_module_args({
|
||||
'name': 'coffee-script',
|
||||
'global': 'true',
|
||||
'state': 'present'
|
||||
})
|
||||
self.module_main_command.side_effect = [
|
||||
(0, '{}', ''),
|
||||
(0, '{}', ''),
|
||||
]
|
||||
|
||||
result = self.module_main(AnsibleExitJson)
|
||||
|
||||
self.assertTrue(result['changed'])
|
||||
self.module_main_command.assert_has_calls([
|
||||
call(['/testbin/npm', 'list', '--json', '--long', '--global'], check_rc=False, cwd=None),
|
||||
])
|
||||
|
||||
def test_absent(self):
|
||||
set_module_args({
|
||||
'name': 'coffee-script',
|
||||
'global': 'true',
|
||||
'state': 'absent'
|
||||
})
|
||||
self.module_main_command.side_effect = [
|
||||
(0, '{"dependencies": {"coffee-script": {}}}', ''),
|
||||
(0, '{}', ''),
|
||||
]
|
||||
|
||||
result = self.module_main(AnsibleExitJson)
|
||||
|
||||
self.assertTrue(result['changed'])
|
||||
self.module_main_command.assert_has_calls([
|
||||
call(['/testbin/npm', 'uninstall', '--global', 'coffee-script'], check_rc=True, cwd=None),
|
||||
])
|
||||
Reference in New Issue
Block a user