Compare commits

..

3 Commits

Author SHA1 Message Date
Felix Fontein
436023be95 Update changelog. 2020-06-29 15:02:58 +02:00
Felix Fontein
ffe16f5fd1 Next release is 0.3.0-experimental.meta.redirects 2020-06-29 15:02:23 +02:00
Felix Fontein
0ec76242c6 Convert symlinks to meta/runtime.yml redirects. 2020-06-29 15:02:12 +02:00
2010 changed files with 28897 additions and 18824 deletions

147
.github/BOTMETA.yml vendored
View File

@@ -9,8 +9,6 @@ files:
$actions/ironware.py:
maintainers: paulquack
labels: ironware networking
$actions/shutdown.py:
authors: nitzmahone samdoran aminvakil
$becomes/:
labels: become
$callbacks/:
@@ -66,17 +64,21 @@ files:
$doc_fragments/hwc.py:
maintainers: $team_huawei
labels: hwc
$doc_fragments/mysql.py:
maintainers: $team_mysql
labels: database mysql
keywords: mariadb proxysql
$doc_fragments/postgres.py:
maintainers: $team_postgresql
labels: postgres postgresql
keywords: database postgres postgresql
$doc_fragments/proxysql.py:
maintainers: $team_mysql
labels: database mysql proxysql
keywords: mariadb proxysql
$doc_fragments/xenserver.py:
maintainers: bvitnik
labels: xenserver
$filters/time.py:
authors: resmo
$filters/jc.py:
authors: kellyjonbrazil
$httpapis/:
maintainers: $team_networking
labels: networking
@@ -119,10 +121,6 @@ files:
$lookups/dig.py:
maintainers: jpmens
labels: dig
$lookups/tss.py:
authors: amigus
$lookups/dsv.py:
authors: amigus
$lookups/hashi_vault.py:
labels: hashi_vault
$lookups/manifold.py:
@@ -161,6 +159,10 @@ files:
$module_utils/memset.py:
maintainers: glitchcrab
labels: cloud memset
$module_utils/mysql.py:
maintainers: $team_mysql
labels: database mysql
keywords: mariadb proxysql
$module_utils/net_tools/nios/api.py:
maintainers: $team_networking sganesh-infoblox
labels: infoblox networking
@@ -198,6 +200,33 @@ files:
authors: krsacme
$modules/cloud/centurylink/:
authors: clc-runner
$modules/cloud/digital_ocean/digital_ocean.py:
authors: zbal
$modules/cloud/digital_ocean/:
authors: Akasurde
maintainers: $team_digital_ocean
keywords:
- digital ocean
- droplet
$modules/cloud/digital_ocean/digital_ocean_firewall_facts.py:
authors: BondAnthony
maintainers: mgregson
$modules/cloud/digital_ocean/digital_ocean_floating_ip_facts.py:
authors: pmarques
$modules/cloud/digital_ocean/digital_ocean_sshkey_facts.py:
authors: pmarques
$modules/cloud/digital_ocean/digital_ocean_block_storage.py:
authors: harneksidhu
$modules/cloud/digital_ocean/digital_ocean_domain.py:
authors: mgregson
maintainers: BondAnthony
$modules/cloud/digital_ocean/digital_ocean_droplet.py:
authors: gurch101
$modules/cloud/digital_ocean/digital_ocean_firewall_info.py:
authors: BondAnthony
maintainers: mgregson
$modules/cloud/digital_ocean/digital_ocean_tag.py:
authors: kontrafiktion
$modules/cloud/dimensiondata/dimensiondata_network.py:
authors: aimonb
maintainers: tintoy
@@ -247,9 +276,6 @@ files:
$modules/cloud/docker/docker_stack.py:
authors: dariko
maintainers: DBendit WojciechowskiPiotr akshay196 danihodovic felixfontein jwitko kassiansun tbouvet
$modules/cloud/docker/docker_stack_task_info.py:
authors: imjoseangel
maintainers: $team_docker
$modules/cloud/docker/docker_swarm.py:
authors: WojciechowskiPiotr tbouvet
maintainers: DBendit akshay196 danihodovic dariko felixfontein jwitko kassiansun
@@ -409,9 +435,6 @@ files:
$modules/cloud/scaleway/:
authors: sieben
maintainers: $team_scaleway
$modules/cloud/scaleway/scaleway_database_backup.py:
authors: guillaume_ro_fr
maintainers: $team_scaleway
$modules/cloud/scaleway/scaleway_image_info.py:
authors: Spredzy sieben
$modules/cloud/scaleway/scaleway_ip_info.py:
@@ -498,8 +521,6 @@ files:
authors: ThePixelDeveloper samdoran
$modules/database/misc/kibana_plugin.py:
authors: barryib
$modules/database/misc/odbc.py:
authors: john-westcott-iv
$modules/database/misc/redis.py:
authors: slok
$modules/database/misc/riak.py:
@@ -508,46 +529,65 @@ files:
authors: vedit
maintainers: Jmainguy kenichi-ogawa-1988
labels: mssql_db
$modules/database/mysql/mysql_db.py:
authors: ansible
maintainers: $team_mysql
$modules/database/mysql/:
authors: Andersson007
maintainers: Alexander198961 Xyon bmalynovytch bmildren kurtdavis michaelcoburn oneiroi tolland
keywords: mariadb proxysql
$modules/database/mysql/mysql_replication.py:
authors: Andersson007 banyek
$modules/database/mysql/mysql_user.py:
authors: Jmainguy bmalynovytch
maintainers: Alexander198961 Andersson007 Xyon bmildren kurtdavis michaelcoburn oneiroi tolland
ignore: tomaszkiewicz
$modules/database/mysql/mysql_variables.py:
authors: banyek
maintainers: $team_mysql
$modules/database/postgresql/postgresql_db.py:
authors: ansible
maintainers: $team_postgresql
$modules/database/postgresql/postgresql_ext.py:
authors: Andersson007 andytom dschep strk
maintainers: $team_postgresql
maintainers: Dorn- amenonsen jbscalia kostiantyn-nemchenko matburt nerzhul sebasmannem tcraxs
$modules/database/postgresql/:
authors: Andersson007
maintainers: Dorn- amenonsen andytom jbscalia kostiantyn-nemchenko matburt nerzhul sebasmannem tcraxs
keywords: database postgres postgresql
maintainers: $team_postgresql
$modules/database/postgresql/postgresql_lang.py:
authors: andytom jensdepuydt
maintainers: $team_postgresql
maintainers: Andersson007 Dorn- amenonsen jbscalia kostiantyn-nemchenko matburt nerzhul sebasmannem tcraxs
$modules/database/postgresql/postgresql_pg_hba.py:
authors: sebasmannem
maintainers: $team_postgresql
maintainers: Andersson007 Dorn- amenonsen andytom jbscalia kostiantyn-nemchenko matburt nerzhul tcraxs
$modules/database/postgresql/postgresql_privs.py:
authors: b6d tcraxs
maintainers: $team_postgresql
maintainers: Andersson007 Dorn- amenonsen andytom jbscalia kostiantyn-nemchenko matburt nerzhul sebasmannem
$modules/database/postgresql/postgresql_publication.py:
authors: Andersson007 nerzhul
maintainers: $team_postgresql
maintainers: Dorn- amenonsen andytom jbscalia kostiantyn-nemchenko matburt sebasmannem tcraxs
$modules/database/postgresql/postgresql_query.py:
authors: Andersson007 archf wrouesnel
maintainers: $team_postgresql
$modules/database/postgresql/postgresql_schema.py:
authors: Dorn- andytom
maintainers: $team_postgresql
maintainers: Andersson007 amenonsen jbscalia kostiantyn-nemchenko matburt nerzhul sebasmannem tcraxs
$modules/database/postgresql/postgresql_sequence.py:
authors: tcraxs
maintainers: $team_postgresql
maintainers: Andersson007 Dorn- amenonsen andytom jbscalia kostiantyn-nemchenko matburt nerzhul sebasmannem
$modules/database/postgresql/postgresql_slot.py:
authors: Andersson007 jscalia
maintainers: $team_postgresql
$modules/database/postgresql/postgresql_tablespace.py:
authors: Andersson007 Dorn- antoinell
maintainers: $team_postgresql
maintainers: amenonsen andytom jbscalia kostiantyn-nemchenko matburt nerzhul sebasmannem tcraxs
$modules/database/postgresql/postgresql_user.py:
authors: ansible
maintainers: $team_postgresql
$modules/database/proxysql/:
authors: bmildren
maintainers: Alexander198961 Andersson007 Xyon bmalynovytch kurtdavis michaelcoburn oneiroi tolland
labels: mysql
keywords: mariadb proxysql
$modules/database/vertica/:
authors: dareko
$modules/files/archive.py:
@@ -633,7 +673,6 @@ files:
labels: monit
$modules/monitoring/nagios.py:
authors: tbielawa
maintainers: tgoetheyn
$modules/monitoring/newrelic_deployment.py:
authors: mcodd
$modules/monitoring/pagerduty.py:
@@ -692,9 +731,6 @@ files:
$modules/net_tools/ldap/ldap_passwd.py:
authors: KellerFuchs
maintainers: jtyr
$modules/net_tools/ldap/ldap_search.py:
authors: eryx12o45
maintainers: jtyr
$modules/net_tools/lldp.py:
authors: andyhky
labels: lldp
@@ -1011,9 +1047,8 @@ files:
$modules/remote_management/oneview/oneview_fcoe_network.py:
authors: fgbulsoni
$modules/remote_management/redfish/:
authors: jose-delarosa billdodd
maintainers: $team_redfish
ignore: jose-delarosa
authors: jose-delarosa
maintainers: billdodd mraineri tomasg2012
$modules/remote_management/stacki/stacki_host.py:
authors: bbyhuy
maintainers: bsanders
@@ -1043,29 +1078,23 @@ files:
$modules/source_control/gitlab/:
notify: jlozadad
authors: Lunik marwatk
maintainers: $team_gitlab
maintainers: Shaps dj-wasabi waheedi
keywords: gitlab source_control
$modules/source_control/gitlab/gitlab_group.py:
authors: Lunik dj-wasabi
maintainers: $team_gitlab
$modules/source_control/gitlab/gitlab_group_members.py:
authors: zanssa
maintainers: $team_gitlab
$modules/source_control/gitlab/gitlab_group_variable.py:
authors: scodeman
maintainers: $team_gitlab
maintainers: Shaps marwatk waheedi
$modules/source_control/gitlab/gitlab_project.py:
authors: Lunik dj-wasabi
maintainers: $team_gitlab
maintainers: Shaps marwatk waheedi
$modules/source_control/gitlab/gitlab_project_variable.py:
authors: markuman
maintainers: $team_gitlab
$modules/source_control/gitlab/gitlab_runner.py:
authors: Lunik SamyCoenen
maintainers: $team_gitlab
maintainers: Shaps dj-wasabi marwatk waheedi
$modules/source_control/gitlab/gitlab_user.py:
authors: Lunik dj-wasabi
maintainers: $team_gitlab
maintainers: Shaps marwatk waheedi
$modules/source_control/hg.py:
authors: yeukhon
$modules/storage/emc/emc_vnx_sg_member.py:
@@ -1139,8 +1168,6 @@ files:
authors: groks
$modules/system/dconf.py:
authors: azaghal
$modules/system/dpkg_divert.py:
authors: quidame
$modules/system/facter.py:
authors: ansible
labels: facter
@@ -1148,6 +1175,8 @@ files:
authors: abulimov
maintainers: pilou-
labels: filesystem
$modules/system/firewalld.py:
authors: maxamillion
$modules/system/gconftool2.py:
authors: kevensen
maintainers: Akasurde
@@ -1156,16 +1185,12 @@ files:
authors: hryamzik
maintainers: obourdon
labels: interfaces_file
$modules/system/iptables_state.py:
authors: quidame
$modules/system/java_cert.py:
authors: haad
$modules/system/java_keystore.py:
authors: Mogztter
$modules/system/kernel_blacklist.py:
authors: matze
$modules/system/launchd.py:
authors: martinm82
$modules/system/lbu.py:
authors: kunkku
$modules/system/listen_ports_facts.py:
@@ -1237,8 +1262,6 @@ files:
authors: bcoca
$modules/system/syspatch.py:
authors: precurse
$modules/system/sysupgrade.py:
authors: precurse
$modules/system/timezone.py:
authors: indrajitr jasperla tmshn
$modules/system/ufw.py:
@@ -1263,7 +1286,7 @@ files:
authors: ramondelafuente
$modules/web_infrastructure/django_manage.py:
authors: tastychutney
maintainers: scottanderson42 russoz
maintainers: scottanderson42
labels: django_manage
$modules/web_infrastructure/ejabberd_user.py:
authors: privateip
@@ -1341,10 +1364,11 @@ macros:
team_aix: MorrisA bcoca d-little flynn1973 gforster kairoaraujo marvin-sinister mator molekuul ramooncamacho wtcross
team_bsd: JoergFiedler MacLemon bcoca dch jasperla mekanix opoplawski overhacked tuxillo
team_cyberark_conjur: jvanderhoof ryanprior
team_digital_ocean: BondAnthony mgregson
team_docker: DBendit WojciechowskiPiotr akshay196 danihodovic dariko felixfontein jwitko kassiansun tbouvet
team_e_spirit: MatrixCrawler getjack
team_extreme: LindsayHill bigmstone ujwalkomarla
team_gitlab: Lunik Shaps dj-wasabi marwatk waheedi zanssa scodeman
team_gitlab: Lunik Shaps dj-wasabi marwatk waheedi
team_google: erjohnso rambleraptor
team_hpux: bcoca davx8342
team_huawei: QijunPan TommyLike edisonxiang freesky-edward hwDCN niuzhenguo xuxiaowei0512 yanzhangi zengchen1024 zhongjun2
@@ -1354,17 +1378,18 @@ macros:
team_linode: InTheCloudDan decentral1se displague rmcintosh
team_macos: akasurde kyleabenson martinm82
team_manageiq: abellotti cben gtanzillo yaacov zgalor
team_mysql: Alexander198961 Andersson007 Xyon bmalynovytch bmildren kurtdavis michaelcoburn oneiroi tolland
team_netapp: amit0701 carchi8py hulquest lmprice lonico ndswartz schmots1
team_netscaler: chiradeep giorgos-nikolopoulos
team_netvisor: Qalthos amitsi csharpe-pn pdam preetiparasar
team_networking: NilashishC Qalthos danielmellado ganeshrn justjais trishnaguha
team_oracle: manojmeda mross22 nalsaber
team_postgresql: Andersson007 Dorn- andytom jbscalia kostiantyn-nemchenko matburt nerzhul sebasmannem tcraxs ilicmilan
team_postgresql: Andersson007 Dorn- amenonsen andytom jbscalia kostiantyn-nemchenko matburt nerzhul sebasmannem tcraxs
team_purestorage: bannaych dnix101 genegr lionmax opslounge raekins sdodsley sile16
team_rabbitmq: chrishoffman manuel-sousa
team_redfish: billdodd mraineri tomasg2012
team_redfish: billdodd jose-delarosa mraineri tomasg2012
team_rhn: FlossWare alikins barnabycourt vritant
team_scaleway: QuentinBrosse abarbare jerome-quere kindermoumoute remyleone
team_scaleway: DenBeke QuentinBrosse abarbare jerome-quere kindermoumoute remyleone
team_solaris: bcoca fishman jasperla jpdasma mator scathatheworm troy2914 xen0l
team_suse: commel dcermak evrardjp lrupp toabctl
team_virt: joshainglis karmab Aversiste
team_virt: joshainglis karmab

View File

@@ -1,5 +0,0 @@
---
backport_branch_prefix: patchback/backports/
backport_label_prefix: backport-
target_branch_prefix: stable-
...

View File

@@ -1,49 +0,0 @@
name: "Code scanning - action"
on:
schedule:
- cron: '26 19 * * 1'
jobs:
CodeQL-Build:
runs-on: ubuntu-latest
steps:
- name: Checkout repository
uses: actions/checkout@v2
with:
# We must fetch at least the immediate parents so that if this is
# a pull request then we can checkout the head.
fetch-depth: 2
# If this run was triggered by a pull request event, then checkout
# the head of the pull request instead of the merge commit.
- run: git checkout HEAD^2
if: ${{ github.event_name == 'pull_request' }}
# Initializes the CodeQL tools for scanning.
- name: Initialize CodeQL
uses: github/codeql-action/init@v1
# Override language selection by uncommenting this and choosing your languages
# with:
# languages: go, javascript, csharp, python, cpp, java
# Autobuild attempts to build any compiled languages (C/C++, C#, or Java).
# If this step fails, then you should remove it and run the build manually (see below)
- name: Autobuild
uses: github/codeql-action/autobuild@v1
# Command-line programs to run using the OS shell.
# 📚 https://git.io/JvXDl
# ✏️ If the Autobuild fails above, remove it and uncomment the following three lines
# and modify them (or add more) to build your code if your project
# uses a compiled language
#- run: |
# make bootstrap
# make release
- name: Perform CodeQL Analysis
uses: github/codeql-action/analyze@v1

View File

@@ -5,195 +5,32 @@ Community General Release Notes
.. contents:: Topics
v1.2.0
======
v0.3.0-experimental.meta.redirects
==================================
Release Summary
---------------
Regular bimonthly minor release.
Minor Changes
-------------
- hashi_vault - support ``VAULT_NAMESPACE`` environment variable for namespaced lookups against Vault Enterprise (in addition to the ``namespace=`` flag supported today) (https://github.com/ansible-collections/community.general/pull/929).
- hashi_vault lookup - add ``VAULT_TOKEN_FILE`` as env option to specify ``token_file`` param (https://github.com/ansible-collections/community.general/issues/373).
- hashi_vault lookup - add ``VAULT_TOKEN_PATH`` as env option to specify ``token_path`` param (https://github.com/ansible-collections/community.general/issues/373).
- ipa_user - add ``userauthtype`` option (https://github.com/ansible-collections/community.general/pull/951).
- iptables_state - use FQCN when calling a module from action plugin (https://github.com/ansible-collections/community.general/pull/967).
- nagios - add the ``acknowledge`` action (https://github.com/ansible-collections/community.general/pull/820).
- nagios - add the ``host`` and ``all`` values for the ``forced_check`` action (https://github.com/ansible-collections/community.general/pull/998).
- nagios - add the ``service_check`` action (https://github.com/ansible-collections/community.general/pull/820).
- nagios - rename the ``service_check`` action to ``forced_check`` since we now are able to check both a particular service, all services of a particular host and the host itself (https://github.com/ansible-collections/community.general/pull/998).
- pkgutil - module can now accept a list of packages (https://github.com/ansible-collections/community.general/pull/799).
- pkgutil - module has a new option, ``force``, equivalent to the ``-f`` option to the `pkgutil <http://pkgutil.net/>`_ command (https://github.com/ansible-collections/community.general/pull/799).
- pkgutil - module now supports check mode (https://github.com/ansible-collections/community.general/pull/799).
- postgresql_privs - add the ``usage_on_types`` option (https://github.com/ansible-collections/community.general/issues/884).
- proxmox_kvm - improve code readability (https://github.com/ansible-collections/community.general/pull/934).
- pushover - add device parameter (https://github.com/ansible-collections/community.general/pull/802).
- redfish_command - add sub-command for ``EnableContinuousBootOverride`` and ``DisableBootOverride`` to allow setting BootSourceOverrideEnabled Redfish property (https://github.com/ansible-collections/community.general/issues/824).
- redfish_command - support same reset actions on Managers as on Systems (https://github.com/ansible-collections/community.general/issues/901).
- slack - add support for updating messages (https://github.com/ansible-collections/community.general/issues/304).
- xml - fixed issue where changed was returned when removing non-existent xpath (https://github.com/ansible-collections/community.general/pull/1007).
- zypper_repository - proper failure when python-xml is missing (https://github.com/ansible-collections/community.general/pull/939).
Bugfixes
--------
- aerospike_migrations - handle exception when unstable-cluster is returned (https://github.com/ansible-collections/community.general/pull/900).
- django_manage - fix idempotence for ``createcachetable`` (https://github.com/ansible-collections/community.general/pull/699).
- docker_container - fix idempotency problem with ``published_ports`` when strict comparison is used and list is empty (https://github.com/ansible-collections/community.general/issues/978).
- gem - fix get_installed_versions: correctly parse ``default`` version (https://github.com/ansible-collections/community.general/pull/783).
- hashi_vault - add missing ``mount_point`` parameter for approle auth (https://github.com/ansible-collections/community.general/pull/897).
- hashi_vault lookup - ``token_path`` in config file overridden by env ``HOME`` (https://github.com/ansible-collections/community.general/issues/373).
- homebrew_cask - fixed issue where a cask with ``@`` in the name is incorrectly reported as invalid (https://github.com/ansible-collections/community.general/issues/733).
- interfaces_file - escape regular expression characters in old value (https://github.com/ansible-collections/community.general/issues/777).
- launchd - fix for user-level services (https://github.com/ansible-collections/community.general/issues/896).
- nmcli - set ``C`` locale when executing ``nmcli`` (https://github.com/ansible-collections/community.general/issues/989).
- parted - fix creating partition when label is changed (https://github.com/ansible-collections/community.general/issues/522).
- pkg5 - now works when Python 3 is used on the target (https://github.com/ansible-collections/community.general/pull/789).
- postgresql_privs - allow to pass ``PUBLIC`` role written in lowercase letters (https://github.com/ansible-collections/community.general/issues/857).
- postgresql_privs - fix the module mistakes a procedure for a function (https://github.com/ansible-collections/community.general/issues/994).
- postgresql_privs - rollback if nothing changed (https://github.com/ansible-collections/community.general/issues/885).
- postgresql_privs - the module was attempting to revoke grant options even though ``grant_option`` was not specified (https://github.com/ansible-collections/community.general/pull/796).
- proxmox_kvm - defer error-checking for non-existent VMs in order to fix idempotency of tasks using ``state=absent`` and properly recognize a success (https://github.com/ansible-collections/community.general/pull/811).
- proxmox_kvm - improve handling of long-running tasks by creating a dedicated function (https://github.com/ansible-collections/community.general/pull/831).
- slack - fix ``xox[abp]`` token identification to capture everything after ``xox[abp]``, as the token is the only thing that should be in this argument (https://github.com/ansible-collections/community.general/issues/862).
- terraform - fix incorrectly reporting a status of unchanged when number of resources added or destroyed are multiples of 10 (https://github.com/ansible-collections/community.general/issues/561).
- timezone - support Python3 on macos/darwin (https://github.com/ansible-collections/community.general/pull/945).
- zfs - fixed ``invalid character '@' in pool name"`` error when working with snapshots on a root zvol (https://github.com/ansible-collections/community.general/issues/932).
New Plugins
-----------
Inventory
~~~~~~~~~
- proxmox - Proxmox inventory source
- stackpath_compute - StackPath Edge Computing inventory source
New Modules
-----------
Cloud
~~~~~
scaleway
^^^^^^^^
- scaleway_database_backup - Scaleway database backups management module
Source Control
~~~~~~~~~~~~~~
gitlab
^^^^^^
- gitlab_group_members - Manage group members on GitLab Server
- gitlab_group_variable - Creates, updates, or deletes GitLab groups variables
v1.1.0
======
Release Summary
---------------
Release for Ansible 2.10.0.
This is an experimental release to test whether ``meta/runtime.yml`` redirects work as expected for flatmapping. There will be no final 0.3.0 release; the next release will be 1.0.0. This experimental pre-release will not appear in the versioning history of 0.3.0, it is solely for testing certain technical aspects of ansible-base and antsibull.
Minor Changes
-------------
- The collection dependencies were adjusted so that ``community.kubernetes`` and ``google.cloud`` are required to be of version 1.0.0 or newer (https://github.com/ansible-collections/community.general/pull/774).
- jc - new filter to convert the output of many shell commands and file-types to JSON. Uses the jc library at https://github.com/kellyjonbrazil/jc. For example, filtering the STDOUT output of ``uname -a`` via ``{{ result.stdout | community.general.jc('uname') }}``. Requires Python 3.6+ (https://github.com/ansible-collections/community.general/pull/750).
- xfconf - add support for ``double`` type (https://github.com/ansible-collections/community.general/pull/744).
Bugfixes
--------
- cobbler inventory plugin - ``name`` needed FQCN (https://github.com/ansible-collections/community.general/pull/722).
- dsv lookup - use correct dict usage (https://github.com/ansible-collections/community.general/pull/743).
- inventory plugins - allow FQCN in ``plugin`` option (https://github.com/ansible-collections/community.general/pull/722).
- ipa_hostgroup - fix an issue with load-balanced ipa and cookie handling with Python 3 (https://github.com/ansible-collections/community.general/issues/737).
- oc connection plugin - ``transport`` needed FQCN (https://github.com/ansible-collections/community.general/pull/722).
- postgresql_set - allow to pass an empty string to the ``value`` parameter (https://github.com/ansible-collections/community.general/issues/775).
- xfconf - make it work in non-english locales (https://github.com/ansible-collections/community.general/pull/744).
New Modules
-----------
Cloud
~~~~~
docker
^^^^^^
- docker_stack_task_info - Return information of the tasks on a docker stack
System
~~~~~~
- iptables_state - Save iptables state into a file or restore it from a file
- shutdown - Shut down a machine
- sysupgrade - Manage OpenBSD system upgrades
v1.0.0
======
Release Summary
---------------
This is release 1.0.0 of ``community.general``, released on 2020-07-31.
Minor Changes
-------------
- Add the ``gcpubsub``, ``gcpubsub_info`` and ``gcpubsub_facts`` (to be removed in 3.0.0) modules. These were originally in community.general, but removed on the assumption that they have been moved to google.cloud. Since this turned out to be incorrect, we re-added them for 1.0.0.
- Add the deprecated ``gcp_backend_service``, ``gcp_forwarding_rule`` and ``gcp_healthcheck`` modules, which will be removed in 2.0.0. These were originally in community.general, but removed on the assumption that they have been moved to google.cloud. Since this turned out to be incorrect, we re-added them for 1.0.0.
- The collection is now actively tested in CI with the latest Ansible 2.9 release.
- airbrake_deployment - add ``version`` param; clarified docs on ``revision`` param (https://github.com/ansible-collections/community.general/pull/583).
- apk - added ``no_cache`` option (https://github.com/ansible-collections/community.general/pull/548).
- firewalld - the module has been moved to the ``ansible.posix`` collection. A redirection is active, which will be removed in version 2.0.0 (https://github.com/ansible-collections/community.general/pull/623).
- gitlab_project - add support for merge_method on projects (https://github.com/ansible/ansible/pull/66813).
- gitlab_runners inventory plugin - permit environment variable input for ``server_url``, ``api_token`` and ``filter`` options (https://github.com/ansible-collections/community.general/pull/611).
- haproxy - add options to dis/enable health and agent checks. When health and agent checks are enabled for a service, a disabled service will re-enable itself automatically. These options also change the state of the agent checks to match the requested state for the backend (https://github.com/ansible-collections/community.general/issues/684).
- log_plays callback - use v2 methods (https://github.com/ansible-collections/community.general/pull/442).
- logstash callback - add ini config (https://github.com/ansible-collections/community.general/pull/610).
- lxd_container - added support of ``--target`` flag for cluster deployments (https://github.com/ansible-collections/community.general/issues/637).
- parted - accept negative numbers in ``part_start`` and ``part_end``
- pkgng - added ``stdout`` and ``stderr`` attributes to the result (https://github.com/ansible-collections/community.general/pull/560).
- pkgng - added support for upgrading all packages using ``name: *, state: latest``, similar to other package providers (https://github.com/ansible-collections/community.general/pull/569).
- postgresql_query - add search_path parameter (https://github.com/ansible-collections/community.general/issues/625).
- rundeck_acl_policy - add check for rundeck_acl_policy name parameter (https://github.com/ansible-collections/community.general/pull/612).
- slack - add support for sending messages built with block kit (https://github.com/ansible-collections/community.general/issues/380).
- splunk callback - add an option to allow not to validate certificate from HEC (https://github.com/ansible-collections/community.general/pull/596).
- xfconf - add arrays support (https://github.com/ansible/ansible/issues/46308).
- xfconf - add support for ``uint`` type (https://github.com/ansible-collections/community.general/pull/696).
Breaking Changes / Porting Guide
--------------------------------
- log_plays callback - add missing information to the logs generated by the callback plugin. This changes the log message format (https://github.com/ansible-collections/community.general/pull/442).
- pkgng - passing ``name: *`` with ``state: absent`` will no longer remove every installed package from the system. It is now a noop. (https://github.com/ansible-collections/community.general/pull/569).
- pkgng - passing ``name: *`` with ``state: latest`` or ``state: present`` will no longer install every package from the configured package repositories. Instead, ``name: *, state: latest`` will upgrade all already-installed packages, and ``name: *, state: present`` is a noop. (https://github.com/ansible-collections/community.general/pull/569).
Deprecated Features
-------------------
- The ldap_attr module has been deprecated and will be removed in a later release; use ldap_attrs instead.
- xbps - the ``force`` option never had any effect. It is now deprecated, and will be removed in 3.0.0 (https://github.com/ansible-collections/community.general/pull/568).
Removed Features (previously deprecated)
----------------------------------------
- conjur_variable lookup - has been moved to the ``cyberark.conjur`` collection. A redirection is active, which will be removed in version 2.0.0 (https://github.com/ansible-collections/community.general/pull/570).
- digital_ocean_* - all DigitalOcean modules have been moved to the ``community.digitalocean`` collection. A redirection is active, which will be removed in version 2.0.0 (https://github.com/ansible-collections/community.general/pull/622).
- infini_* - all infinidat modules have been moved to the ``infinidat.infinibox`` collection. A redirection is active, which will be removed in version 2.0.0 (https://github.com/ansible-collections/community.general/pull/607).
- logicmonitor - the module has been removed in 1.0.0 since it is unmaintained and the API used by the module has been turned off in 2017 (https://github.com/ansible-collections/community.general/issues/539, https://github.com/ansible-collections/community.general/pull/541).
- logicmonitor_facts - the module has been removed in 1.0.0 since it is unmaintained and the API used by the module has been turned off in 2017 (https://github.com/ansible-collections/community.general/issues/539, https://github.com/ansible-collections/community.general/pull/541).
- mysql_* - all MySQL modules have been moved to the ``community.mysql`` collection. A redirection is active, which will be removed in version 2.0.0 (https://github.com/ansible-collections/community.general/pull/633).
- proxysql_* - all ProxySQL modules have been moved to the ``community.proxysql`` collection. A redirection is active, which will be removed in version 2.0.0 (https://github.com/ansible-collections/community.general/pull/624).
Bugfixes
--------
@@ -201,68 +38,9 @@ Bugfixes
- aix_filesystem - fix issues with ismount module_util pathing for Ansible 2.9 (https://github.com/ansible-collections/community.general/pull/567).
- consul_kv lookup - fix ``ANSIBLE_CONSUL_URL`` environment variable handling (https://github.com/ansible/ansible/issues/51960).
- consul_kv lookup - fix arguments handling (https://github.com/ansible-collections/community.general/pull/303).
- digital_ocean_tag_info - fix crash when querying for an individual tag (https://github.com/ansible-collections/community.general/pull/615).
- doas become plugin - address a bug with the parameters handling that was breaking the plugin in community.general when ``become_flags`` and ``become_user`` were not explicitly specified (https://github.com/ansible-collections/community.general/pull/704).
- docker_compose - add a condition to prevent service startup if parameter ``stopped`` is true. Otherwise, the service will be started on each play and stopped again immediately due to the ``stopped`` parameter and breaks the idempotency of the module (https://github.com/ansible-collections/community.general/issues/532).
- docker_compose - disallow usage of the parameters ``stopped`` and ``restarted`` at the same time. This also breaks the idempotency (https://github.com/ansible-collections/community.general/issues/532).
- docker_container - use Config MacAddress by default instead of Networks. Networks MacAddress is empty in some cases (https://github.com/ansible/ansible/issues/70206).
- docker_container - various error fixes in string handling for Python 2 to avoid crashes when non-ASCII characters are used in strings (https://github.com/ansible-collections/community.general/issues/640).
- docker_swarm - removes ``advertise_addr`` from list of required arguments when ``state`` is ``"join"`` (https://github.com/ansible-collections/community.general/issues/439).
- dzdo become plugin - address a bug with the parameters handling that was breaking the plugin in community.general when ``become_user`` was not explicitly specified (https://github.com/ansible-collections/community.general/pull/708).
- filesystem - resizefs of xfs filesystems is fixed. Filesystem needs to be mounted.
- jenkins_plugin - replace MD5 checksum verification with SHA1 due to MD5 being disabled on systems with FIPS-only algorithms enabled (https://github.com/ansible/ansible/issues/34304).
- jira - improve error message handling (https://github.com/ansible-collections/community.general/pull/311).
- jira - improve error message handling with multiple errors (https://github.com/ansible-collections/community.general/pull/707).
- kubevirt - Add aliases 'interface_name' for network_name (https://github.com/ansible/ansible/issues/55641).
- nmcli - fix idempotency when modifying an existing connection (https://github.com/ansible-collections/community.general/issues/481).
- osx_defaults - fix handling negative integers (https://github.com/ansible-collections/community.general/issues/134).
- pacman - treat package names containing .zst as package files during installation (https://www.archlinux.org/news/now-using-zstandard-instead-of-xz-for-package-compression/, https://github.com/ansible-collections/community.general/pull/650).
- pbrun become plugin - address a bug with the parameters handling that was breaking the plugin in community.general when ``become_user`` was not explicitly specified (https://github.com/ansible-collections/community.general/pull/708).
- postgresql_privs - fix crash when set privileges on schema with hyphen in the name (https://github.com/ansible-collections/community.general/issues/656).
- postgresql_set - only display a warning about restarts, when restarting is needed (https://github.com/ansible-collections/community.general/pull/651).
- redfish_info, redfish_config, redfish_command - Fix Redfish response payload decode on Python 3.5 (https://github.com/ansible-collections/community.general/issues/686)
- selective - mark task failed correctly (https://github.com/ansible/ansible/issues/63767).
- snmp_facts - skip ``EndOfMibView`` values (https://github.com/ansible/ansible/issues/49044).
- yarn - fixed an index out of range error when no outdated packages were returned by yarn executable (see https://github.com/ansible-collections/community.general/pull/474).
- yarn - fixed a too many values to unpack error when scoped packages are installed (see https://github.com/ansible-collections/community.general/pull/474).
New Plugins
-----------
Inventory
~~~~~~~~~
- cobbler - Cobbler inventory source
Lookup
~~~~~~
- dsv - Get secrets from Thycotic DevOps Secrets Vault
- tss - Get secrets from Thycotic Secret Server
New Modules
-----------
Cloud
~~~~~
docker
^^^^^^
- docker_stack_info - Return information on a docker stack
Database
~~~~~~~~
misc
^^^^
- odbc - Execute SQL via ODBC
System
~~~~~~
- launchd - Manage macOS services
v0.2.0
======
@@ -281,6 +59,7 @@ Major Changes
- docker_container - the ``network_mode`` option will be set by default to the name of the first network in ``networks`` if at least one network is given and ``networks_cli_compatible`` is ``true`` (will be default from community.general 2.0.0 on). Set to an explicit value to avoid deprecation warnings if you specify networks and set ``networks_cli_compatible`` to ``true``. The current default (not specifying it) is equivalent to the value ``default``.
- docker_container - the module has a new option, ``container_default_behavior``, whose default value will change from ``compatibility`` to ``no_defaults``. Set to an explicit value to avoid deprecation warnings.
- gitlab_user - no longer requires ``name``, ``email`` and ``password`` arguments when ``state=absent``.
- zabbix_action - no longer requires ``esc_period`` and ``event_source`` arguments when ``state=absent``.
Minor Changes
-------------
@@ -417,6 +196,16 @@ Minor Changes
- terraform - Adds option ``backend_config_files``. This can accept a list of paths to multiple configuration files (https://github.com/ansible-collections/community.general/pull/394).
- terraform - Adds option ``variables_files`` for multiple var-files (https://github.com/ansible-collections/community.general/issues/224).
- ufw - accept ``interface_in`` and ``interface_out`` as parameters.
- zabbix_action - allow str values for ``esc_period`` options (https://github.com/ansible/ansible/pull/66841).
- zabbix_host - now supports configuring user macros and host tags on the managed host (see https://github.com/ansible/ansible/pull/66777)
- zabbix_host_info - ``host_name`` based search results now include host groups.
- zabbix_hostmacro - ``macro_name`` now accepts macros in zabbix native format as well (e.g. ``{$MACRO}``)
- zabbix_hostmacro - ``macro_value`` is no longer required when ``state=absent``
- zabbix_proxy - ``interface`` sub-options ``type`` and ``main`` are now deprecated and will be removed in community.general 3.0.0. Also, the values passed to ``interface`` are now checked for correct types and unexpected keys.
- zabbix_proxy - added option proxy_address for comma-delimited list of IP/CIDR addresses or DNS names to accept active proxy requests from
- zabbix_template - add new option omit_date to remove date from exported/dumped template (https://github.com/ansible/ansible/pull/67302)
- zabbix_template - adding new update rule templateLinkage.deleteMissing for newer zabbix versions (https://github.com/ansible/ansible/pull/66747).
- zabbix_template_info - add new option omit_date to remove date from exported/dumped template (https://github.com/ansible/ansible/pull/67302)
- zypper - Added ``allow_vendor_change`` and ``replacefiles`` zypper options (https://github.com/ansible-collections/community.general/issues/381)
Breaking Changes / Porting Guide
@@ -447,6 +236,7 @@ Deprecated Features
- redfish_config - the ``bios_attribute_name`` and ``bios_attribute_value`` options will be removed. To maintain the existing behavior use the ``bios_attributes`` option instead.
- redfish_config and redfish_command - the behavior to select the first System, Manager, or Chassis resource to modify when multiple are present will be removed. Use the new ``resource_id`` option to specify target resource to modify.
- redfish_config, redfish_command - Behavior to modify the first System, Manager, or Chassis resource when multiple are present is deprecated. Use the new ``resource_id`` option to specify target resource to modify.
- zabbix_proxy - deprecates ``interface`` sub-options ``type`` and ``main`` when proxy type is set to passive via ``status=passive``. Make sure these suboptions are removed from your playbook as they were never supported by Zabbix in the first place.
Removed Features (previously deprecated)
----------------------------------------
@@ -614,6 +404,13 @@ Bugfixes
- terraform module - fixes usage for providers not supporting workspaces
- yarn - Return correct values when running yarn in check mode (https://github.com/ansible-collections/community.general/pull/153).
- yarn - handle no version when installing module by name (https://github.com/ansible/ansible/issues/55097)
- zabbix_action - arguments ``event_source`` and ``esc_period`` no longer required when ``state=absent``
- zabbix_host - fixed inventory_mode key error, which occurs with Zabbix 4.4.1 or more (https://github.com/ansible/ansible/issues/65304).
- zabbix_host - was not possible to update a host where visible_name was not set in zabbix
- zabbix_mediatype - Fixed to support zabbix 4.4 or more and python3 (https://github.com/ansible/ansible/pull/67693)
- zabbix_template - fixed error when providing empty ``link_templates`` to the module (see https://github.com/ansible/ansible/issues/66417)
- zabbix_template - fixed invalid (non-importable) output provided by exporting XML (see https://github.com/ansible/ansible/issues/66466)
- zabbix_user - Fixed an issue where module failed with zabbix 4.4 or above (see https://github.com/ansible/ansible/pull/67475)
- zfs_delegate_admin - add missing choices diff/hold/release to the permissions parameter (https://github.com/ansible-collections/community.general/pull/278)
New Plugins

View File

@@ -1,22 +1,15 @@
# Community General Collection
[![Run Status](https://api.shippable.com/projects/5e664a167c32620006c9fa50/badge?branch=master)](https://app.shippable.com/github/ansible-collections/community.general/dashboard) [![Codecov](https://img.shields.io/codecov/c/github/ansible-collections/community.general)](https://codecov.io/gh/ansible-collections/community.general)
[![Run Status](https://api.shippable.com/projects/5e664a167c32620006c9fa50/badge?branch=main)](https://app.shippable.com/github/ansible-collections/community.general/dashboard) [![Codecov](https://img.shields.io/codecov/c/github/ansible-collections/community.general)](https://codecov.io/gh/ansible-collections/community.general)
# Ansible Collection: community.general
This repo contains the `community.general` Ansible Collection. The collection includes many modules and plugins supported by Ansible community which are not part of more specialized community collections.
This repo contains the `community.general` Ansible Collection.
## Tested with Ansible
The collection includes the modules and plugins supported by Ansible community.
Tested with the current Ansible 2.9 and 2.10 releases and the current development version of Ansible. Ansible versions before 2.9.10 are not supported.
## External requirements
## Installation and Usage
Some modules and plugins require external libraries. Please check the requirements for each plugin or module you use in the documentation to find out which requirements are needed.
## Included content
Please check the included content on the [Ansible Galaxy page for this collection](https://galaxy.ansible.com/community/general).
## Using this collection
### Installing the Collection from Ansible Galaxy
Before using the General community collection, you need to install the collection with the `ansible-galaxy` CLI:
@@ -29,19 +22,34 @@ collections:
- name: community.general
```
See [Ansible Using collections](https://docs.ansible.com/ansible/latest/user_guide/collections_using.html) for more details.
## Testing and Development
## Contributing to this collection
If you want to develop new content for this collection or improve what is already here, the easiest way to work on the collection is to clone it into one of the configured [`COLLECTIONS_PATHS`](https://docs.ansible.com/ansible/latest/reference_appendices/config.html#collections-paths), and work on it there.
If you want to develop new content for this collection or improve what is already here, the easiest way to work on the collection is to clone it into one of the configured [`COLLECTIONS_PATH`](https://docs.ansible.com/ansible/latest/reference_appendices/config.html#collections-paths), and work on it there.
You can find more information in the [developer guide for collections](https://docs.ansible.com/ansible/devel/dev_guide/developing_collections.html#contributing-to-collections)
You can find more information in the [developer guide for collections](https://docs.ansible.com/ansible/devel/dev_guide/developing_collections.html#contributing-to-collections), and in the [Ansible Community Guide](https://docs.ansible.com/ansible/latest/community/index.html).
### Running tests
### Testing with `ansible-test`
See [here](https://docs.ansible.com/ansible/devel/dev_guide/developing_collections.html#testing-collections).
### Communication
## Release notes
See [here](https://github.com/ansible-collections/community.general/tree/master/CHANGELOG.rst).
## Publishing New Version
Basic instructions without release branches:
1. Create `changelogs/fragments/<version>.yml` with `release_summary:` section (which must be a string, not a list).
2. Run `antsibull-changelog release --collection-flatmap yes`
3. Make sure `CHANGELOG.rst` and `changelogs/changelog.yaml` are added to git, and the deleted fragments have been removed.
4. Tag the commit with `<version>`. Push changes and tag to the main repository.
## More Information
TBD
## Communication
We have a dedicated Working Group for Ansible development.
@@ -54,34 +62,8 @@ For more information about communities, meetings and agendas see [Community Wiki
For more information about [communication](https://docs.ansible.com/ansible/latest/community/communication.html)
### Publishing New Version
## License
Basic instructions without release branches:
GNU General Public License v3.0 or later
1. Create `changelogs/fragments/<version>.yml` with `release_summary:` section (which must be a string, not a list).
2. Run `antsibull-changelog release --collection-flatmap yes`
3. Make sure `CHANGELOG.rst` and `changelogs/changelog.yaml` are added to git, and the deleted fragments have been removed.
4. Tag the commit with `<version>`. Push changes and tag to the main repository.
## Release notes
See the [changelog](https://github.com/ansible-collections/community.general/blob/main/CHANGELOG.rst).
## Roadmap
See [this issue](https://github.com/ansible-collections/community.general/issues/582) for information on releasing, versioning and deprecation.
In general, we plan to release a major version every six months, and minor versions every two months. Major versions can contain breaking changes, while minor versions only contain new features and bugfixes.
## More information
- [Ansible Collection overview](https://github.com/ansible-collections/overview)
- [Ansible User guide](https://docs.ansible.com/ansible/latest/user_guide/index.html)
- [Ansible Developer guide](https://docs.ansible.com/ansible/latest/dev_guide/index.html)
- [Ansible Community code of conduct](https://docs.ansible.com/ansible/latest/community/code_of_conduct.html)
## Licensing
GNU General Public License v3.0 or later.
See [COPYING](https://www.gnu.org/licenses/gpl-3.0.txt) to see the full text.
See [LICENSE](COPYING) to see the full text.

View File

@@ -245,6 +245,19 @@ releases:
- terraform module - fixes usage for providers not supporting workspaces
- yarn - Return correct values when running yarn in check mode (https://github.com/ansible-collections/community.general/pull/153).
- yarn - handle no version when installing module by name (https://github.com/ansible/ansible/issues/55097)
- zabbix_action - arguments ``event_source`` and ``esc_period`` no longer required
when ``state=absent``
- zabbix_host - fixed inventory_mode key error, which occurs with Zabbix 4.4.1
or more (https://github.com/ansible/ansible/issues/65304).
- zabbix_host - was not possible to update a host where visible_name was not
set in zabbix
- zabbix_mediatype - Fixed to support zabbix 4.4 or more and python3 (https://github.com/ansible/ansible/pull/67693)
- zabbix_template - fixed error when providing empty ``link_templates`` to the
module (see https://github.com/ansible/ansible/issues/66417)
- zabbix_template - fixed invalid (non-importable) output provided by exporting
XML (see https://github.com/ansible/ansible/issues/66466)
- zabbix_user - Fixed an issue where module failed with zabbix 4.4 or above
(see https://github.com/ansible/ansible/pull/67475)
- zfs_delegate_admin - add missing choices diff/hold/release to the permissions
parameter (https://github.com/ansible-collections/community.general/pull/278)
deprecated_features:
@@ -284,6 +297,10 @@ releases:
- redfish_config, redfish_command - Behavior to modify the first System, Manager,
or Chassis resource when multiple are present is deprecated. Use the new ``resource_id``
option to specify target resource to modify.
- zabbix_proxy - deprecates ``interface`` sub-options ``type`` and ``main``
when proxy type is set to passive via ``status=passive``. Make sure these
suboptions are removed from your playbook as they were never supported by
Zabbix in the first place.
major_changes:
- docker_container - the ``network_mode`` option will be set by default to the
name of the first network in ``networks`` if at least one network is given
@@ -296,6 +313,8 @@ releases:
Set to an explicit value to avoid deprecation warnings.
- gitlab_user - no longer requires ``name``, ``email`` and ``password`` arguments
when ``state=absent``.
- zabbix_action - no longer requires ``esc_period`` and ``event_source`` arguments
when ``state=absent``.
minor_changes:
- A new filter ``to_time_unit`` with specializations ``to_milliseconds``, ``to_seconds``,
``to_minutes``, ``to_hours``, ``to_days``, ``to_weeks``, ``to_months`` and
@@ -476,6 +495,24 @@ releases:
paths to multiple configuration files (https://github.com/ansible-collections/community.general/pull/394).
- terraform - Adds option ``variables_files`` for multiple var-files (https://github.com/ansible-collections/community.general/issues/224).
- ufw - accept ``interface_in`` and ``interface_out`` as parameters.
- zabbix_action - allow str values for ``esc_period`` options (https://github.com/ansible/ansible/pull/66841).
- zabbix_host - now supports configuring user macros and host tags on the managed
host (see https://github.com/ansible/ansible/pull/66777)
- zabbix_host_info - ``host_name`` based search results now include host groups.
- zabbix_hostmacro - ``macro_name`` now accepts macros in zabbix native format
as well (e.g. ``{$MACRO}``)
- zabbix_hostmacro - ``macro_value`` is no longer required when ``state=absent``
- zabbix_proxy - ``interface`` sub-options ``type`` and ``main`` are now deprecated
and will be removed in community.general 3.0.0. Also, the values passed to
``interface`` are now checked for correct types and unexpected keys.
- zabbix_proxy - added option proxy_address for comma-delimited list of IP/CIDR
addresses or DNS names to accept active proxy requests from
- zabbix_template - add new option omit_date to remove date from exported/dumped
template (https://github.com/ansible/ansible/pull/67302)
- zabbix_template - adding new update rule templateLinkage.deleteMissing for
newer zabbix versions (https://github.com/ansible/ansible/pull/66747).
- zabbix_template_info - add new option omit_date to remove date from exported/dumped
template (https://github.com/ansible/ansible/pull/67302)
- zypper - Added ``allow_vendor_change`` and ``replacefiles`` zypper options
(https://github.com/ansible-collections/community.general/issues/381)
release_summary: 'This is the first proper release of the ``community.general``
@@ -643,8 +680,10 @@ releases:
- 63629-postgresql_db_pgc_support.yaml
- 63887-docker_swarm_service-sort-lists-when-checking-changes.yml
- 63903-ufw.yaml
- 63969-zabbix_action_argsfix.yml
- 63990-replace-deprecated-basic-functions.yml
- 64007-postgresql_db_allow_user_name_with_dots.yml
- 64032-zabbix_template_fix_return_XML_as_a_string_even_python3.yml
- 64059-mysql_user_fix_password_comparison.yaml
- 64288-fix-hashi-vault-kv-v2.yaml
- 64371-postgresql_privs-always-reports-as-changed-when-using-default_privs.yml
@@ -665,6 +704,7 @@ releases:
- 65164-postgres_use_query_params_with_cursor.yml
- 65223-postgresql_db-exception-added.yml
- 65238-fix_pacman_stdout_parsing.yml
- 65304-fix_zabbix_host_inventory_mode_key_error.yml
- 65310-postgresql_owner_use_query_params.yml
- 65372-misc-context-manager.yml
- 65387-homebrew_check_mode_option.yml
@@ -686,11 +726,13 @@ releases:
- 65894-redfish-bios-attributes.yaml
- 65903-postgresql_privs_sort_lists_with_none_elements.yml
- 65993-restart-docker_container-on-restart-policy-updates.yaml
- 66026-zabbix_host_info.yml
- 66048-mysql_add_master_data_parameter.yml
- 66060-redfish-new-resource-id-option.yaml
- 66144-docker_container-removal-timeout.yml
- 66151-docker_swarm_service-healthcheck-start-period.yml
- 66157-postgresql-create-unique-indexes.yml
- 66247-zabbix_proxy-address-field.yaml
- 66252-mysql_replication_fail_on_error.yml
- 66268-cyberarkpassword-fix-invalid-attr.yaml
- 66322-moved_line_causing_terraform_output_suppression.yml
@@ -698,20 +740,26 @@ releases:
- 66357-support-changing-fetch_url-settings-for-rundeck-modules.yaml
- 66382-docker_container-port-range.yml
- 66398-pamd_fix-attributeerror-when-removing-first-line.yml
- 66463-zabbix_template-fix-error-linktemplate-and-importdump.yml
- 66592_ipa_encoding_fix.yml
- 66599-docker-healthcheck.yml
- 66600-docker_container-volumes.yml
- 66688-mysql_db_add_skip_lock_tables_option.yml
- 66711-postgresql_user_add_comment_parameter.yml
- 66717-postgresql_db_add_dump_extra_args_param.yml
- 66747-zabbix_template-newupdaterule-deletemissinglinkedtemplate.yml
- 66777-zabbix_host_tags_macros_support.yml
- 66801-mysql_user_priv_can_be_dict.yml
- 66806-mysql_variables_not_support_variables_with_dot.yml
- 66807-redhat_subscription-no-default-quantity.yaml
- 66837-zabbix-proxy-interface.yml
- 66841-zabbix_action-allowstrfor-esc_period.yml
- 66914-purefa_user_string.yaml
- 66929-pmrun-quote-entire-success-command-string.yml
- 66957-scaleway-jsonify-only-for-json-requests.yml
- 66974-mysql_user_doesnt_support_privs_with_underscore.yml
- 67046-postgresql_modules_make_params_required.yml
- 67302-zabbix_template_info-add-omit_date-field.yml
- 67337-fix-proxysql-mysql-cursor.yaml
- 67353-docker_login-permissions.yml
- 67418-postgresql_set_converts_value_to_uppercase.yml
@@ -719,6 +767,7 @@ releases:
- 67464-postgresql_info_add_collecting_subscription_info.yml
- 67614-postgresql_info_add_collecting_publication_info.yml
- 67655-scaleway_compute-get-image-instead-loop-on-list.yml
- 67693-zabbix_mediatype.yml
- 67747-mysql_db_add_dump_extra_args_param.yml
- 67767-mysql_db_fix_bug_introduced_by_56721.yml
- 67832-run_powershell_modules_on_windows_containers.yml
@@ -737,6 +786,7 @@ releases:
- firewalld-version-0_7_0.yml
- firewalld_zone_target.yml
- fix-oc-conn-plugin-envvar.yml
- fix_zabbix_host_visible_name.yml
- gitlab-project-variable-variable-type.yml
- gitlab_project_variable.yml
- ldap-params-removal.yml
@@ -755,6 +805,8 @@ releases:
- solaris_zone_name_fix.yml
- syslogger-disable-check-mode.yaml
- xml-deprecated-functions.yml
- zabbix-hostmacro.yml
- zabbix_user-mediatype-error.yml
modules:
- description: Override a debian package's version of a file
name: dpkg_divert
@@ -853,381 +905,52 @@ releases:
name: lmdb_kv
namespace: null
release_date: '2020-06-20'
1.0.0:
0.3.0-experimental.meta.redirects:
changes:
breaking_changes:
- log_plays callback - add missing information to the logs generated by the
callback plugin. This changes the log message format (https://github.com/ansible-collections/community.general/pull/442).
- 'pkgng - passing ``name: *`` with ``state: absent`` will no longer remove
every installed package from the system. It is now a noop. (https://github.com/ansible-collections/community.general/pull/569).'
- 'pkgng - passing ``name: *`` with ``state: latest`` or ``state: present``
will no longer install every package from the configured package repositories.
Instead, ``name: *, state: latest`` will upgrade all already-installed packages,
and ``name: *, state: present`` is a noop. (https://github.com/ansible-collections/community.general/pull/569).'
bugfixes:
- aix_filesystem - fix issues with ismount module_util pathing for Ansible 2.9
(https://github.com/ansible-collections/community.general/pull/567).
- consul_kv lookup - fix ``ANSIBLE_CONSUL_URL`` environment variable handling
(https://github.com/ansible/ansible/issues/51960).
- consul_kv lookup - fix arguments handling (https://github.com/ansible-collections/community.general/pull/303).
- digital_ocean_tag_info - fix crash when querying for an individual tag (https://github.com/ansible-collections/community.general/pull/615).
- doas become plugin - address a bug with the parameters handling that was breaking
the plugin in community.general when ``become_flags`` and ``become_user``
were not explicitly specified (https://github.com/ansible-collections/community.general/pull/704).
- docker_compose - add a condition to prevent service startup if parameter ``stopped``
is true. Otherwise, the service will be started on each play and stopped again
immediately due to the ``stopped`` parameter and breaks the idempotency of
the module (https://github.com/ansible-collections/community.general/issues/532).
- docker_compose - disallow usage of the parameters ``stopped`` and ``restarted``
at the same time. This also breaks the idempotency (https://github.com/ansible-collections/community.general/issues/532).
- docker_container - use Config MacAddress by default instead of Networks. Networks
MacAddress is empty in some cases (https://github.com/ansible/ansible/issues/70206).
- docker_container - various error fixes in string handling for Python 2 to
avoid crashes when non-ASCII characters are used in strings (https://github.com/ansible-collections/community.general/issues/640).
- docker_swarm - removes ``advertise_addr`` from list of required arguments
when ``state`` is ``"join"`` (https://github.com/ansible-collections/community.general/issues/439).
- dzdo become plugin - address a bug with the parameters handling that was breaking
the plugin in community.general when ``become_user`` was not explicitly specified
(https://github.com/ansible-collections/community.general/pull/708).
- filesystem - resizefs of xfs filesystems is fixed. Filesystem needs to be
mounted.
- jenkins_plugin - replace MD5 checksum verification with SHA1 due to MD5 being
disabled on systems with FIPS-only algorithms enabled (https://github.com/ansible/ansible/issues/34304).
- jira - improve error message handling (https://github.com/ansible-collections/community.general/pull/311).
- jira - improve error message handling with multiple errors (https://github.com/ansible-collections/community.general/pull/707).
- kubevirt - Add aliases 'interface_name' for network_name (https://github.com/ansible/ansible/issues/55641).
- nmcli - fix idempotency when modifying an existing connection (https://github.com/ansible-collections/community.general/issues/481).
- osx_defaults - fix handling negative integers (https://github.com/ansible-collections/community.general/issues/134).
- pacman - treat package names containing .zst as package files during installation
(https://www.archlinux.org/news/now-using-zstandard-instead-of-xz-for-package-compression/,
https://github.com/ansible-collections/community.general/pull/650).
- pbrun become plugin - address a bug with the parameters handling that was
breaking the plugin in community.general when ``become_user`` was not explicitly
specified (https://github.com/ansible-collections/community.general/pull/708).
- postgresql_privs - fix crash when set privileges on schema with hyphen in
the name (https://github.com/ansible-collections/community.general/issues/656).
- postgresql_set - only display a warning about restarts, when restarting is
needed (https://github.com/ansible-collections/community.general/pull/651).
- redfish_info, redfish_config, redfish_command - Fix Redfish response payload
decode on Python 3.5 (https://github.com/ansible-collections/community.general/issues/686)
- selective - mark task failed correctly (https://github.com/ansible/ansible/issues/63767).
- snmp_facts - skip ``EndOfMibView`` values (https://github.com/ansible/ansible/issues/49044).
- yarn - fixed an index out of range error when no outdated packages were returned
by yarn executable (see https://github.com/ansible-collections/community.general/pull/474).
- yarn - fixed a too many values to unpack error when scoped packages are installed
(see https://github.com/ansible-collections/community.general/pull/474).
deprecated_features:
- The ldap_attr module has been deprecated and will be removed in a later release;
use ldap_attrs instead.
- xbps - the ``force`` option never had any effect. It is now deprecated, and
will be removed in 3.0.0 (https://github.com/ansible-collections/community.general/pull/568).
minor_changes:
- Add the ``gcpubsub``, ``gcpubsub_info`` and ``gcpubsub_facts`` (to be removed
in 3.0.0) modules. These were originally in community.general, but removed
on the assumption that they have been moved to google.cloud. Since this turned
out to be incorrect, we re-added them for 1.0.0.
- Add the deprecated ``gcp_backend_service``, ``gcp_forwarding_rule`` and ``gcp_healthcheck``
modules, which will be removed in 2.0.0. These were originally in community.general,
but removed on the assumption that they have been moved to google.cloud. Since
this turned out to be incorrect, we re-added them for 1.0.0.
- The collection is now actively tested in CI with the latest Ansible 2.9 release.
- airbrake_deployment - add ``version`` param; clarified docs on ``revision``
param (https://github.com/ansible-collections/community.general/pull/583).
- apk - added ``no_cache`` option (https://github.com/ansible-collections/community.general/pull/548).
- firewalld - the module has been moved to the ``ansible.posix`` collection.
A redirection is active, which will be removed in version 2.0.0 (https://github.com/ansible-collections/community.general/pull/623).
- gitlab_project - add support for merge_method on projects (https://github.com/ansible/ansible/pull/66813).
- gitlab_runners inventory plugin - permit environment variable input for ``server_url``,
``api_token`` and ``filter`` options (https://github.com/ansible-collections/community.general/pull/611).
- haproxy - add options to dis/enable health and agent checks. When health
and agent checks are enabled for a service, a disabled service will re-enable
itself automatically. These options also change the state of the agent checks
to match the requested state for the backend (https://github.com/ansible-collections/community.general/issues/684).
- log_plays callback - use v2 methods (https://github.com/ansible-collections/community.general/pull/442).
- logstash callback - add ini config (https://github.com/ansible-collections/community.general/pull/610).
- lxd_container - added support of ``--target`` flag for cluster deployments
(https://github.com/ansible-collections/community.general/issues/637).
- parted - accept negative numbers in ``part_start`` and ``part_end``
- pkgng - added ``stdout`` and ``stderr`` attributes to the result (https://github.com/ansible-collections/community.general/pull/560).
- 'pkgng - added support for upgrading all packages using ``name: *, state:
latest``, similar to other package providers (https://github.com/ansible-collections/community.general/pull/569).'
- postgresql_query - add search_path parameter (https://github.com/ansible-collections/community.general/issues/625).
- rundeck_acl_policy - add check for rundeck_acl_policy name parameter (https://github.com/ansible-collections/community.general/pull/612).
- slack - add support for sending messages built with block kit (https://github.com/ansible-collections/community.general/issues/380).
- splunk callback - add an option to allow not to validate certificate from
HEC (https://github.com/ansible-collections/community.general/pull/596).
- xfconf - add arrays support (https://github.com/ansible/ansible/issues/46308).
- xfconf - add support for ``uint`` type (https://github.com/ansible-collections/community.general/pull/696).
release_summary: 'This is release 1.0.0 of ``community.general``, released on
2020-07-31.
release_summary: 'This is a experimental release to test whether ``meta/runtime.yml``
redirects work as expected for flatmapping. There will be no final 0.3.0 release;
the next release will be 1.0.0. This experimental pre-release will not appear
in the versioning history of 0.3.0, it is solely for testing certain technical
aspects of ansible-base and antsibull.
'
removed_features:
- conjur_variable lookup - has been moved to the ``cyberark.conjur`` collection.
A redirection is active, which will be removed in version 2.0.0 (https://github.com/ansible-collections/community.general/pull/570).
- digital_ocean_* - all DigitalOcean modules have been moved to the ``community.digitalocean``
collection. A redirection is active, which will be removed in version 2.0.0
(https://github.com/ansible-collections/community.general/pull/622).
- infini_* - all infinidat modules have been moved to the ``infinidat.infinibox``
collection. A redirection is active, which will be removed in version 2.0.0
(https://github.com/ansible-collections/community.general/pull/607).
- logicmonitor - the module has been removed in 1.0.0 since it is unmaintained
and the API used by the module has been turned off in 2017 (https://github.com/ansible-collections/community.general/issues/539,
https://github.com/ansible-collections/community.general/pull/541).
- logicmonitor_facts - the module has been removed in 1.0.0 since it is unmaintained
and the API used by the module has been turned off in 2017 (https://github.com/ansible-collections/community.general/issues/539,
https://github.com/ansible-collections/community.general/pull/541).
- mysql_* - all MySQL modules have been moved to the ``community.mysql`` collection.
A redirection is active, which will be removed in version 2.0.0 (https://github.com/ansible-collections/community.general/pull/633).
- proxysql_* - all ProxySQL modules have been moved to the ``community.proxysql``
collection. A redirection is active, which will be removed in version 2.0.0
(https://github.com/ansible-collections/community.general/pull/624).
fragments:
- 1.0.0.yml
- 296-ansible-2.9.yml
- 0.3.0-experimental-meta-redirects.yml
- 303-consul_kv-fix-env-variables-handling.yaml
- 311-jira-error-handling.yaml
- 33979-xfs_growfs.yml
- 442-log_plays-add_playbook_task_name_and_action.yml
- 474-yarn_fix-outdated-fix-list.yml
- 547-start-service-condition.yaml
- 548_apk.yml
- 55903_kubevirt.yml
- 560-pkgng-add-stdout-and-stderr.yaml
- 562-nmcli-fix-idempotency.yaml
- 564-docker_container_use_config_macaddress_by_default.yaml
- 568_packaging.yml
- 569-pkgng-add-upgrade-action.yaml
- 596-splunk-add-option-to-not-validate-cert.yaml
- 610_logstash_callback_add_ini_config.yml
- 611-gitlab-runners-env-vars-intput-and-default-item-limit.yaml
- 613-snmp_facts-EndOfMibView.yml
- 615-digital-ocean-tag-info-bugfix.yml
- 63767_selective.yml
- 642-docker_container-python-2.yml
- 646-docker_swarm-remove-advertise_addr-from-join-requirement.yaml
- 650_pacman_support_zst_package_files.yaml
- 651-fix-postgresql_set-warning.yaml
- 653-postgresql_query_add_search_path_param.yml
- 656-name-with-hyphen.yml
- 66813_gitlab_project.yml
- 676-osx_defaults_fix_handling_negative_ints.yml
- 677-jenkins_plugins_sha1.yaml
- 687-fix-redfish-payload-decode-python35.yml
- 689-haproxy_agent_and_health.yml
- 693-big-revamp-on-xfconf-adding-array-values.yml
- 702-slack-support-for-blocks.yaml
- 704-doas-set-correct-default-values.yml
- 707-jira-error-handling.yaml
- 708-set-correct-default-values.yml
- 711-lxd-target.yml
- add_argument_check_for_rundeck.yaml
- airbrake_deployment_add_version.yml
- aix_filesystem-module_util-routing-issue.yml
- cyberarkconjur-removal.yml
- digital-ocean.yml
- firewalld_migration.yml
- google-modules.yml
- infinidat-removal.yml
- logicmonitor-removal.yml
- mysql.yml
- parted_negative_numbers.yml
- porting-guide-2.yml
- proxysql.yml
- xfconf_add_uint_type.yml
modules:
- description: Return information on a docker stack
name: docker_stack_info
namespace: cloud.docker
- description: Manage macOS services
name: launchd
namespace: system
- description: Execute SQL via ODBC
name: odbc
namespace: database.misc
plugins:
inventory:
- description: Cobbler inventory source
name: cobbler
namespace: null
lookup:
- description: Get secrets from Thycotic DevOps Secrets Vault
name: dsv
namespace: null
- description: Get secrets from Thycotic Secret Server
name: tss
namespace: null
release_date: '2020-07-31'
1.1.0:
changes:
bugfixes:
- cobbler inventory plugin - ``name`` needed FQCN (https://github.com/ansible-collections/community.general/pull/722).
- dsv lookup - use correct dict usage (https://github.com/ansible-collections/community.general/pull/743).
- inventory plugins - allow FQCN in ``plugin`` option (https://github.com/ansible-collections/community.general/pull/722).
- ipa_hostgroup - fix an issue with load-balanced ipa and cookie handling with
Python 3 (https://github.com/ansible-collections/community.general/issues/737).
- oc connection plugin - ``transport`` needed FQCN (https://github.com/ansible-collections/community.general/pull/722).
- postgresql_set - allow to pass an empty string to the ``value`` parameter
(https://github.com/ansible-collections/community.general/issues/775).
- xfconf - make it work in non-english locales (https://github.com/ansible-collections/community.general/pull/744).
minor_changes:
- The collection dependencies were adjusted so that ``community.kubernetes``
and ``google.cloud`` are required to be of version 1.0.0 or newer (https://github.com/ansible-collections/community.general/pull/774).
- jc - new filter to convert the output of many shell commands and file-types
to JSON. Uses the jc library at https://github.com/kellyjonbrazil/jc. For
example, filtering the STDOUT output of ``uname -a`` via ``{{ result.stdout
| community.general.jc('uname') }}``. Requires Python 3.6+ (https://github.com/ansible-collections/community.general/pull/750).
- xfconf - add support for ``double`` type (https://github.com/ansible-collections/community.general/pull/744).
release_summary: 'Release for Ansible 2.10.0.
'
fragments:
- 1.1.0.yml
- 722-plugins.yml
- 738-ipa-python3.yml
- 744-xfconf_make_locale-independent.yml
- 750-jc-new-filter.yaml
- 776-postgresql_set_allow_empty_string.yaml
- dsv_fix.yml
- galaxy-yml.yml
modules:
- description: Return information of the tasks on a docker stack
name: docker_stack_task_info
namespace: cloud.docker
- description: Save iptables state into a file or restore it from a file
name: iptables_state
namespace: system
- description: Shut down a machine
name: shutdown
namespace: system
- description: Manage OpenBSD system upgrades
name: sysupgrade
namespace: system
release_date: '2020-08-18'
1.2.0:
changes:
bugfixes:
- aerospike_migrations - handle exception when unstable-cluster is returned
(https://github.com/ansible-collections/community.general/pull/900).
- django_manage - fix idempotence for ``createcachetable`` (https://github.com/ansible-collections/community.general/pull/699).
- docker_container - fix idempotency problem with ``published_ports`` when strict
comparison is used and list is empty (https://github.com/ansible-collections/community.general/issues/978).
- 'gem - fix get_installed_versions: correctly parse ``default`` version (https://github.com/ansible-collections/community.general/pull/783).'
- hashi_vault - add missing ``mount_point`` parameter for approle auth (https://github.com/ansible-collections/community.general/pull/897).
- hashi_vault lookup - ``token_path`` in config file overridden by env ``HOME``
(https://github.com/ansible-collections/community.general/issues/373).
- homebrew_cask - fixed issue where a cask with ``@`` in the name is incorrectly
reported as invalid (https://github.com/ansible-collections/community.general/issues/733).
- interfaces_file - escape regular expression characters in old value (https://github.com/ansible-collections/community.general/issues/777).
- launchd - fix for user-level services (https://github.com/ansible-collections/community.general/issues/896).
- nmcli - set ``C`` locale when executing ``nmcli`` (https://github.com/ansible-collections/community.general/issues/989).
- parted - fix creating partition when label is changed (https://github.com/ansible-collections/community.general/issues/522).
- pkg5 - now works when Python 3 is used on the target (https://github.com/ansible-collections/community.general/pull/789).
- postgresql_privs - allow to pass ``PUBLIC`` role written in lowercase letters
(https://github.com/ansible-collections/community.general/issues/857).
- postgresql_privs - fix the module mistakes a procedure for a function (https://github.com/ansible-collections/community.general/issues/994).
- postgresql_privs - rollback if nothing changed (https://github.com/ansible-collections/community.general/issues/885).
- postgresql_privs - the module was attempting to revoke grant options even
though ``grant_option`` was not specified (https://github.com/ansible-collections/community.general/pull/796).
- proxmox_kvm - defer error-checking for non-existent VMs in order to fix idempotency
of tasks using ``state=absent`` and properly recognize a success (https://github.com/ansible-collections/community.general/pull/811).
- proxmox_kvm - improve handling of long-running tasks by creating a dedicated
function (https://github.com/ansible-collections/community.general/pull/831).
- slack - fix ``xox[abp]`` token identification to capture everything after
``xox[abp]``, as the token is the only thing that should be in this argument
(https://github.com/ansible-collections/community.general/issues/862).
- terraform - fix incorrectly reporting a status of unchanged when number of
resources added or destroyed are multiples of 10 (https://github.com/ansible-collections/community.general/issues/561).
- timezone - support Python3 on macos/darwin (https://github.com/ansible-collections/community.general/pull/945).
- zfs - fixed ``invalid character '@' in pool name"`` error when working with
snapshots on a root zvol (https://github.com/ansible-collections/community.general/issues/932).
minor_changes:
- hashi_vault - support ``VAULT_NAMESPACE`` environment variable for namespaced
lookups against Vault Enterprise (in addition to the ``namespace=`` flag supported
today) (https://github.com/ansible-collections/community.general/pull/929).
- hashi_vault lookup - add ``VAULT_TOKEN_FILE`` as env option to specify ``token_file``
param (https://github.com/ansible-collections/community.general/issues/373).
- hashi_vault lookup - add ``VAULT_TOKEN_PATH`` as env option to specify ``token_path``
param (https://github.com/ansible-collections/community.general/issues/373).
- ipa_user - add ``userauthtype`` option (https://github.com/ansible-collections/community.general/pull/951).
- iptables_state - use FQCN when calling a module from action plugin (https://github.com/ansible-collections/community.general/pull/967).
- nagios - add the ``acknowledge`` action (https://github.com/ansible-collections/community.general/pull/820).
- nagios - add the ``host`` and ``all`` values for the ``forced_check`` action
(https://github.com/ansible-collections/community.general/pull/998).
- nagios - add the ``service_check`` action (https://github.com/ansible-collections/community.general/pull/820).
- nagios - rename the ``service_check`` action to ``forced_check`` since we
now are able to check both a particular service, all services of a particular
host and the host itself (https://github.com/ansible-collections/community.general/pull/998).
- pkgutil - module can now accept a list of packages (https://github.com/ansible-collections/community.general/pull/799).
- pkgutil - module has a new option, ``force``, equivalent to the ``-f`` option
to the `pkgutil <http://pkgutil.net/>`_ command (https://github.com/ansible-collections/community.general/pull/799).
- pkgutil - module now supports check mode (https://github.com/ansible-collections/community.general/pull/799).
- postgresql_privs - add the ``usage_on_types`` option (https://github.com/ansible-collections/community.general/issues/884).
- proxmox_kvm - improve code readability (https://github.com/ansible-collections/community.general/pull/934).
- pushover - add device parameter (https://github.com/ansible-collections/community.general/pull/802).
- redfish_command - add sub-command for ``EnableContinuousBootOverride`` and
``DisableBootOverride`` to allow setting BootSourceOverrideEnabled Redfish
property (https://github.com/ansible-collections/community.general/issues/824).
- redfish_command - support same reset actions on Managers as on Systems (https://github.com/ansible-collections/community.general/issues/901).
- slack - add support for updating messages (https://github.com/ansible-collections/community.general/issues/304).
- xml - fixed issue where ``changed`` was returned when removing non-existent xpath
(https://github.com/ansible-collections/community.general/pull/1007).
- zypper_repository - proper failure when python-xml is missing (https://github.com/ansible-collections/community.general/pull/939).
release_summary: Regular bimonthly minor release.
fragments:
- 1.2.0.yml
- 522-parted_change_label.yml
- 563-update-terraform-status-test.yaml
- 699-django_manage-createcachetable-fix-idempotence.yml
- 777-interfaces_file-re-escape.yml
- 783-fix-gem-installed-versions.yaml
- 789-pkg5-wrap-to-modify-package-list.yaml
- 796-postgresql_privs-grant-option-bug.yaml
- 802-pushover-device-parameter.yml
- 811-proxmox-kvm-state-absent.yml
- 820_nagios_added_acknowledge_and_servicecheck.yml
- 825-bootsource-override-option.yaml
- 831-proxmox-kvm-wait.yml
- 843-update-slack-messages.yml
- 858-postgresql_privs_should_allow_public_role_lowercased.yml
- 887-rollback-if-nothing-changed.yml
- 892-slack-token-validation.yml
- 897-lookup-plugin-hashivault-add-approle-mount-point.yaml
- 899_launchd_user_service.yml
- 900-aerospike-migration-handle-unstable-cluster.yaml
- 902-hashi_vault-token-path.yml
- 903-enhance-redfish-manager-reset-actions.yml
- 929-vault-namespace-support.yml
- 939-zypper_repository_proper_failure_on_missing_python-xml.yml
- 941-postgresql_privs_usage_on_types_option.yml
- 943-proxmox-kvm-code-cleanup.yml
- 945-darwin-timezone-py3.yaml
- 951-ipa_user-add-userauthtype-param.yaml
- 967-use-fqcn-when-calling-a-module-from-action-plugin.yml
- 979-docker_container-published_ports-empty-idempotency.yml
- 992-nmcli-locale.yml
- 996-postgresql_privs_fix_function_handling.yml
- 998-nagios-added_forced_check_for_all_services_or_host.yml
- homebrew-cask-at-symbol-fix.yaml
- pkgutil-check-mode-etc.yaml
- xml-remove-changed.yml
- zfs-root-snapshot.yml
modules:
- description: Manage group members on GitLab Server
name: gitlab_group_members
namespace: source_control.gitlab
- description: Creates, updates, or deletes GitLab groups variables
name: gitlab_group_variable
namespace: source_control.gitlab
- description: Scaleway database backups management module
name: scaleway_database_backup
namespace: cloud.scaleway
plugins:
inventory:
- description: Proxmox inventory source
name: proxmox
namespace: null
- description: StackPath Edge Computing inventory source
name: stackpath_compute
namespace: null
release_date: '2020-09-30'
release_date: '2020-06-27'

View File

@@ -0,0 +1,4 @@
minor_changes:
- log_plays callback - use v2 methods (https://github.com/ansible-collections/community.general/pull/442).
breaking_changes:
- log_plays callback - add missing information to the logs generated by the callback plugin. This changes the log message format (https://github.com/ansible-collections/community.general/pull/442).

View File

@@ -0,0 +1,3 @@
---
bugfixes:
- kubevirt - Add aliases 'interface_name' for network_name (https://github.com/ansible/ansible/issues/55641).

View File

@@ -0,0 +1,3 @@
---
minor_changes:
- "airbrake_deployment - add ``version`` param; clarified docs on ``revision`` param (https://github.com/ansible-collections/community.general/pull/583)."

View File

@@ -1,20 +1,20 @@
namespace: community
name: general
version: 1.2.0
version: 0.3.0-experimental.meta.redirects
readme: README.md
authors:
- Ansible (https://github.com/ansible)
description: null
license_file: COPYING
tags: [community]
tags: null
# NOTE: No more dependencies can be added to this list
dependencies:
ansible.netcommon: '>=1.0.0'
ansible.netcommon: '>=0.0.2'
ansible.posix: '>=1.0.0'
community.kubernetes: '>=1.0.0'
google.cloud: '>=1.0.0'
community.kubernetes: '>=0.1.0'
google.cloud: '>=0.0.9'
repository: https://github.com/ansible-collections/community.general
#documentation: https://github.com/ansible-collection-migration/community.general/tree/main/docs
#documentation: https://github.com/ansible-collection-migration/community.general/tree/master/docs
homepage: https://github.com/ansible-collections/community.general
issues: https://github.com/ansible-collections/community.general/issues
#type: flatmap

File diff suppressed because it is too large Load Diff

View File

@@ -1 +0,0 @@
./system/iptables_state.py

View File

@@ -1 +0,0 @@
./system/shutdown.py

View File

@@ -1,189 +0,0 @@
# Copyright: (c) 2020, quidame <quidame@poivron.org>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
import time
from ansible.plugins.action import ActionBase
from ansible.errors import AnsibleError, AnsibleActionFail, AnsibleConnectionFailure
from ansible.utils.vars import merge_hash
from ansible.utils.display import Display
display = Display()
class ActionModule(ActionBase):
    """Action plugin for the iptables_state module.

    Wraps the module so that ``state=restored`` can run asynchronously on the
    remote with a controller-side confirmation step: the controller resets the
    connection, then removes a backup file on the remote to confirm the new
    ruleset. If the controller cannot reconnect (the restored rules locked it
    out), the remote side rolls back when its own timeout expires.
    """

    # Keep internal params away from user interactions
    _VALID_ARGS = frozenset(('path', 'state', 'table', 'noflush', 'counters', 'modprobe', 'ip_version', 'wait'))
    DEFAULT_SUDOABLE = True

    MSG_ERROR__ASYNC_AND_POLL_NOT_ZERO = (
        "This module doesn't support async>0 and poll>0 when its 'state' param "
        "is set to 'restored'. To enable its rollback feature (that needs the "
        "module to run asynchronously on the remote), please set task attribute "
        "'poll' (=%s) to 0, and 'async' (=%s) to a value >2 and not greater than "
        "'ansible_timeout' (=%s) (recommended).")
    # Typo fix: "loose access" -> "lose access" in the user-facing warning.
    MSG_WARNING__NO_ASYNC_IS_NO_ROLLBACK = (
        "Attempts to restore iptables state without rollback in case of mistake "
        "may lead the ansible controller to lose access to the hosts and never "
        "regain it before fixing firewall rules through a serial console, or any "
        "other way except SSH. Please set task attribute 'poll' (=%s) to 0, and "
        "'async' (=%s) to a value >2 and not greater than 'ansible_timeout' (=%s) "
        "(recommended).")
    MSG_WARNING__ASYNC_GREATER_THAN_TIMEOUT = (
        "You attempt to restore iptables state with rollback in case of mistake, "
        "but with settings that will lead this rollback to happen AFTER that the "
        "controller will reach its own timeout. Please set task attribute 'poll' "
        "(=%s) to 0, and 'async' (=%s) to a value >2 and not greater than "
        "'ansible_timeout' (=%s) (recommended).")

    def _async_result(self, module_args, task_vars, timeout):
        '''
        Retrieve results of the asynchronous task, and display them in place of
        the async wrapper results (those with the ansible_job_id key).

        Polls ansible.builtin.async_status once per second for at most
        ``timeout`` seconds (at least one poll is always made, even for 0).
        '''
        # At least one iteration is required, even if timeout is 0.
        for i in range(max(1, timeout)):
            async_result = self._execute_module(
                module_name='ansible.builtin.async_status',
                module_args=module_args,
                task_vars=task_vars,
                wrap_async=False)
            if async_result['finished'] == 1:
                break
            # Sleeps 0s when timeout is 0 (single immediate poll), else 1s.
            time.sleep(min(1, timeout))

        return async_result

    def run(self, tmp=None, task_vars=None):
        """Execute the module, handling the async/rollback dance for state=restored.

        NOTE(review): the indentation of this block was reconstructed from a
        whitespace-stripped copy; the nesting of the trailing cleanup steps was
        inferred from the variables that guard them (``async_status_args`` and
        ``wrap_async`` are only bound inside the not-skipped branch) — confirm
        against upstream community.general history.
        """
        self._supports_check_mode = True
        self._supports_async = True

        result = super(ActionModule, self).run(tmp, task_vars)
        del tmp  # tmp no longer has any effect

        if not result.get('skipped'):

            # FUTURE: better to let _execute_module calculate this internally?
            wrap_async = self._task.async_val and not self._connection.has_native_async

            # Set short names for values we'll have to compare or reuse
            task_poll = self._task.poll
            task_async = self._task.async_val
            check_mode = self._play_context.check_mode
            max_timeout = self._connection._play_context.timeout
            module_name = self._task.action
            module_args = self._task.args

            if module_args.get('state', None) == 'restored':
                if not wrap_async:
                    # Synchronous restore: no rollback possible if the new rules
                    # cut the controller off. Warn (check mode is harmless).
                    if not check_mode:
                        display.warning(self.MSG_WARNING__NO_ASYNC_IS_NO_ROLLBACK % (
                            task_poll,
                            task_async,
                            max_timeout))
                elif task_poll:
                    # async>0 together with poll>0 is incompatible with the
                    # rollback design: the controller must not block-poll.
                    raise AnsibleActionFail(self.MSG_ERROR__ASYNC_AND_POLL_NOT_ZERO % (
                        task_poll,
                        task_async,
                        max_timeout))
                else:
                    # Rollback would only fire after the controller already
                    # timed out; warn about the useless configuration.
                    if task_async > max_timeout and not check_mode:
                        display.warning(self.MSG_WARNING__ASYNC_GREATER_THAN_TIMEOUT % (
                            task_poll,
                            task_async,
                            max_timeout))

                    # BEGIN snippet from async_status action plugin
                    env_async_dir = [e for e in self._task.environment if
                                     "ANSIBLE_ASYNC_DIR" in e]
                    if len(env_async_dir) > 0:
                        # for backwards compatibility we need to get the dir from
                        # ANSIBLE_ASYNC_DIR that is defined in the environment. This is
                        # deprecated and will be removed in favour of shell options
                        async_dir = env_async_dir[0]['ANSIBLE_ASYNC_DIR']
                        msg = "Setting the async dir from the environment keyword " \
                              "ANSIBLE_ASYNC_DIR is deprecated. Set the async_dir " \
                              "shell option instead"
                        display.deprecated(msg, version='2.0.0',
                                           collection_name='community.general')  # was Ansible 2.12
                    else:
                        # inject the async directory based on the shell option into the
                        # module args
                        async_dir = self.get_shell_option('async_dir', default="~/.ansible_async")
                    # END snippet from async_status action plugin

                    # Bind the loop max duration to consistent values on both
                    # remote and local sides (if not the same, make the loop
                    # longer on the controller); and set a backup file path.
                    module_args['_timeout'] = task_async
                    module_args['_back'] = '%s/iptables.state' % async_dir
                    async_status_args = dict(_async_dir=async_dir)
                    confirm_cmd = 'rm -f %s' % module_args['_back']
                    remaining_time = max(task_async, max_timeout)

            # do work!
            result = merge_hash(result, self._execute_module(module_args=module_args, task_vars=task_vars, wrap_async=wrap_async))

            # Then the 3-steps "go ahead or rollback":
            # - reset connection to ensure a persistent one will not be reused
            # - confirm the restored state by removing the backup on the remote
            # - retrieve the results of the asynchronous task to return them
            if '_back' in module_args:
                async_status_args['jid'] = result.get('ansible_job_id', None)
                if async_status_args['jid'] is None:
                    raise AnsibleActionFail("Unable to get 'ansible_job_id'.")

                # Catch early errors due to missing mandatory option, bad
                # option type/value, missing required system command, etc.
                result = merge_hash(result, self._async_result(async_status_args, task_vars, 0))

                if not result['finished']:
                    try:
                        self._connection.reset()
                        display.v("%s: reset connection" % (module_name))
                    except AttributeError:
                        display.warning("Connection plugin does not allow to reset the connection.")

                    for x in range(max_timeout):
                        time.sleep(1)
                        remaining_time -= 1
                        # - AnsibleConnectionFailure covers rejected requests (i.e.
                        #   by rules with '--jump REJECT')
                        # - ansible_timeout is able to cover dropped requests (due
                        #   to a rule or policy DROP) if not lower than async_val.
                        try:
                            garbage = self._low_level_execute_command(confirm_cmd, sudoable=self.DEFAULT_SUDOABLE)
                            break
                        except AnsibleConnectionFailure:
                            continue

                    result = merge_hash(result, self._async_result(async_status_args, task_vars, remaining_time))

                # Cleanup async related stuff and internal params
                for key in ('ansible_job_id', 'results_file', 'started', 'finished'):
                    if result.get(key):
                        del result[key]

                if result.get('invocation', {}).get('module_args'):
                    if '_timeout' in result['invocation']['module_args']:
                        del result['invocation']['module_args']['_back']
                        del result['invocation']['module_args']['_timeout']

                async_status_args['mode'] = 'cleanup'
                garbage = self._execute_module(
                    module_name='ansible.builtin.async_status',
                    module_args=async_status_args,
                    task_vars=task_vars,
                    wrap_async=False)

            if not wrap_async:
                # remove a temporary path we created
                self._remove_tmp_path(self._connection._shell.tmpdir)

        return result

View File

@@ -1,211 +0,0 @@
# Copyright: (c) 2020, Amin Vakil <info@aminvakil.com>
# Copyright: (c) 2016-2018, Matt Davis <mdavis@ansible.com>
# Copyright: (c) 2018, Sam Doran <sdoran@redhat.com>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
from ansible.errors import AnsibleError, AnsibleConnectionFailure
from ansible.module_utils._text import to_native, to_text
from ansible.module_utils.common.collections import is_string
from ansible.plugins.action import ActionBase
from ansible.utils.display import Display
display = Display()
class TimedOutException(Exception):
pass
class ActionModule(ActionBase):
TRANSFERS_FILES = False
_VALID_ARGS = frozenset((
'msg',
'delay',
'search_paths'
))
DEFAULT_CONNECT_TIMEOUT = None
DEFAULT_PRE_SHUTDOWN_DELAY = 0
DEFAULT_SHUTDOWN_MESSAGE = 'Shut down initiated by Ansible'
DEFAULT_SHUTDOWN_COMMAND = 'shutdown'
DEFAULT_SHUTDOWN_COMMAND_ARGS = '-h {delay_min} "{message}"'
DEFAULT_SUDOABLE = True
SHUTDOWN_COMMANDS = {
'alpine': 'poweroff',
'vmkernel': 'halt',
}
SHUTDOWN_COMMAND_ARGS = {
'alpine': '',
'void': '-h +{delay_min} "{message}"',
'freebsd': '-h +{delay_sec}s "{message}"',
'linux': DEFAULT_SHUTDOWN_COMMAND_ARGS,
'macosx': '-h +{delay_min} "{message}"',
'openbsd': '-h +{delay_min} "{message}"',
'solaris': '-y -g {delay_sec} -i 5 "{message}"',
'sunos': '-y -g {delay_sec} -i 5 "{message}"',
'vmkernel': '-d {delay_sec}',
'aix': '-Fh',
}
def __init__(self, *args, **kwargs):
    # Pass-through constructor; kept explicit for future extension.
    super(ActionModule, self).__init__(*args, **kwargs)
@property
def delay(self):
    # Pre-shutdown delay in seconds from the 'delay' task arg, clamped to >= 0
    # by _check_delay; defaults to DEFAULT_PRE_SHUTDOWN_DELAY (0).
    return self._check_delay('delay', self.DEFAULT_PRE_SHUTDOWN_DELAY)
def _check_delay(self, key, default):
    """Read task arg ``key`` (falling back to ``default``) as an int, never negative."""
    raw = self._task.args.get(key, default)
    # Negative delays make no sense for shutdown scheduling; clamp to zero.
    return max(0, int(raw))
def _get_value_from_facts(self, variable_name, distribution, default_value):
    """Resolve a per-platform value from a class lookup table.

    Tries the most specific key first — distribution name + major version
    (e.g. 'centos7') — then distribution name, then OS family; falls back to
    the class attribute named by ``default_value``. ``variable_name`` and
    ``default_value`` are attribute names on this class.
    """
    table = getattr(self, variable_name)
    candidates = (
        distribution['name'] + distribution['version'],
        distribution['name'],
        distribution['family'],
    )
    for candidate in candidates:
        if candidate in table:
            return table[candidate]
    return getattr(self, default_value)
def get_shutdown_command_args(self, distribution):
    """Build the platform-specific argument string for the shutdown command.

    The per-platform template may use {delay_sec}, {delay_min} and {message};
    delay_min is floor-divided minutes, so delays under 60s render as 0.
    """
    template = self._get_value_from_facts(
        'SHUTDOWN_COMMAND_ARGS', distribution, 'DEFAULT_SHUTDOWN_COMMAND_ARGS')
    seconds = self.delay
    message = self._task.args.get('msg', self.DEFAULT_SHUTDOWN_MESSAGE)
    return template.format(delay_sec=seconds, delay_min=seconds // 60, message=message)
def get_distribution(self, task_vars):
    """Gather minimal facts on the target and return its distribution identity.

    Returns a dict with keys 'name' (lowercased distribution), 'version'
    (major version only, as text) and 'family' (lowercased OS family).
    Raises AnsibleError when the setup module fails or a fact is missing.
    """
    # FIXME: only execute the module if we don't already have the facts we need
    distribution = {}
    display.debug('{action}: running setup module to get distribution'.format(action=self._task.action))
    # 'min' subset keeps the fact-gathering round-trip as cheap as possible.
    module_output = self._execute_module(
        task_vars=task_vars,
        module_name='ansible.legacy.setup',
        module_args={'gather_subset': 'min'})
    try:
        if module_output.get('failed', False):
            raise AnsibleError('Failed to determine system distribution. {0}, {1}'.format(
                to_native(module_output['module_stdout']).strip(),
                to_native(module_output['module_stderr']).strip()))
        distribution['name'] = module_output['ansible_facts']['ansible_distribution'].lower()
        # Only the major version matters for the per-platform lookup tables.
        distribution['version'] = to_text(module_output['ansible_facts']['ansible_distribution_version'].split('.')[0])
        distribution['family'] = to_text(module_output['ansible_facts']['ansible_os_family'].lower())
        display.debug("{action}: distribution: {dist}".format(action=self._task.action, dist=distribution))
        return distribution
    except KeyError as ke:
        # A missing fact key means we cannot pick platform-specific behavior.
        raise AnsibleError('Failed to get distribution information. Missing "{0}" in output.'.format(ke.args[0]))
def get_shutdown_command(self, task_vars, distribution):
    """Locate the full path of the shutdown binary on the target host.

    Validates the user-supplied ``search_paths`` argument (a string or a
    flat list of strings), then runs the ``find`` module to resolve the
    distribution-specific shutdown command. Caches the resolved path on
    ``self._shutdown_command``.

    :param task_vars: task variables passed through to the find module
    :param distribution: dict with 'name', 'version' and 'family' keys
    :return: absolute path of the shutdown command
    :raises AnsibleError: on invalid search_paths or when the binary is
        not found in any of the search paths
    """
    shutdown_bin = self._get_value_from_facts('SHUTDOWN_COMMANDS', distribution, 'DEFAULT_SHUTDOWN_COMMAND')
    search_paths = self._task.args.get('search_paths', ['/sbin', '/usr/sbin', '/usr/local/sbin'])

    # FIXME: switch all this to user arg spec validation methods when they are available
    # A bare string is accepted as a one-element list
    if is_string(search_paths):
        search_paths = [search_paths]

    err_msg = "'search_paths' must be a string or flat list of strings, got {0}"
    try:
        # any() raises TypeError itself when search_paths is not iterable
        if not isinstance(search_paths, list) or any(not is_string(p) for p in search_paths):
            raise TypeError
    except TypeError:
        raise AnsibleError(err_msg.format(search_paths))

    display.debug('{action}: running find module looking in {paths} to get path for "{command}"'.format(
        action=self._task.action,
        command=shutdown_bin,
        paths=search_paths))
    find_result = self._execute_module(
        task_vars=task_vars,
        # prevent collection search by calling with ansible.legacy (still allows library/ override of find)
        module_name='ansible.legacy.find',
        module_args={
            'paths': search_paths,
            'patterns': [shutdown_bin],
            'file_type': 'any'
        }
    )

    matches = [entry['path'] for entry in find_result['files']]
    if not matches:
        raise AnsibleError('Unable to find command "{0}" in search paths: {1}'.format(shutdown_bin, search_paths))
    self._shutdown_command = matches[0]
    return self._shutdown_command
def perform_shutdown(self, task_vars, distribution):
    """Execute the shutdown command on the remote host.

    In check mode the command is not executed and success is assumed.
    A connection failure during execution is treated as success, since
    the host may drop the connection while shutting down.

    :param task_vars: task variables for locating the shutdown command
    :param distribution: dict with 'name', 'version' and 'family' keys
    :return: result dict; on failure contains 'failed', 'shutdown' and
        'msg', on success 'failed' (False) and 'shutdown_command'
    """
    result = {}
    shutdown_result = {}
    command = self.get_shutdown_command(task_vars, distribution)
    command_args = self.get_shutdown_command_args(distribution)
    full_command = '{0} {1}'.format(command, command_args)

    self.cleanup(force=True)
    try:
        display.vvv("{action}: shutting down server...".format(action=self._task.action))
        display.debug("{action}: shutting down server with command '{command}'".format(action=self._task.action, command=full_command))
        if self._play_context.check_mode:
            shutdown_result['rc'] = 0
        else:
            shutdown_result = self._low_level_execute_command(full_command, sudoable=self.DEFAULT_SUDOABLE)
    except AnsibleConnectionFailure as e:
        # If the connection is closed too quickly due to the system being shutdown, carry on
        display.debug('{action}: AnsibleConnectionFailure caught and handled: {error}'.format(action=self._task.action, error=to_text(e)))
        shutdown_result['rc'] = 0

    if shutdown_result['rc'] != 0:
        result['failed'] = True
        result['shutdown'] = False
        result['msg'] = "Shutdown command failed. Error was {stdout}, {stderr}".format(
            stdout=to_native(shutdown_result['stdout'].strip()),
            stderr=to_native(shutdown_result['stderr'].strip()))
        return result

    result['failed'] = False
    result['shutdown_command'] = full_command
    return result
def run(self, tmp=None, task_vars=None):
    """Action entry point: validate the connection, then shut down the host.

    :param tmp: unused legacy parameter, passed through to the base class
    :param task_vars: task variables dict (defaults to empty)
    :return: standard action result dict
    """
    self._supports_check_mode = True
    self._supports_async = True

    # Refuse to run over a local connection outside check mode:
    # we would shut down the control node itself.
    if self._connection.transport == 'local' and not self._play_context.check_mode:
        return {
            'changed': False,
            'elapsed': 0,
            'shutdown': False,
            'failed': True,
            'msg': 'Running {0} with local connection would shutdown the control node.'.format(self._task.action),
        }

    if task_vars is None:
        task_vars = {}

    result = super(ActionModule, self).run(tmp, task_vars)
    if result.get('skipped') or result.get('failed'):
        return result

    distribution = self.get_distribution(task_vars)

    # Initiate shutdown
    shutdown_result = self.perform_shutdown(task_vars, distribution)
    if shutdown_result['failed']:
        return shutdown_result

    result.update(shutdown=True, changed=True, shutdown_command=shutdown_result['shutdown_command'])
    return result

View File

@@ -40,7 +40,7 @@ DOCUMENTATION = '''
- name: ANSIBLE_DOAS_EXE
become_flags:
description: Options to pass to doas
default: ''
default:
ini:
- section: privilege_escalation
key: become_flags
@@ -117,8 +117,9 @@ class BecomeModule(BecomeBase):
if not self.get_option('become_pass') and '-n' not in flags:
flags += ' -n'
become_user = self.get_option('become_user')
user = '-u %s' % (become_user) if become_user else ''
user = self.get_option('become_user')
if user:
user = '-u %s' % (user)
success_cmd = self._build_success_command(cmd, shell, noexe=True)
executable = getattr(shell, 'executable', shell.SHELL_FAMILY)

View File

@@ -89,7 +89,8 @@ class BecomeModule(BecomeBase):
self.prompt = '[dzdo via ansible, key=%s] password:' % self._id
flags = '%s -p "%s"' % (flags.replace('-n', ''), self.prompt)
become_user = self.get_option('become_user')
user = '-u %s' % (become_user) if become_user else ''
user = self.get_option('become_user')
if user:
user = '-u %s' % (user)
return ' '.join([becomecmd, flags, user, self._build_success_command(cmd, shell)])

View File

@@ -13,6 +13,7 @@ DOCUMENTATION = '''
options:
become_user:
description: User you 'become' to execute the task
default: ''
ini:
- section: privilege_escalation
key: become_user

View File

@@ -97,8 +97,9 @@ class BecomeModule(BecomeBase):
become_exe = self.get_option('become_exe')
flags = self.get_option('become_flags')
become_user = self.get_option('become_user')
user = '-u %s' % (become_user) if become_user else ''
user = self.get_option('become_user')
if user:
user = '-u %s' % (user)
noexe = not self.get_option('wrap_exe')
return ' '.join([become_exe, flags, user, self._build_success_command(cmd, shell, noexe=noexe)])

View File

@@ -14,7 +14,7 @@ DOCUMENTATION = '''
become_user:
description:
- User you 'become' to execute the task
- This plugin ignores this setting as pfexec uses it's own C(exec_attr) to figure this out,
- This plugin ignores this setting as pfexec uses it's own ``exec_attr`` to figure this out,
but it is supplied here for Ansible to make decisions needed for the task execution, like file permissions.
default: root
ini:
@@ -80,8 +80,8 @@ DOCUMENTATION = '''
- name: ansible_pfexec_wrap_execution
env:
- name: ANSIBLE_PFEXEC_WRAP_EXECUTION
notes:
- This plugin ignores I(become_user) as pfexec uses it's own C(exec_attr) to figure this out.
note:
- This plugin ignores ``become_user`` as pfexec uses it's own ``exec_attr`` to figure this out.
'''
from ansible.plugins.become import BecomeBase

View File

@@ -6,7 +6,6 @@ from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
DOCUMENTATION = '''
author: Unknown (!UNKNOWN)
cache: memcached
short_description: Use memcached DB for cache
description:

View File

@@ -5,7 +5,6 @@ from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
DOCUMENTATION = '''
author: Unknown (!UNKNOWN)
cache: redis
short_description: Use Redis DB for cache
description:

View File

@@ -7,7 +7,6 @@ from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
DOCUMENTATION = '''
author: Unknown (!UNKNOWN)
callback: actionable
type: stdout
short_description: shows only items that need attention

View File

@@ -7,9 +7,8 @@ from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
DOCUMENTATION = '''
author: Unknown (!UNKNOWN)
callback: cgroup_memory_recap
type: aggregate
callback_type: aggregate
requirements:
- whitelist in configuration
- cgroups

View File

@@ -6,7 +6,6 @@ from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
DOCUMENTATION = '''
author: Unknown (!UNKNOWN)
callback: context_demo
type: aggregate
short_description: demo callback that adds play/task context

View File

@@ -6,9 +6,13 @@
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
from ansible import constants as C
from ansible.plugins.callback import CallbackBase
from ansible.utils.color import colorize, hostcolor
from ansible.template import Templar
from ansible.playbook.task_include import TaskInclude
DOCUMENTATION = '''
author: Unknown (!UNKNOWN)
callback: counter_enabled
type: stdout
short_description: adds counters to the output items (tasks and hosts/task)
@@ -22,12 +26,6 @@ DOCUMENTATION = '''
- set as stdout callback in ansible.cfg (stdout_callback = counter_enabled)
'''
from ansible import constants as C
from ansible.plugins.callback import CallbackBase
from ansible.utils.color import colorize, hostcolor
from ansible.template import Templar
from ansible.playbook.task_include import TaskInclude
class CallbackModule(CallbackBase):

View File

@@ -8,7 +8,7 @@ __metaclass__ = type
DOCUMENTATION = r'''
callback: diy
type: stdout
callback_type: stdout
short_description: Customize the output
version_added: 0.2.0
description:
@@ -629,7 +629,7 @@ playbook.yml: >
gather_facts: no
tasks:
- name: Default plugin output
ansible.builtin.debug:
debug:
msg: default plugin output
- name: Override from play vars
@@ -687,11 +687,11 @@ playbook.yml: >
tasks:
- name: Custom banner with default plugin result output
ansible.builtin.debug:
debug:
msg: "default plugin output: result example"
- name: Override from task vars
ansible.builtin.debug:
debug:
msg: "example {{ two }}"
changed_when: true
vars:
@@ -703,14 +703,14 @@ playbook.yml: >
ansible_callback_diy_runner_on_ok_msg_color: "{{ 'yellow' if ansible_callback_diy.result.is_changed else 'bright green' }}"
- name: Suppress output
ansible.builtin.debug:
debug:
msg: i should not be displayed
vars:
ansible_callback_diy_playbook_on_task_start_msg: ""
ansible_callback_diy_runner_on_ok_msg: ""
- name: Using alias vars (see ansible.cfg)
ansible.builtin.debug:
debug:
msg:
when: False
vars:
@@ -719,13 +719,13 @@ playbook.yml: >
on_skipped_msg_color: white
- name: Just stdout
ansible.builtin.command: echo some stdout
command: echo some stdout
vars:
ansible_callback_diy_playbook_on_task_start_msg: "\n"
ansible_callback_diy_runner_on_ok_msg: "{{ ansible_callback_diy.result.output.stdout }}\n"
- name: Multiline output
ansible.builtin.debug:
debug:
msg: "{{ multiline }}"
vars:
ansible_callback_diy_playbook_on_task_start_msg: "\nDIY output(via task vars): task example: {{ ansible_callback_diy.task.name }}"
@@ -738,7 +738,7 @@ playbook.yml: >
ansible_callback_diy_playbook_on_task_start_msg_color: bright blue
- name: Indentation
ansible.builtin.debug:
debug:
msg: "{{ item.msg }}"
with_items:
- { indent: 1, msg: one., color: red }
@@ -751,14 +751,14 @@ playbook.yml: >
ansible_callback_diy_runner_on_ok_msg_color: bright green
- name: Using lookup and template as file
ansible.builtin.shell: "echo {% raw %}'output from {{ file_name }}'{% endraw %} > {{ file_name }}"
shell: "echo {% raw %}'output from {{ file_name }}'{% endraw %} > {{ file_name }}"
vars:
ansible_callback_diy_playbook_on_task_start_msg: "\nDIY output(via task vars): task example: {{ ansible_callback_diy.task.name }}"
file_name: diy_file_template_example
ansible_callback_diy_runner_on_ok_msg: "{{ lookup('template', file_name) }}"
- name: 'Look at top level vars available to the "runner_on_ok" callback'
ansible.builtin.debug:
debug:
msg: ''
vars:
ansible_callback_diy_playbook_on_task_start_msg: "\nDIY output(via task vars): task example: {{ ansible_callback_diy.task.name }}"
@@ -771,7 +771,7 @@ playbook.yml: >
ansible_callback_diy_runner_on_ok_msg_color: white
- name: 'Look at event data available to the "runner_on_ok" callback'
ansible.builtin.debug:
debug:
msg: ''
vars:
ansible_callback_diy_playbook_on_task_start_msg: "\nDIY output(via task vars): task example: {{ ansible_callback_diy.task.name }}"

View File

@@ -7,7 +7,6 @@ from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
DOCUMENTATION = '''
author: Unknown (!UNKNOWN)
callback: full_skip
type: stdout
short_description: suppresses tasks if all hosts skipped

View File

@@ -6,9 +6,8 @@ from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
DOCUMENTATION = '''
author: Unknown (!UNKNOWN)
callback: hipchat
type: notification
callback_type: notification
requirements:
- whitelist in configuration.
- prettytable (python lib)

View File

@@ -6,7 +6,6 @@ from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
DOCUMENTATION = '''
author: Unknown (!UNKNOWN)
callback: jabber
type: notification
short_description: post task events to a jabber server

View File

@@ -6,7 +6,6 @@ from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
DOCUMENTATION = '''
author: Unknown (!UNKNOWN)
callback: log_plays
type: notification
short_description: write playbook output to log file

View File

@@ -5,9 +5,8 @@ from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
DOCUMENTATION = '''
author: Unknown (!UNKNOWN)
callback: logdna
type: aggregate
callback_type: aggregate
short_description: Sends playbook logs to LogDNA
description:
- This callback will report logs from playbook actions, tasks, and events to LogDNA (https://app.logdna.com)

View File

@@ -5,7 +5,6 @@ from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
DOCUMENTATION = '''
author: Unknown (!UNKNOWN)
callback: logentries
type: notification
short_description: Sends events to Logentries
@@ -76,7 +75,7 @@ examples: >
To enable, add this to your ansible.cfg file in the defaults block
[defaults]
callback_whitelist = community.general.logentries
callback_whitelist = logentries
Either set the environment variables
export LOGENTRIES_API=data.logentries.com

View File

@@ -6,7 +6,6 @@ from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
DOCUMENTATION = '''
author: Unknown (!UNKNOWN)
callback: logstash
type: notification
short_description: Sends events to Logstash
@@ -20,28 +19,16 @@ DOCUMENTATION = '''
description: Address of the Logstash server
env:
- name: LOGSTASH_SERVER
ini:
- section: callback_logstash
key: server
version_added: 1.0.0
default: localhost
port:
description: Port on which logstash is listening
env:
- name: LOGSTASH_PORT
ini:
- section: callback_logstash
key: port
version_added: 1.0.0
default: 5000
type:
description: Message type
env:
- name: LOGSTASH_TYPE
ini:
- section: callback_logstash
key: type
version_added: 1.0.0
default: ansible
'''
@@ -81,7 +68,7 @@ class CallbackModule(CallbackBase):
Requires:
python-logstash
This plugin makes use of the following environment variables or ini config:
This plugin makes use of the following environment variables:
LOGSTASH_SERVER (optional): defaults to localhost
LOGSTASH_PORT (optional): defaults to 5000
LOGSTASH_TYPE (optional): defaults to ansible
@@ -92,37 +79,30 @@ class CallbackModule(CallbackBase):
CALLBACK_NAME = 'community.general.logstash'
CALLBACK_NEEDS_WHITELIST = True
def __init__(self, display=None):
super(CallbackModule, self).__init__(display=display)
def __init__(self):
super(CallbackModule, self).__init__()
if not HAS_LOGSTASH:
self.disabled = True
self._display.warning("The required python-logstash is not installed. "
"pip install python-logstash")
else:
self.logger = logging.getLogger('python-logstash-logger')
self.logger.setLevel(logging.DEBUG)
self.handler = logstash.TCPLogstashHandler(
os.getenv('LOGSTASH_SERVER', 'localhost'),
int(os.getenv('LOGSTASH_PORT', 5000)),
version=1,
message_type=os.getenv('LOGSTASH_TYPE', 'ansible')
)
self.logger.addHandler(self.handler)
self.hostname = socket.gethostname()
self.session = str(uuid.uuid1())
self.errors = 0
self.start_time = datetime.utcnow()
def set_options(self, task_keys=None, var_options=None, direct=None):
super(CallbackModule, self).set_options(task_keys=task_keys, var_options=var_options, direct=direct)
self.logger = logging.getLogger('python-logstash-logger')
self.logger.setLevel(logging.DEBUG)
self.logstash_server = self.get_option('server')
self.logstash_port = self.get_option('port')
self.logstash_type = self.get_option('type')
self.handler = logstash.TCPLogstashHandler(
self.logstash_server,
int(self.logstash_port),
version=1,
message_type=self.logstash_type
)
self.logger.addHandler(self.handler)
self.hostname = socket.gethostname()
self.session = str(uuid.uuid1())
self.errors = 0
def v2_playbook_on_start(self, playbook):
self.playbook = playbook._file_name
data = {

View File

@@ -52,7 +52,7 @@ options:
ini:
- section: callback_mail
key: bcc
notes:
note:
- "TODO: expand configuration options now that plugins can leverage Ansible's configuration"
'''

View File

@@ -6,9 +6,8 @@ from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
DOCUMENTATION = '''
author: Unknown (!UNKNOWN)
callback: 'null'
type: stdout
callback_type: stdout
requirements:
- set as main display callback
short_description: Don't display stuff to screen

View File

@@ -1 +0,0 @@
say.py

View File

@@ -7,7 +7,6 @@ from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
DOCUMENTATION = '''
author: Unknown (!UNKNOWN)
callback: say
type: notification
requirements:

View File

@@ -6,9 +6,8 @@ from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
DOCUMENTATION = '''
author: Unknown (!UNKNOWN)
callback: selective
type: stdout
callback_type: stdout
requirements:
- set as main display callback
short_description: only print certain tasks
@@ -31,8 +30,8 @@ DOCUMENTATION = '''
'''
EXAMPLES = """
- ansible.builtin.debug: msg="This will not be printed"
- ansible.builtin.debug: msg="But this will"
- debug: msg="This will not be printed"
- debug: msg="But this will"
tags: [print_action]
"""
@@ -202,7 +201,7 @@ class CallbackModule(CallbackBase):
)
if 'results' in result._result:
for r in result._result['results']:
failed = 'failed' in r and r['failed']
failed = 'failed' in r
stderr = [r.get('exception', None), r.get('module_stderr', None)]
stderr = "\n".join([e for e in stderr if e]).strip()

View File

@@ -7,9 +7,8 @@ from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
DOCUMENTATION = '''
author: Unknown (!UNKNOWN)
callback: slack
type: notification
callback_type: notification
requirements:
- whitelist in configuration
- prettytable (python library)

View File

@@ -21,7 +21,7 @@ DOCUMENTATION = '''
callback: splunk
type: aggregate
short_description: Sends task result events to Splunk HTTP Event Collector
author: "Stuart Hirst (!UNKNOWN) <support@convergingdata.com>"
author: "Stuart Hirst <support@convergingdata.com>"
description:
- This callback plugin will send task results as JSON formatted events to a Splunk HTTP collector.
- The companion Splunk Monitoring & Diagnostics App is available here "https://splunkbase.splunk.com/app/4023/"
@@ -45,25 +45,13 @@ DOCUMENTATION = '''
ini:
- section: callback_splunk
key: authtoken
validate_certs:
description: Whether to validate certificates for connections to HEC. It is not recommended to set to
C(false) except when you are sure that nobody can intercept the connection
between this plugin and HEC, as setting it to C(false) allows man-in-the-middle attacks!
env:
- name: SPLUNK_VALIDATE_CERTS
ini:
- section: callback_splunk
key: validate_certs
type: bool
default: true
version_added: '1.0.0'
'''
EXAMPLES = '''
examples: >
To enable, add this to your ansible.cfg file in the defaults block
[defaults]
callback_whitelist = community.general.splunk
callback_whitelist = splunk
Set the environment variable
export SPLUNK_URL=http://mysplunkinstance.datapaas.io:8088/services/collector/event
export SPLUNK_AUTHTOKEN=f23blad6-5965-4537-bf69-5b5a545blabla88
@@ -96,7 +84,7 @@ class SplunkHTTPCollectorSource(object):
self.ip_address = socket.gethostbyname(socket.gethostname())
self.user = getpass.getuser()
def send_event(self, url, authtoken, validate_certs, state, result, runtime):
def send_event(self, url, authtoken, state, result, runtime):
if result._task_fields['args'].get('_ansible_check_mode') is True:
self.ansible_check_mode = True
@@ -141,8 +129,7 @@ class SplunkHTTPCollectorSource(object):
'Content-type': 'application/json',
'Authorization': 'Splunk ' + authtoken
},
method='POST',
validate_certs=validate_certs
method='POST'
)
@@ -157,7 +144,6 @@ class CallbackModule(CallbackBase):
self.start_datetimes = {} # Collect task start times
self.url = None
self.authtoken = None
self.validate_certs = None
self.splunk = SplunkHTTPCollectorSource()
def _runtime(self, result):
@@ -167,9 +153,7 @@ class CallbackModule(CallbackBase):
).total_seconds()
def set_options(self, task_keys=None, var_options=None, direct=None):
super(CallbackModule, self).set_options(task_keys=task_keys,
var_options=var_options,
direct=direct)
super(CallbackModule, self).set_options(task_keys=task_keys, var_options=var_options, direct=direct)
self.url = self.get_option('url')
@@ -191,8 +175,6 @@ class CallbackModule(CallbackBase):
'`SPLUNK_AUTHTOKEN` environment variable or '
'in the ansible.cfg file.')
self.validate_certs = self.get_option('validate_certs')
def v2_playbook_on_start(self, playbook):
self.splunk.ansible_playbook = basename(playbook._file_name)
@@ -206,7 +188,6 @@ class CallbackModule(CallbackBase):
self.splunk.send_event(
self.url,
self.authtoken,
self.validate_certs,
'OK',
result,
self._runtime(result)
@@ -216,7 +197,6 @@ class CallbackModule(CallbackBase):
self.splunk.send_event(
self.url,
self.authtoken,
self.validate_certs,
'SKIPPED',
result,
self._runtime(result)
@@ -226,7 +206,6 @@ class CallbackModule(CallbackBase):
self.splunk.send_event(
self.url,
self.authtoken,
self.validate_certs,
'FAILED',
result,
self._runtime(result)
@@ -236,7 +215,6 @@ class CallbackModule(CallbackBase):
self.splunk.send_event(
self.url,
self.authtoken,
self.validate_certs,
'FAILED',
result,
self._runtime(result)
@@ -246,7 +224,6 @@ class CallbackModule(CallbackBase):
self.splunk.send_event(
self.url,
self.authtoken,
self.validate_certs,
'UNREACHABLE',
result,
self._runtime(result)

View File

@@ -7,9 +7,8 @@ from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
DOCUMENTATION = '''
author: Unknown (!UNKNOWN)
callback: stderr
type: stdout
callback_type: stdout
requirements:
- set as main display callback
short_description: Splits output, sending failed tasks to stderr

View File

@@ -42,7 +42,7 @@ EXAMPLES = '''
examples: >
To enable, add this to your ansible.cfg file in the defaults block
[defaults]
callback_whitelist = community.general.sumologic
callback_whitelist = sumologic
Set the environment variable
export SUMOLOGIC_URL=https://endpoint1.collection.us2.sumologic.com/receiver/v1/http/R8moSv1d8EW9LAUFZJ6dbxCFxwLH6kfCdcBfddlfxCbLuL-BN5twcTpMk__pYy_cDmp==

View File

@@ -6,9 +6,8 @@ from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
DOCUMENTATION = '''
author: Unknown (!UNKNOWN)
callback: syslog_json
type: notification
callback_type: notification
requirements:
- whitelist in configuration
short_description: sends JSON events to syslog

View File

@@ -9,7 +9,7 @@ __metaclass__ = type
DOCUMENTATION = '''
callback: unixy
type: stdout
author: Allyson Bowles (@akatch)
author: Allyson Bowles <@akatch>
short_description: condensed Ansible output
description:
- Consolidated Ansible output in the style of LINUX/UNIX startup logs.

View File

@@ -6,7 +6,6 @@ from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
DOCUMENTATION = '''
author: Unknown (!UNKNOWN)
callback: yaml
type: stdout
short_description: yaml-ized Ansible screen output

View File

@@ -9,7 +9,7 @@ from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
DOCUMENTATION = '''
author: Maykel Moya (!UNKNOWN) <mmoya@speedyrails.com>
author: Maykel Moya <mmoya@speedyrails.com>
connection: chroot
short_description: Interact with local chroot
description:

View File

@@ -11,8 +11,8 @@ __metaclass__ = type
DOCUMENTATION = '''
author:
- Lorin Hochestein (!UNKNOWN)
- Leendert Brouwer (!UNKNOWN)
- Lorin Hochestein
- Leendert Brouwer
connection: docker
short_description: Run tasks in docker containers
description:

View File

@@ -9,7 +9,7 @@ from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
DOCUMENTATION = '''
author: Stephan Lohse (!UNKNOWN) <dev-github@ploek.org>
author: Stephan Lohse <dev-github@ploek.org>
connection: iocage
short_description: Run tasks in iocage jails
description:

View File

@@ -6,7 +6,7 @@ from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
DOCUMENTATION = '''
author: Joerg Thalheim (!UNKNOWN) <joerg@higgsboson.tk>
author: Joerg Thalheim <joerg@higgsboson.tk>
connection: lxc
short_description: Run tasks in lxc containers via lxc python library
description:

View File

@@ -6,7 +6,7 @@ from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
DOCUMENTATION = '''
author: Matt Clay (@mattclay) <matt@mystile.com>
author: Matt Clay <matt@mystile.com>
connection: lxd
short_description: Run tasks in lxc containers via lxc CLI
description:

View File

@@ -22,7 +22,7 @@ __metaclass__ = type
DOCUMENTATION = '''
author:
- xuxinkun (!UNKNOWN)
- xuxinkun
connection: oc
@@ -150,7 +150,7 @@ DOCUMENTATION = '''
from ansible_collections.community.kubernetes.plugins.connection.kubectl import Connection as KubectlConnection
CONNECTION_TRANSPORT = 'community.general.oc'
CONNECTION_TRANSPORT = 'oc'
CONNECTION_OPTIONS = {
'oc_container': '-c',

View File

@@ -0,0 +1,33 @@
# -*- coding: utf-8 -*-
# Copyright: (c) 2018, Ansible Project
# Copyright: (c) 2018, Abhijeet Kasurde (akasurde@redhat.com)
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
class ModuleDocFragment(object):
# Parameters for DigitalOcean modules
DOCUMENTATION = r'''
options:
oauth_token:
description:
- DigitalOcean OAuth token.
- "There are several other environment variables which can be used to provide this value."
- "i.e., - 'DO_API_TOKEN', 'DO_API_KEY', 'DO_OAUTH_TOKEN' and 'OAUTH_TOKEN'"
type: str
aliases: [ api_token ]
timeout:
description:
- The timeout in seconds used for polling DigitalOcean's API.
type: int
default: 30
validate_certs:
description:
- If set to C(no), the SSL certificates will not be validated.
- This should only set to C(no) used on personally controlled sites using self-signed certificates.
type: bool
default: yes
'''

View File

@@ -0,0 +1,37 @@
# -*- coding: utf-8 -*-
# Copyright: (c) 2016, Gregory Shulov <gregory.shulov@gmail.com>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
class ModuleDocFragment(object):
# Standard Infinibox documentation fragment
DOCUMENTATION = r'''
options:
system:
description:
- Infinibox Hostname or IPv4 Address.
type: str
required: true
user:
description:
- Infinibox User username with sufficient priveledges ( see notes ).
required: false
password:
description:
- Infinibox User password.
type: str
notes:
- This module requires infinisdk python library
- You must set INFINIBOX_USER and INFINIBOX_PASSWORD environment variables
if user and password arguments are not passed to the module directly
- Ansible uses the infinisdk configuration file C(~/.infinidat/infinisdk.ini) if no credentials are provided.
See U(http://infinisdk.readthedocs.io/en/latest/getting_started.html)
requirements:
- "python >= 2.7"
- infinisdk
'''

View File

@@ -0,0 +1,82 @@
# -*- coding: utf-8 -*-
# Copyright: (c) 2015, Jonathan Mainguy <jon@soh.re>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
class ModuleDocFragment(object):
# Standard mysql documentation fragment
DOCUMENTATION = r'''
options:
login_user:
description:
- The username used to authenticate with.
type: str
login_password:
description:
- The password used to authenticate with.
type: str
login_host:
description:
- Host running the database.
- In some cases for local connections the I(login_unix_socket=/path/to/mysqld/socket),
that is usually C(/var/run/mysqld/mysqld.sock), needs to be used instead of I(login_host=localhost).
type: str
default: localhost
login_port:
description:
- Port of the MySQL server. Requires I(login_host) be defined as other than localhost if login_port is used.
type: int
default: 3306
login_unix_socket:
description:
- The path to a Unix domain socket for local connections.
type: str
connect_timeout:
description:
- The connection timeout when connecting to the MySQL server.
type: int
default: 30
config_file:
description:
- Specify a config file from which user and password are to be read.
type: path
default: '~/.my.cnf'
ca_cert:
description:
- The path to a Certificate Authority (CA) certificate. This option, if used, must specify the same certificate
as used by the server.
type: path
aliases: [ ssl_ca ]
client_cert:
description:
- The path to a client public key certificate.
type: path
aliases: [ ssl_cert ]
client_key:
description:
- The path to the client private key.
type: path
aliases: [ ssl_key ]
requirements:
- PyMySQL (Python 2.7 and Python 3.X), or
- MySQLdb (Python 2.x)
notes:
- Requires the PyMySQL (Python 2.7 and Python 3.X) or MySQL-python (Python 2.X) package on the remote host.
The Python package may be installed with apt-get install python-pymysql (Ubuntu; see M(ansible.builtin.apt)) or
yum install python2-PyMySQL (RHEL/CentOS/Fedora; see M(ansible.builtin.yum)). You can also use dnf install python2-PyMySQL
for newer versions of Fedora; see M(ansible.builtin.dnf).
- Both C(login_password) and C(login_user) are required when you are
passing credentials. If none are present, the module will attempt to read
the credentials from C(~/.my.cnf), and finally fall back to using the MySQL
default login of 'root' with no password.
- If there are problems with local connections, using I(login_unix_socket=/path/to/mysqld/socket)
instead of I(login_host=localhost) might help. As an example, the default MariaDB installation of version 10.4
and later uses the unix_socket authentication plugin by default that
without using I(login_unix_socket=/var/run/mysqld/mysqld.sock) (the default path)
causes the error ``Host '127.0.0.1' is not allowed to connect to this MariaDB server``.
'''

View File

@@ -54,5 +54,5 @@ requirements:
notes:
- "In order to use this module you have to install oVirt Python SDK.
To ensure it's installed with correct version you can create the following task:
ansible.builtin.pip: name=ovirt-engine-sdk-python version=4.3.0"
pip: name=ovirt-engine-sdk-python version=4.3.0"
'''

View File

@@ -0,0 +1,57 @@
# -*- coding: utf-8 -*-
# Copyright: (c) 2017, Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
class ModuleDocFragment(object):
# Documentation fragment for ProxySQL connectivity
CONNECTIVITY = r'''
options:
login_user:
description:
- The username used to authenticate to ProxySQL admin interface.
type: str
login_password:
description:
- The password used to authenticate to ProxySQL admin interface.
type: str
login_host:
description:
- The host used to connect to ProxySQL admin interface.
type: str
default: '127.0.0.1'
login_port:
description:
- The port used to connect to ProxySQL admin interface.
type: int
default: 6032
config_file:
description:
- Specify a config file from which I(login_user) and I(login_password)
are to be read.
type: path
default: ''
requirements:
- PyMySQL (Python 2.7 and Python 3.X), or
- MySQLdb (Python 2.x)
'''
# Documentation fragment for managing ProxySQL configuration
MANAGING_CONFIG = r'''
options:
save_to_disk:
description:
- Save config to sqlite db on disk to persist the configuration.
type: bool
default: 'yes'
load_to_runtime:
description:
- Dynamically load config to runtime memory.
type: bool
default: 'yes'
'''

View File

@@ -16,7 +16,6 @@ options:
description:
- Scaleway OAuth token.
type: str
required: true
aliases: [ oauth_token ]
api_url:
description:

View File

@@ -1,94 +0,0 @@
# (c) 2015, Filipe Niero Felisbino <filipenf@gmail.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
# contributed by Kelly Brazil <kellyjonbrazil@gmail.com>
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
from ansible.errors import AnsibleError, AnsibleFilterError
import importlib
try:
import jc
HAS_LIB = True
except ImportError:
HAS_LIB = False
def jc(data, parser, quiet=True, raw=False):
    """Convert returned command output to JSON using the JC library.

    Note: this function intentionally shadows the imported ``jc`` module; the
    parser module is resolved dynamically via importlib, so the shadowing is
    harmless.

    Arguments:

        parser      required    (string) the correct parser for the input data (e.g. 'ifconfig')
                                see https://github.com/kellyjonbrazil/jc#parsers for latest list of parsers.
        quiet       optional    (bool) True to suppress warning messages (default is True)
        raw         optional    (bool) True to return pre-processed JSON (default is False)

    Returns:

        dictionary or list of dictionaries

    Example:

        - name: run date command
          hosts: ubuntu
          tasks:
          - shell: date
            register: result
          - set_fact:
              myvar: "{{ result.stdout | community.general.jc('date') }}"
          - debug:
              msg: "{{ myvar }}"

    Raises AnsibleError when the jc library is not installed, and
    AnsibleFilterError when the parser fails on the given input.
    """
    # Bail out early if the optional jc dependency was not importable.
    if not HAS_LIB:
        raise AnsibleError('You need to install "jc" prior to running jc filter')

    parser_module_path = 'jc.parsers.' + parser
    try:
        parser_module = importlib.import_module(parser_module_path)
        parsed_output = parser_module.parse(data, quiet=quiet, raw=raw)
    except Exception as e:
        # Wrap any parser/import failure in a filter-specific error.
        raise AnsibleFilterError('Error in jc filter plugin: %s' % e)
    return parsed_output
class FilterModule(object):
    """Registers the jc conversion filter with Ansible."""

    def filters(self):
        # Map each exposed filter name to its implementation.
        filter_map = {'jc': jc}
        return filter_map

View File

@@ -29,7 +29,7 @@ except ImportError:
def json_query(data, expr):
'''Query data using jmespath query language ( http://jmespath.org ). Example:
- ansible.builtin.debug: msg="{{ instance | json_query(tagged_instances[*].block_device_mapping.*.volume_id') }}"
- debug: msg="{{ instance | json_query(tagged_instances[*].block_device_mapping.*.volume_id') }}"
'''
if not HAS_LIB:
raise AnsibleError('You need to install "jmespath" prior to running '

View File

@@ -1,279 +0,0 @@
# -*- coding: utf-8 -*-
# Copyright (C) 2020 Orion Poplawski <orion@nwra.com>
# Copyright (c) 2020 Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
DOCUMENTATION = '''
author: Orion Poplawski (@opoplawski)
name: cobbler
plugin_type: inventory
short_description: Cobbler inventory source
version_added: 1.0.0
description:
- Get inventory hosts from the cobbler service.
- "Uses a configuration file as an inventory source, it must end in C(.cobbler.yml) or C(.cobbler.yaml) and has a C(plugin: cobbler) entry."
extends_documentation_fragment:
- inventory_cache
options:
plugin:
description: The name of this plugin, it should always be set to C(community.general.cobbler) for this plugin to recognize it as it's own.
required: yes
choices: [ 'cobbler', 'community.general.cobbler' ]
url:
description: URL to cobbler.
default: 'http://cobbler/cobbler_api'
env:
- name: COBBLER_SERVER
user:
description: Cobbler authentication user.
required: no
env:
- name: COBBLER_USER
password:
description: Cobbler authentication password
required: no
env:
- name: COBBLER_PASSWORD
cache_fallback:
description: Fallback to cached results if connection to cobbler fails
type: boolean
default: no
exclude_profiles:
description: Profiles to exclude from inventory
type: list
default: []
elements: str
group_by:
description: Keys to group hosts by
type: list
default: [ 'mgmt_classes', 'owners', 'status' ]
group:
description: Group to place all hosts into
default: cobbler
group_prefix:
description: Prefix to apply to cobbler groups
default: cobbler_
want_facts:
description: Toggle, if C(true) the plugin will retrieve host facts from the server
type: boolean
default: yes
'''
EXAMPLES = '''
# my.cobbler.yml
plugin: community.general.cobbler
url: http://cobbler/cobbler_api
user: ansible-tester
password: secure
'''
from distutils.version import LooseVersion
import socket
from ansible.errors import AnsibleError
from ansible.module_utils._text import to_bytes, to_native, to_text
from ansible.module_utils.common._collections_compat import MutableMapping
from ansible.module_utils.six import iteritems
from ansible.plugins.inventory import BaseInventoryPlugin, Cacheable, to_safe_group_name
# xmlrpc
try:
import xmlrpclib as xmlrpc_client
HAS_XMLRPC_CLIENT = True
except ImportError:
try:
import xmlrpc.client as xmlrpc_client
HAS_XMLRPC_CLIENT = True
except ImportError:
HAS_XMLRPC_CLIENT = False
class InventoryModule(BaseInventoryPlugin, Cacheable):
    ''' Host inventory parser for ansible using cobbler as source. '''

    NAME = 'community.general.cobbler'

    def __init__(self):
        # Initialize connection/cache state; real values are filled in by
        # parse() after the config file has been read.
        super(InventoryModule, self).__init__()

        # from config
        self.cobbler_url = None

        self.exclude_profiles = []  # A list of profiles to exclude
        self.connection = None  # lazily-created XML-RPC proxy (see _get_connection)
        self.token = None  # Cobbler auth token when a user is configured
        self.cache_key = None
        self.use_cache = None

    def verify_file(self, path):
        """Accept only inventory sources ending in cobbler.yaml/cobbler.yml."""
        valid = False
        if super(InventoryModule, self).verify_file(path):
            if path.endswith(('cobbler.yaml', 'cobbler.yml')):
                valid = True
            else:
                self.display.vvv('Skipping due to inventory source not ending in "cobbler.yaml" nor "cobbler.yml"')
        return valid

    def _get_connection(self):
        """Return a cached XML-RPC proxy to Cobbler, logging in when a user is set.

        Raises AnsibleError if no xmlrpc client library could be imported.
        """
        if not HAS_XMLRPC_CLIENT:
            raise AnsibleError('Could not import xmlrpc client library')

        if self.connection is None:
            self.display.vvvv('Connecting to %s\n' % self.cobbler_url)
            self.connection = xmlrpc_client.Server(self.cobbler_url, allow_none=True)
            self.token = None
            if self.get_option('user') is not None:
                self.token = self.connection.login(self.get_option('user'), self.get_option('password'))
        return self.connection

    def _init_cache(self):
        # Ensure a cache bucket exists for this inventory source.
        if self.cache_key not in self._cache:
            self._cache[self.cache_key] = {}

    def _reload_cache(self):
        """Fall back to cached results when the server is unreachable (if enabled)."""
        if self.get_option('cache_fallback'):
            self.display.vvv('Cannot connect to server, loading cache\n')
            self._options['cache_timeout'] = 0
            self.load_cache_plugin()
            # NOTE(review): this .get() result is discarded; load_cache_plugin()
            # presumably repopulates self._cache — confirm intent.
            self._cache.get(self.cache_key, {})

    def _get_profiles(self):
        """Return Cobbler profiles, from the server or from cache.

        On connection errors the cache fallback is attempted; if that also
        yields nothing, the final lookup raises KeyError.
        """
        if not self.use_cache or 'profiles' not in self._cache.get(self.cache_key, {}):
            c = self._get_connection()
            try:
                if self.token is not None:
                    data = c.get_profiles(self.token)
                else:
                    data = c.get_profiles()
            except (socket.gaierror, socket.error, xmlrpc_client.ProtocolError):
                self._reload_cache()
            else:
                self._init_cache()
                self._cache[self.cache_key]['profiles'] = data

        return self._cache[self.cache_key]['profiles']

    def _get_systems(self):
        """Return Cobbler systems (hosts), from the server or from cache."""
        if not self.use_cache or 'systems' not in self._cache.get(self.cache_key, {}):
            c = self._get_connection()
            try:
                if self.token is not None:
                    data = c.get_systems(self.token)
                else:
                    data = c.get_systems()
            except (socket.gaierror, socket.error, xmlrpc_client.ProtocolError):
                self._reload_cache()
            else:
                self._init_cache()
                self._cache[self.cache_key]['systems'] = data

        return self._cache[self.cache_key]['systems']

    def _add_safe_group_name(self, group, child=None):
        """Add a prefixed, sanitized group (spaces stripped, lowercased); optionally attach child."""
        group_name = self.inventory.add_group(to_safe_group_name('%s%s' % (self.get_option('group_prefix'), group.lower().replace(" ", ""))))
        if child is not None:
            self.inventory.add_child(group_name, child)
        return group_name

    def parse(self, inventory, loader, path, cache=True):
        """Populate the inventory from Cobbler profiles and systems."""
        super(InventoryModule, self).parse(inventory, loader, path)

        # read config from file, this sets 'options'
        self._read_config_data(path)

        # get connection host
        self.cobbler_url = self.get_option('url')
        self.cache_key = self.get_cache_key(path)
        self.use_cache = cache and self.get_option('cache')

        self.exclude_profiles = self.get_option('exclude_profiles')
        self.group_by = self.get_option('group_by')

        for profile in self._get_profiles():
            if profile['parent']:
                # Profile has an explicit parent: mirror the parent/child
                # relationship as nested inventory groups.
                self.display.vvvv('Processing profile %s with parent %s\n' % (profile['name'], profile['parent']))
                if profile['parent'] not in self.exclude_profiles:
                    parent_group_name = self._add_safe_group_name(profile['parent'])
                    self.display.vvvv('Added profile parent group %s\n' % parent_group_name)
                    if profile['name'] not in self.exclude_profiles:
                        group_name = self._add_safe_group_name(profile['name'])
                        self.display.vvvv('Added profile group %s\n' % group_name)
                        self.inventory.add_child(parent_group_name, group_name)
            else:
                self.display.vvvv('Processing profile %s without parent\n' % profile['name'])
                # Create a hierarchy of profile names: each dash-separated
                # prefix of the name becomes a group containing the next
                # longer prefix (e.g. a-b-c -> a > a-b > a-b-c).
                profile_elements = profile['name'].split('-')
                i = 0
                while i < len(profile_elements) - 1:
                    profile_group = '-'.join(profile_elements[0:i + 1])
                    profile_group_child = '-'.join(profile_elements[0:i + 2])
                    if profile_group in self.exclude_profiles:
                        self.display.vvvv('Excluding profile %s\n' % profile_group)
                        break
                    group_name = self._add_safe_group_name(profile_group)
                    self.display.vvvv('Added profile group %s\n' % group_name)
                    child_group_name = self._add_safe_group_name(profile_group_child)
                    self.display.vvvv('Added profile child group %s to %s\n' % (child_group_name, group_name))
                    self.inventory.add_child(group_name, child_group_name)
                    i = i + 1

        # Add default group for this inventory if specified
        self.group = to_safe_group_name(self.get_option('group'))
        if self.group is not None and self.group != '':
            self.inventory.add_group(self.group)
            self.display.vvvv('Added site group %s\n' % self.group)

        for host in self._get_systems():
            # Get the FQDN for the host and add it to the right groups
            hostname = host['hostname']  # may be empty for non-static-IP hosts
            interfaces = host['interfaces']

            if host['profile'] in self.exclude_profiles:
                self.display.vvvv('Excluding host %s in profile %s\n' % (host['name'], host['profile']))
                continue

            # hostname is often empty for non-static IP hosts; fall back to
            # the dns_name of a management or non-static interface.
            if hostname == '':
                for (iname, ivalue) in iteritems(interfaces):
                    if ivalue['management'] or not ivalue['static']:
                        this_dns_name = ivalue.get('dns_name', None)
                        if this_dns_name is not None and this_dns_name != "":
                            hostname = this_dns_name
                            self.display.vvvv('Set hostname to %s from %s\n' % (hostname, iname))

            if hostname == '':
                self.display.vvvv('Cannot determine hostname for host %s, skipping\n' % host['name'])
                continue

            self.inventory.add_host(hostname)
            self.display.vvvv('Added host %s hostname %s\n' % (host['name'], hostname))

            # Add host to profile group
            group_name = self._add_safe_group_name(host['profile'], child=hostname)
            self.display.vvvv('Added host %s to profile group %s\n' % (hostname, group_name))

            # Add host to groups specified by group_by fields; '<<inherit>>'
            # means the field is unset on this system, so no group is created.
            for group_by in self.group_by:
                if host[group_by] == '<<inherit>>':
                    groups = []
                else:
                    groups = [host[group_by]] if isinstance(host[group_by], str) else host[group_by]
                for group in groups:
                    group_name = self._add_safe_group_name(group, child=hostname)
                    self.display.vvvv('Added host %s to group_by %s group %s\n' % (hostname, group_by, group_name))

            # Add to group for this inventory
            if self.group is not None:
                self.inventory.add_child(self.group, hostname)

            # Add host variables: expose the whole Cobbler system record
            # under the 'cobbler' hostvar when want_facts is enabled.
            if self.get_option('want_facts'):
                try:
                    self.inventory.set_variable(hostname, 'cobbler', host)
                except ValueError as e:
                    self.display.warning("Could not set host info for %s: %s" % (hostname, to_text(e)))

View File

@@ -24,7 +24,7 @@ DOCUMENTATION = '''
plugin:
description: token that ensures this is a source file for the C(docker_machine) plugin.
required: yes
choices: ['docker_machine', 'community.general.docker_machine']
choices: ['docker_machine']
daemon_env:
description:
- Whether docker daemon connection environment variables should be fetched, and how to behave if they cannot be fetched.
@@ -54,7 +54,7 @@ DOCUMENTATION = '''
EXAMPLES = '''
# Minimal example
plugin: community.general.docker_machine
plugin: docker_machine
# Example using constructed features to create a group per Docker Machine driver
# (https://docs.docker.com/machine/drivers/), e.g.:

View File

@@ -26,11 +26,11 @@ DOCUMENTATION = '''
I(nonleaders) - all nodes except the swarm leader."
options:
plugin:
description: The name of this plugin, it should always be set to C(community.general.docker_swarm)
for this plugin to recognize it as it's own.
description: The name of this plugin, it should always be set to C(docker_swarm) for this plugin to
recognize it as it's own.
type: str
required: true
choices: [ docker_swarm, community.general.docker_swarm ]
choices: docker_swarm
docker_host:
description:
- Socket of a Docker swarm manager node (C(tcp), C(unix)).
@@ -101,20 +101,20 @@ DOCUMENTATION = '''
EXAMPLES = '''
# Minimal example using local docker
plugin: community.general.docker_swarm
plugin: docker_swarm
docker_host: unix://var/run/docker.sock
# Minimal example using remote docker
plugin: community.general.docker_swarm
plugin: docker_swarm
docker_host: tcp://my-docker-host:2375
# Example using remote docker with unverified TLS
plugin: community.general.docker_swarm
plugin: docker_swarm
docker_host: tcp://my-docker-host:2376
tls: yes
# Example using remote docker with verified TLS and client certificate verification
plugin: community.general.docker_swarm
plugin: docker_swarm
docker_host: tcp://my-docker-host:2376
validate_certs: yes
ca_cert: /somewhere/ca.pem
@@ -122,7 +122,7 @@ client_key: /somewhere/key.pem
client_cert: /somewhere/cert.pem
# Example using constructed features to create groups and set ansible_host
plugin: community.general.docker_swarm
plugin: docker_swarm
docker_host: tcp://my-docker-host:2375
strict: False
keyed_groups:

View File

@@ -10,8 +10,8 @@ __metaclass__ = type
DOCUMENTATION = '''
name: gitlab_runners
plugin_type: inventory
author:
- Stefan Heitmüller (@morph027) <stefan.heitmueller@gmx.com>
authors:
- Stefan Heitmüller (stefan.heitmueller@gmx.com)
short_description: Ansible dynamic inventory plugin for GitLab runners.
requirements:
- python >= 2.7
@@ -28,29 +28,19 @@ DOCUMENTATION = '''
required: true
choices:
- gitlab_runners
- community.general.gitlab_runners
server_url:
description: The URL of the GitLab server, with protocol (i.e. http or https).
env:
- name: GITLAB_SERVER_URL
version_added: 1.0.0
type: str
required: true
default: https://gitlab.com
api_token:
description: GitLab token for logging in.
env:
- name: GITLAB_API_TOKEN
version_added: 1.0.0
type: str
aliases:
- private_token
- access_token
filter:
description: filter runners from GitLab API
env:
- name: GITLAB_FILTER
version_added: 1.0.0
type: str
choices: ['active', 'paused', 'online', 'specific', 'shared']
verbose_output:
@@ -61,11 +51,11 @@ DOCUMENTATION = '''
EXAMPLES = '''
# gitlab_runners.yml
plugin: community.general.gitlab_runners
plugin: gitlab_runners
host: https://gitlab.com
# Example using constructed features to create groups and set ansible_host
plugin: community.general.gitlab_runners
plugin: gitlab_runners
host: https://gitlab.com
strict: False
keyed_groups:

View File

@@ -24,7 +24,7 @@ DOCUMENTATION = '''
plugin:
description: token that ensures this is a source file for the 'kubevirt' plugin.
required: True
choices: ['kubevirt', 'community.general.kubevirt']
choices: ['kubevirt']
type: str
host_format:
description:
@@ -123,7 +123,7 @@ EXAMPLES = '''
# File must be named kubevirt.yaml or kubevirt.yml
# Authenticate with token, and return all virtual machines for all namespaces
plugin: community.general.kubevirt
plugin: kubevirt
connections:
- host: https://kubevirt.io
token: xxxxxxxxxxxxxxxx
@@ -131,7 +131,7 @@ connections:
# Use default config (~/.kube/config) file and active context, and return vms with interfaces
# connected to network myovsnetwork and from namespace vms
plugin: community.general.kubevirt
plugin: kubevirt
connections:
- namespaces:
- vms

View File

@@ -22,7 +22,7 @@ DOCUMENTATION = r'''
plugin:
description: marks this as an instance of the 'linode' plugin
required: true
choices: ['linode', 'community.general.linode']
choices: ['linode']
access_token:
description: The Linode account personal access token.
required: true
@@ -42,10 +42,10 @@ DOCUMENTATION = r'''
EXAMPLES = r'''
# Minimal example. `LINODE_ACCESS_TOKEN` is exposed in environment.
plugin: community.general.linode
plugin: linode
# Example with regions, types, groups and access token
plugin: community.general.linode
plugin: linode
access_token: foobar
regions:
- eu-west

View File

@@ -5,7 +5,6 @@ from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
DOCUMENTATION = '''
author: Unknown (!UNKNOWN)
name: nmap
plugin_type: inventory
short_description: Uses nmap to find hosts to target
@@ -20,7 +19,7 @@ DOCUMENTATION = '''
plugin:
description: token that ensures this is a source file for the 'nmap' plugin.
required: True
choices: ['nmap', 'community.general.nmap']
choices: ['nmap']
address:
description: Network IP or range of IPs to scan, you can use a simple range (10.2.2.15-25) or CIDR notation.
required: True
@@ -44,10 +43,10 @@ DOCUMENTATION = '''
- 'TODO: add OS fingerprinting'
'''
EXAMPLES = '''
# inventory.config file in YAML format
plugin: community.general.nmap
strict: False
address: 192.168.0.0/24
# inventory.config file in YAML format
plugin: nmap
strict: False
address: 192.168.0.0/24
'''
import os

View File

@@ -18,7 +18,7 @@ DOCUMENTATION = '''
plugin:
description: token that ensures this is a source file for the 'online' plugin.
required: True
choices: ['online', 'community.general.online']
choices: ['online']
oauth_token:
required: True
description: Online OAuth token.
@@ -49,7 +49,7 @@ EXAMPLES = '''
# online_inventory.yml file in YAML format
# Example command line: ansible-inventory --list -i online_inventory.yml
plugin: community.general.online
plugin: online
hostnames:
- public_ipv4
groups:

View File

@@ -1,348 +0,0 @@
# -*- coding: utf-8 -*-
# Copyright (C) 2016 Guido Günther <agx@sigxcpu.org>, Daniel Lobato Garcia <dlobatog@redhat.com>
# Copyright (c) 2018 Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
DOCUMENTATION = '''
name: proxmox
plugin_type: inventory
short_description: Proxmox inventory source
version_added: "1.2.0"
author:
- Jeffrey van Pelt (@Thulium-Drake) <jeff@vanpelt.one>
requirements:
- requests >= 1.1
description:
- Get inventory hosts from a Proxmox PVE cluster.
- "Uses a configuration file as an inventory source, it must end in C(.proxmox.yml) or C(.proxmox.yaml)"
- Will retrieve the first network interface with an IP for Proxmox nodes.
- Can retrieve LXC/QEMU configuration as facts.
extends_documentation_fragment:
- inventory_cache
options:
plugin:
description: The name of this plugin, it should always be set to C(community.general.proxmox) for this plugin to recognize it as it's own.
required: yes
choices: ['community.general.proxmox']
type: str
url:
description: URL to Proxmox cluster.
default: 'http://localhost:8006'
type: str
user:
description: Proxmox authentication user.
required: yes
type: str
password:
description: Proxmox authentication password.
required: yes
type: str
validate_certs:
description: Verify SSL certificate if using HTTPS.
type: boolean
default: yes
group_prefix:
description: Prefix to apply to Proxmox groups.
default: proxmox_
type: str
facts_prefix:
description: Prefix to apply to LXC/QEMU config facts.
default: proxmox_
type: str
want_facts:
description: Gather LXC/QEMU configuration facts.
default: no
type: bool
'''
EXAMPLES = '''
# my.proxmox.yml
plugin: community.general.proxmox
url: http://localhost:8006
user: ansible@pve
password: secure
validate_certs: no
'''
import re
from ansible.module_utils.common._collections_compat import MutableMapping
from distutils.version import LooseVersion
from ansible.errors import AnsibleError
from ansible.plugins.inventory import BaseInventoryPlugin, Cacheable
from ansible.module_utils.six.moves.urllib.parse import urlencode
# 3rd party imports
try:
import requests
if LooseVersion(requests.__version__) < LooseVersion('1.1.0'):
raise ImportError
HAS_REQUESTS = True
except ImportError:
HAS_REQUESTS = False
class InventoryModule(BaseInventoryPlugin, Cacheable):
    ''' Host inventory parser for ansible using Proxmox as source. '''

    NAME = 'community.general.proxmox'

    def __init__(self):
        # Connection/cache state is filled in by parse() after the config
        # file has been read.
        super(InventoryModule, self).__init__()

        # from config
        self.proxmox_url = None

        self.session = None  # lazily-created requests session (see _get_session)
        self.cache_key = None
        self.use_cache = None

    def verify_file(self, path):
        """Accept only inventory sources ending in proxmox.yaml/proxmox.yml."""
        valid = False
        if super(InventoryModule, self).verify_file(path):
            if path.endswith(('proxmox.yaml', 'proxmox.yml')):
                valid = True
            else:
                self.display.vvv('Skipping due to inventory source not ending in "proxmox.yaml" nor "proxmox.yml"')
        return valid

    def _get_session(self):
        # Reuse a single requests session; TLS verification follows the
        # validate_certs option.
        if not self.session:
            self.session = requests.session()
            self.session.verify = self.get_option('validate_certs')
        return self.session

    def _get_auth(self):
        """Obtain a PVE auth ticket and CSRF token, stored in self.credentials."""
        credentials = urlencode({'username': self.proxmox_user, 'password': self.proxmox_password, })

        a = self._get_session()
        ret = a.post('%s/api2/json/access/ticket' % self.proxmox_url, data=credentials)

        json = ret.json()

        self.credentials = {
            'ticket': json['data']['ticket'],
            'CSRFPreventionToken': json['data']['CSRFPreventionToken'],
        }

    def _get_json(self, url, ignore_errors=None):
        """GET a Proxmox API URL and return its payload, with per-URL caching.

        ignore_errors, if given, is a collection of HTTP status codes that
        short-circuit the request (an empty list is returned/cached).
        """
        if not self.use_cache or url not in self._cache.get(self.cache_key, {}):

            if self.cache_key not in self._cache:
                self._cache[self.cache_key] = {'url': ''}

            data = []
            s = self._get_session()
            # NOTE(review): every branch below breaks, so this loop runs at
            # most once; it appears to be scaffolding for pagination.
            while True:
                headers = {'Cookie': 'PVEAuthCookie={0}'.format(self.credentials['ticket'])}
                ret = s.get(url, headers=headers)
                if ignore_errors and ret.status_code in ignore_errors:
                    break
                ret.raise_for_status()
                json = ret.json()

                # process results
                # FIXME: This assumes 'return type' matches a specific query,
                #        it will break if we expand the queries and they dont have different types
                if 'data' not in json:
                    # /hosts/:id does not have a 'data' key
                    data = json
                    break
                elif isinstance(json['data'], MutableMapping):
                    # /facts are returned as dict in 'data'
                    data = json['data']
                    break
                else:
                    # /hosts 's 'results' is a list of all hosts, returned is paginated
                    data = data + json['data']
                    break

            self._cache[self.cache_key][url] = data

        return self._cache[self.cache_key][url]

    def _get_nodes(self):
        # List of cluster nodes.
        return self._get_json("%s/api2/json/nodes" % self.proxmox_url)

    def _get_pools(self):
        # List of resource pools.
        return self._get_json("%s/api2/json/pools" % self.proxmox_url)

    def _get_lxc_per_node(self, node):
        # LXC containers defined on a given node.
        return self._get_json("%s/api2/json/nodes/%s/lxc" % (self.proxmox_url, node))

    def _get_qemu_per_node(self, node):
        # QEMU VMs defined on a given node.
        return self._get_json("%s/api2/json/nodes/%s/qemu" % (self.proxmox_url, node))

    def _get_members_per_pool(self, pool):
        # Members (VMs/containers) of a resource pool.
        ret = self._get_json("%s/api2/json/pools/%s" % (self.proxmox_url, pool))
        return ret['members']

    def _get_node_ip(self, node):
        """Return the address of the first network interface, or None.

        Note: this returns on the very first iteration (address or None), so
        interfaces after the first are never consulted.
        """
        ret = self._get_json("%s/api2/json/nodes/%s/network" % (self.proxmox_url, node))

        for iface in ret:
            try:
                return iface['address']
            except Exception:
                return None

    def _get_vm_config(self, node, vmid, vmtype, name):
        """Fetch a VM/container config and expose each entry as a prefixed hostvar.

        Comma-separated values are split into dicts; disk entries get a
        synthetic 'disk_image' key first since their value has no key.
        """
        ret = self._get_json("%s/api2/json/nodes/%s/%s/%s/config" % (self.proxmox_url, node, vmtype, vmid))

        vmid_key = 'vmid'
        vmid_key = self.to_safe('%s%s' % (self.get_option('facts_prefix'), vmid_key.lower()))
        self.inventory.set_variable(name, vmid_key, vmid)

        vmtype_key = 'vmtype'
        vmtype_key = self.to_safe('%s%s' % (self.get_option('facts_prefix'), vmtype_key.lower()))
        self.inventory.set_variable(name, vmtype_key, vmtype)

        for config in ret:
            key = config
            key = self.to_safe('%s%s' % (self.get_option('facts_prefix'), key.lower()))
            value = ret[config]
            try:
                # fixup disk images as they have no key
                if config == 'rootfs' or config.startswith(('virtio', 'sata', 'ide', 'scsi')):
                    value = ('disk_image=' + value)

                if isinstance(value, int) or ',' not in value:
                    value = value
                # split off strings with commas to a dict
                else:
                    # skip over any keys that cannot be processed
                    try:
                        value = dict(key.split("=") for key in value.split(","))
                    except Exception:
                        continue

                self.inventory.set_variable(name, key, value)
            except NameError:
                return None

    def _get_vm_status(self, node, vmid, vmtype, name):
        """Fetch current VM/container status and expose it as a prefixed hostvar."""
        ret = self._get_json("%s/api2/json/nodes/%s/%s/%s/status/current" % (self.proxmox_url, node, vmtype, vmid))
        status = ret['status']
        status_key = 'status'
        status_key = self.to_safe('%s%s' % (self.get_option('facts_prefix'), status_key.lower()))
        self.inventory.set_variable(name, status_key, status)

    def to_safe(self, word):
        '''Converts 'bad' characters in a string to underscores so they can be used as Ansible groups
        #> ProxmoxInventory.to_safe("foo-bar baz")
        'foo_barbaz'
        '''
        regex = r"[^A-Za-z0-9\_]"
        return re.sub(regex, "_", word.replace(" ", ""))

    def _populate(self):
        """Build groups and hosts from nodes, their LXC/QEMU guests, and pools."""
        self._get_auth()

        # gather vm's on nodes
        for node in self._get_nodes():
            # FIXME: this can probably be cleaner
            # create groups
            lxc_group = 'all_lxc'
            lxc_group = self.to_safe('%s%s' % (self.get_option('group_prefix'), lxc_group.lower()))
            self.inventory.add_group(lxc_group)
            qemu_group = 'all_qemu'
            qemu_group = self.to_safe('%s%s' % (self.get_option('group_prefix'), qemu_group.lower()))
            self.inventory.add_group(qemu_group)
            nodes_group = 'nodes'
            nodes_group = self.to_safe('%s%s' % (self.get_option('group_prefix'), nodes_group.lower()))
            self.inventory.add_group(nodes_group)
            running_group = 'all_running'
            running_group = self.to_safe('%s%s' % (self.get_option('group_prefix'), running_group.lower()))
            self.inventory.add_group(running_group)
            stopped_group = 'all_stopped'
            stopped_group = self.to_safe('%s%s' % (self.get_option('group_prefix'), stopped_group.lower()))
            self.inventory.add_group(stopped_group)

            if node.get('node'):
                self.inventory.add_host(node['node'])

                if node['type'] == 'node':
                    self.inventory.add_child(nodes_group, node['node'])

                    # get node IP address
                    ip = self._get_node_ip(node['node'])
                    self.inventory.set_variable(node['node'], 'ansible_host', ip)

                    # get LXC containers for this node
                    node_lxc_group = self.to_safe('%s%s' % (self.get_option('group_prefix'), ('%s_lxc' % node['node']).lower()))
                    self.inventory.add_group(node_lxc_group)
                    for lxc in self._get_lxc_per_node(node['node']):
                        self.inventory.add_host(lxc['name'])
                        self.inventory.add_child(lxc_group, lxc['name'])
                        self.inventory.add_child(node_lxc_group, lxc['name'])

                        # get LXC status when want_facts == True
                        if self.get_option('want_facts'):
                            self._get_vm_status(node['node'], lxc['vmid'], 'lxc', lxc['name'])
                            if lxc['status'] == 'stopped':
                                self.inventory.add_child(stopped_group, lxc['name'])
                            elif lxc['status'] == 'running':
                                self.inventory.add_child(running_group, lxc['name'])

                        # get LXC config for facts
                        if self.get_option('want_facts'):
                            self._get_vm_config(node['node'], lxc['vmid'], 'lxc', lxc['name'])

                    # get QEMU vm's for this node
                    node_qemu_group = self.to_safe('%s%s' % (self.get_option('group_prefix'), ('%s_qemu' % node['node']).lower()))
                    self.inventory.add_group(node_qemu_group)
                    for qemu in self._get_qemu_per_node(node['node']):
                        # templates are skipped; only real VMs become hosts
                        if not qemu['template']:
                            self.inventory.add_host(qemu['name'])
                            self.inventory.add_child(qemu_group, qemu['name'])
                            self.inventory.add_child(node_qemu_group, qemu['name'])

                            # get QEMU status
                            self._get_vm_status(node['node'], qemu['vmid'], 'qemu', qemu['name'])
                            if qemu['status'] == 'stopped':
                                self.inventory.add_child(stopped_group, qemu['name'])
                            elif qemu['status'] == 'running':
                                self.inventory.add_child(running_group, qemu['name'])

                            # get QEMU config for facts
                            if self.get_option('want_facts'):
                                self._get_vm_config(node['node'], qemu['vmid'], 'qemu', qemu['name'])

        # gather vm's in pools
        for pool in self._get_pools():
            if pool.get('poolid'):
                pool_group = 'pool_' + pool['poolid']
                pool_group = self.to_safe('%s%s' % (self.get_option('group_prefix'), pool_group.lower()))
                self.inventory.add_group(pool_group)

                for member in self._get_members_per_pool(pool['poolid']):
                    if member.get('name'):
                        self.inventory.add_child(pool_group, member['name'])

    def parse(self, inventory, loader, path, cache=True):
        """Entry point: read config, authenticate, and populate the inventory."""
        if not HAS_REQUESTS:
            raise AnsibleError('This module requires Python Requests 1.1.0 or higher: '
                               'https://github.com/psf/requests.')

        super(InventoryModule, self).parse(inventory, loader, path)

        # read config from file, this sets 'options'
        self._read_config_data(path)

        # get connection host
        self.proxmox_url = self.get_option('url')
        self.proxmox_user = self.get_option('user')
        self.proxmox_password = self.get_option('password')
        self.cache_key = self.get_cache_key(path)
        self.use_cache = cache and self.get_option('cache')

        # actually populate inventory
        self._populate()

View File

@@ -17,7 +17,7 @@ DOCUMENTATION = '''
plugin:
description: token that ensures this is a source file for the 'scaleway' plugin.
required: True
choices: ['scaleway', 'community.general.scaleway']
choices: ['scaleway']
regions:
description: Filter results on a specific Scaleway region
type: list
@@ -60,7 +60,7 @@ EXAMPLES = '''
# use hostname as inventory_hostname
# use the private IP address to connect to the host
plugin: community.general.scaleway
plugin: scaleway
regions:
- ams1
- par1
@@ -73,7 +73,7 @@ variables:
state: state
# use hostname as inventory_hostname and public IP address to connect to the host
plugin: community.general.scaleway
plugin: scaleway
hostnames:
- hostname
regions:

View File

@@ -1,281 +0,0 @@
# Copyright (c) 2020 Shay Rybak <shay.rybak@stackpath.com>
# Copyright (c) 2020 Ansible Project
# GNU General Public License v3.0+
# (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
DOCUMENTATION = '''
name: stackpath_compute
plugin_type: inventory
short_description: StackPath Edge Computing inventory source
version_added: 1.2.0
extends_documentation_fragment:
- inventory_cache
- constructed
description:
- Get inventory hosts from StackPath Edge Computing.
- Uses a YAML configuration file that ends with stackpath_compute.(yml|yaml).
options:
plugin:
description:
- A token that ensures this is a source file for the plugin.
required: true
choices: ['community.general.stackpath_compute']
client_id:
description:
- An OAuth client ID generated from the API Management section of the StackPath customer portal
U(https://control.stackpath.net/api-management).
required: true
type: str
client_secret:
description:
- An OAuth client secret generated from the API Management section of the StackPath customer portal
U(https://control.stackpath.net/api-management).
required: true
type: str
stack_slugs:
description:
- A list of Stack slugs to query instances in. If no entry then get instances in all stacks on the account.
type: list
elements: str
use_internal_ip:
description:
- Whether or not to use internal IP addresses, If false, uses external IP addresses, internal otherwise.
- If an instance doesn't have an external IP it will not be returned when this option is set to false.
type: bool
'''
EXAMPLES = '''
# Example using credentials to fetch all workload instances in a stack.
---
plugin: community.general.stackpath_compute
client_id: my_client_id
client_secret: my_client_secret
stack_slugs:
- my_first_stack_slug
- my_other_stack_slug
use_internal_ip: false
'''
import traceback
import json
from ansible.errors import AnsibleError
from ansible.module_utils.urls import open_url
from ansible.plugins.inventory import (
BaseInventoryPlugin,
Constructable,
Cacheable
)
from ansible.utils.display import Display
display = Display()
class InventoryModule(BaseInventoryPlugin, Constructable, Cacheable):
    """Dynamic inventory plugin that builds hosts and groups from StackPath
    Edge Computing workload instances retrieved via the StackPath REST API."""

    NAME = 'community.general.stackpath_compute'

    def __init__(self):
        super(InventoryModule, self).__init__()
        # credentials (filled in by _set_credentials from plugin options)
        self.client_id = None
        self.client_secret = None
        self.stack_slug = None
        self.api_host = "https://gateway.stackpath.com"
        # Instance fields that inventory groups are generated from in
        # _populate; each (key, value) pair becomes one group.
        self.group_keys = [
            "stackSlug",
            "workloadId",
            "cityCode",
            "countryCode",
            "continent",
            "target",
            "name",
            "workloadSlug"
        ]

    def _validate_config(self, config):
        """Sanity-check the parsed inventory config.

        :param config: dict read from the stackpath_compute.(yml|yaml) file
        :raises AnsibleError: if required options are missing or malformed
        :return: True when the configuration is acceptable
        """
        if config['plugin'] != 'community.general.stackpath_compute':
            raise AnsibleError("plugin doesn't match this plugin")
        try:
            client_id = config['client_id']
            # Check the credential's length, not the value itself; comparing
            # the string to the integer 32 would always be unequal and would
            # reject every configuration.
            if len(client_id) != 32:
                raise AnsibleError("client_id must be 32 characters long")
        except KeyError:
            raise AnsibleError("config missing client_id, a required option")
        try:
            client_secret = config['client_secret']
            if len(client_secret) != 64:
                raise AnsibleError("client_secret must be 64 characters long")
        except KeyError:
            raise AnsibleError("config missing client_secret, a required option")
        return True

    def _set_credentials(self):
        '''
        Copy the client_id/client_secret plugin options onto the instance.
        '''
        self.client_id = self.get_option('client_id')
        self.client_secret = self.get_option('client_secret')

    def _authenticate(self):
        """POST the client credentials to the OAuth2 token endpoint and, on a
        200 response, store the bearer token on self.auth_token."""
        payload = json.dumps(
            {
                "client_id": self.client_id,
                "client_secret": self.client_secret,
                "grant_type": "client_credentials",
            }
        )
        headers = {
            "Content-Type": "application/json",
        }
        resp = open_url(
            self.api_host + '/identity/v1/oauth2/token',
            headers=headers,
            data=payload,
            method="POST"
        )
        status_code = resp.code
        if status_code == 200:
            body = resp.read()
            self.auth_token = json.loads(body)["access_token"]

    def _query(self):
        """Return all RUNNING workload instances across self.stack_slugs.

        Each instance dict is enriched with the flattened fields listed in
        self.group_keys so that _populate can group on them.
        """
        results = []
        workloads = []
        self._authenticate()
        for stack_slug in self.stack_slugs:
            try:
                workloads = self._stackpath_query_get_list(self.api_host + '/workload/v1/stacks/' + stack_slug + '/workloads')
            except Exception:
                raise AnsibleError("Failed to get workloads from the StackPath API: %s" % traceback.format_exc())
            for workload in workloads:
                try:
                    workload_instances = self._stackpath_query_get_list(
                        self.api_host + '/workload/v1/stacks/' + stack_slug + '/workloads/' + workload["id"] + '/instances'
                    )
                except Exception:
                    raise AnsibleError("Failed to get workload instances from the StackPath API: %s" % traceback.format_exc())
                for instance in workload_instances:
                    if instance["phase"] == "RUNNING":
                        instance["stackSlug"] = stack_slug
                        instance["workloadId"] = workload["id"]
                        instance["workloadSlug"] = workload["slug"]
                        instance["cityCode"] = instance["location"]["cityCode"]
                        instance["countryCode"] = instance["location"]["countryCode"]
                        instance["continent"] = instance["location"]["continent"]
                        instance["target"] = instance["metadata"]["labels"]["workload.platform.stackpath.net/target-name"]
                        try:
                            # Skip instances that lack the chosen hostname
                            # field (e.g. no external IP while
                            # use_internal_ip is false).
                            if instance[self.hostname_key]:
                                results.append(instance)
                        except KeyError:
                            pass
        return results

    def _populate(self, instances):
        """Create a group for every (group_key, value) pair and add each
        instance's hostname to the matching groups."""
        for instance in instances:
            for group_key in self.group_keys:
                group = group_key + "_" + instance[group_key]
                # Normalize to a valid group name.
                group = group.lower().replace(" ", "_").replace("-", "_")
                self.inventory.add_group(group)
                self.inventory.add_host(instance[self.hostname_key],
                                        group=group)

    def _stackpath_query_get_list(self, url):
        """GET a paginated StackPath collection, following the page cursor
        until hasNextPage is false; return the concatenated results list."""
        self._authenticate()
        headers = {
            "Content-Type": "application/json",
            "Authorization": "Bearer " + self.auth_token,
        }
        next_page = True
        result = []
        cursor = '-1'
        while next_page:
            resp = open_url(
                url + '?page_request.first=10&page_request.after=%s' % cursor,
                headers=headers,
                method="GET"
            )
            status_code = resp.code
            if status_code == 200:
                body = resp.read()
                body_json = json.loads(body)
                result.extend(body_json["results"])
                next_page = body_json["pageInfo"]["hasNextPage"]
                if next_page:
                    cursor = body_json["pageInfo"]["endCursor"]
        return result

    def _get_stack_slugs(self, stacks):
        # Extract just the slug of every stack returned by the API.
        self.stack_slugs = [stack["slug"] for stack in stacks]

    def verify_file(self, path):
        '''
        :param loader: an ansible.parsing.dataloader.DataLoader object
        :param path: the path to the inventory config file
        :return the contents of the config file
        '''
        if super(InventoryModule, self).verify_file(path):
            if path.endswith(('stackpath_compute.yml', 'stackpath_compute.yaml')):
                return True
        display.debug(
            "stackpath_compute inventory filename must end with \
            'stackpath_compute.yml' or 'stackpath_compute.yaml'"
        )
        return False

    def parse(self, inventory, loader, path, cache=True):
        """Main entry point: read and validate the config, resolve stack
        slugs, then populate the inventory from the API or the cache."""
        super(InventoryModule, self).parse(inventory, loader, path)
        config = self._read_config_data(path)
        self._validate_config(config)
        self._set_credentials()
        # get user specifications
        self.use_internal_ip = self.get_option('use_internal_ip')
        if self.use_internal_ip:
            self.hostname_key = "ipAddress"
        else:
            self.hostname_key = "externalIpAddress"
        self.stack_slugs = self.get_option('stack_slugs')
        if not self.stack_slugs:
            try:
                stacks = self._stackpath_query_get_list(self.api_host + '/stack/v1/stacks')
                self._get_stack_slugs(stacks)
            except Exception:
                raise AnsibleError("Failed to get stack IDs from the Stackpath API: %s" % traceback.format_exc())
        cache_key = self.get_cache_key(path)
        # false when refresh_cache or --flush-cache is used
        if cache:
            # get the user-specified directive
            cache = self.get_option('cache')
        # Generate inventory
        cache_needs_update = False
        if cache:
            try:
                results = self._cache[cache_key]
            except KeyError:
                # if cache expires or cache file doesn't exist
                cache_needs_update = True
        if not cache or cache_needs_update:
            results = self._query()
        self._populate(results)
        # If the cache has expired/doesn't exist or
        # if refresh_inventory/flush cache is used
        # when the user is using caching, update the cached inventory
        try:
            if cache_needs_update or (not cache and self.get_option('cache')):
                self._cache[cache_key] = results
        except Exception:
            raise AnsibleError("Failed to populate data: %s" % traceback.format_exc())

View File

@@ -5,7 +5,6 @@ from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
DOCUMENTATION = '''
author: Unknown (!UNKNOWN)
name: virtualbox
plugin_type: inventory
short_description: virtualbox inventory source
@@ -20,7 +19,7 @@ DOCUMENTATION = '''
plugin:
description: token that ensures this is a source file for the 'virtualbox' plugin
required: True
choices: ['virtualbox', 'community.general.virtualbox']
choices: ['virtualbox']
running_only:
description: toggles showing all vms vs only those currently running
type: boolean
@@ -39,7 +38,7 @@ DOCUMENTATION = '''
EXAMPLES = '''
# file must be named vbox.yaml or vbox.yml
simple_config_file:
plugin: community.general.virtualbox
plugin: virtualbox
settings_password_file: /etc/virtulbox/secrets
query:
logged_in_users: /VirtualBox/GuestInfo/OS/LoggedInUsersList
@@ -47,7 +46,7 @@ simple_config_file:
ansible_connection: ('indows' in vbox_Guest_OS)|ternary('winrm', 'ssh')
# add hosts (all match with minishift vm) to the group container if any of the vms are in ansible_inventory'
plugin: community.general.virtualbox
plugin: virtualbox
groups:
container: "'minis' in (inventory_hostname)"
'''

View File

@@ -5,7 +5,6 @@ from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
DOCUMENTATION = '''
author: Unknown (!UNKNOWN)
lookup: cartesian
short_description: returns the cartesian product of lists
description:
@@ -21,13 +20,11 @@ DOCUMENTATION = '''
EXAMPLES = """
- name: Example of the change in the description
ansible.builtin.debug:
msg: "{{ lookup('community.general.cartesian', [1,2,3], [a, b])}}"
debug: msg="{{ lookup('cartesian', [1,2,3], [a, b])}}"
- name: loops over the cartesian product of the supplied lists
ansible.builtin.debug:
msg: "{{item}}"
with_community.general.cartesian:
debug: msg="{{item}}"
with_cartesian:
- "{{list1}}"
- "{{list2}}"
- [1,2,3,4,5,6]
@@ -37,8 +34,7 @@ RETURN = """
_list:
description:
- list of lists composed of elements of the input lists
type: list
elements: list
type: lists
"""
from itertools import product

View File

@@ -5,7 +5,6 @@ from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
DOCUMENTATION = '''
author: Unknown (!UNKNOWN)
lookup: chef_databag
short_description: fetches data from a Chef Databag
description:
@@ -28,16 +27,14 @@ DOCUMENTATION = '''
'''
EXAMPLES = """
- ansible.builtin.debug:
msg: "{{ lookup('community.general.chef_databag', 'name=data_bag_name item=data_bag_item') }}"
- debug:
msg: "{{ lookup('chef_databag', 'name=data_bag_name item=data_bag_item') }}"
"""
RETURN = """
_raw:
description:
- The value from the databag.
type: list
elements: dict
- The value from the databag
"""
from ansible.errors import AnsibleError

View File

@@ -0,0 +1,159 @@
# (c) 2018, Jason Vanderhoof <jason.vanderhoof@cyberark.com>, Oren Ben Meir <oren.benmeir@cyberark.com>
# (c) 2018 Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
# ansible-doc / config-manager metadata; parsed as YAML. The ini section name
# must be the plain word "conjur" — the previous trailing comma became part of
# the section name, so values in a "[conjur]" ini section were never found.
DOCUMENTATION = '''
lookup: conjur_variable
short_description: Fetch credentials from CyberArk Conjur.
description:
  - "Retrieves credentials from Conjur using the controlling host's Conjur identity. Conjur info: U(https://www.conjur.org/)."
requirements:
  - 'The controlling host running Ansible has a Conjur identity.
    (More: U(https://docs.conjur.org/Latest/en/Content/Get%20Started/key_concepts/machine_identity.html))'
options:
  _term:
    description: Variable path
    required: True
  identity_file:
    description: Path to the Conjur identity file. The identity file follows the netrc file format convention.
    type: path
    default: /etc/conjur.identity
    required: False
    ini:
      - section: conjur
        key: identity_file_path
    env:
      - name: CONJUR_IDENTITY_FILE
  config_file:
    description: Path to the Conjur configuration file. The configuration file is a YAML file.
    type: path
    default: /etc/conjur.conf
    required: False
    ini:
      - section: conjur
        key: config_file_path
    env:
      - name: CONJUR_CONFIG_FILE
'''
EXAMPLES = """
- debug:
msg: "{{ lookup('conjur_variable', '/path/to/secret') }}"
"""
RETURN = """
_raw:
description:
- Value stored in Conjur.
"""
import os.path
from ansible.errors import AnsibleError
from ansible.plugins.lookup import LookupBase
from base64 import b64encode
from netrc import netrc
from os import environ
from time import time
from ansible.module_utils.six.moves.urllib.parse import quote_plus
import yaml
from ansible.module_utils.urls import open_url
from ansible.utils.display import Display
display = Display()
# Load configuration and return as dictionary if file is present on file system
def _load_conf_from_file(conf_path):
    """Read the Conjur YAML configuration file and return it as a dict.

    Raises AnsibleError when the file is absent or lacks the mandatory
    `account` / `appliance_url` entries.
    """
    display.vvv('conf file: {0}'.format(conf_path))
    if not os.path.exists(conf_path):
        raise AnsibleError('Conjur configuration file `{0}` was not found on the controlling host'
                           .format(conf_path))
    display.vvvv('Loading configuration from: {0}'.format(conf_path))
    with open(conf_path) as conf_file:
        config = yaml.safe_load(conf_file.read())
        if any(key not in config for key in ('account', 'appliance_url')):
            raise AnsibleError('{0} on the controlling host must contain an `account` and `appliance_url` entry'
                               .format(conf_path))
    return config
# Load identity and return as dictionary if file is present on file system
def _load_identity_from_file(identity_path, appliance_url):
    """Parse the netrc-format identity file and return the machine identity
    as a dict with `id` and `api_key` keys for the given appliance URL."""
    display.vvvv('identity file: {0}'.format(identity_path))
    if not os.path.exists(identity_path):
        raise AnsibleError('Conjur identity file `{0}` was not found on the controlling host'
                           .format(identity_path))
    display.vvvv('Loading identity from: {0} for {1}'.format(identity_path, appliance_url))
    conjur_authn_url = '{0}/authn'.format(appliance_url)
    # Look up the netrc entry once instead of querying it twice.
    credentials = netrc(identity_path).authenticators(conjur_authn_url)
    if credentials is None:
        raise AnsibleError('The netrc file on the controlling host does not contain an entry for: {0}'
                           .format(conjur_authn_url))
    login, account, api_key = credentials
    if not login or not api_key:
        raise AnsibleError('{0} on the controlling host must contain a `login` and `password` entry for {1}'
                           .format(identity_path, appliance_url))
    return {'id': login, 'api_key': api_key}
# Use credentials to retrieve temporary authorization token
def _fetch_conjur_token(conjur_url, account, username, api_key):
    """Exchange the API key for a short-lived Conjur authentication token.

    Returns the raw response body (the token) on a 200; raises otherwise.
    """
    conjur_url = '{0}/authn/{1}/{2}/authenticate'.format(conjur_url, account, username)
    display.vvvv('Authentication request to Conjur at: {0}, with user: {1}'.format(conjur_url, username))
    resp = open_url(conjur_url, data=api_key, method='POST')
    status = resp.getcode()
    if status != 200:
        raise AnsibleError('Failed to authenticate as \'{0}\' (got {1} response)'.format(username, status))
    return resp.read()
# Retrieve Conjur variable using the temporary token
def _fetch_conjur_variable(conjur_variable, token, conjur_url, account):
    """Retrieve a Conjur variable value using a previously fetched token.

    :param conjur_variable: variable path to fetch
    :param token: raw authentication token bytes from _fetch_conjur_token
    :return: one-element list with the secret on success; {} for response
             codes not explicitly handled (open_url raises for most errors)
    """
    # b64encode returns bytes; decode so the Authorization header is a plain
    # text value. On Python 3, formatting the bytes directly would embed
    # "b'...'" in the header and authentication would always fail.
    token = b64encode(token).decode('utf-8')
    headers = {'Authorization': 'Token token="{0}"'.format(token)}
    display.vvvv('Header: {0}'.format(headers))
    url = '{0}/secrets/{1}/variable/{2}'.format(conjur_url, account, quote_plus(conjur_variable))
    display.vvvv('Conjur Variable URL: {0}'.format(url))
    response = open_url(url, headers=headers, method='GET')
    if response.getcode() == 200:
        display.vvvv('Conjur variable {0} was successfully retrieved'.format(conjur_variable))
        return [response.read()]
    if response.getcode() == 401:
        raise AnsibleError('Conjur request has invalid authorization credentials')
    if response.getcode() == 403:
        raise AnsibleError('The controlling host\'s Conjur identity does not have authorization to retrieve {0}'
                           .format(conjur_variable))
    if response.getcode() == 404:
        raise AnsibleError('The variable {0} does not exist'.format(conjur_variable))
    return {}
class LookupModule(LookupBase):
    """Lookup entry point for the conjur_variable plugin.

    NOTE(review): only terms[0] is fetched; additional terms are silently
    ignored — confirm whether multi-term support is intended.
    """

    def run(self, terms, variables=None, **kwargs):
        # Resolve configuration and identity, authenticate, then fetch.
        conf = _load_conf_from_file(self.get_option('config_file'))
        identity = _load_identity_from_file(self.get_option('identity_file'),
                                            conf['appliance_url'])
        token = _fetch_conjur_token(conf['appliance_url'],
                                    conf['account'],
                                    identity['id'],
                                    identity['api_key'])
        return _fetch_conjur_variable(terms[0], token,
                                      conf['appliance_url'], conf['account'])

View File

@@ -6,7 +6,6 @@ from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
DOCUMENTATION = '''
author: Unknown (!UNKNOWN)
lookup: consul_kv
short_description: Fetch metadata from a Consul key value store.
description:
@@ -28,7 +27,7 @@ DOCUMENTATION = '''
- If the key has a value with the specified index then this is returned allowing access to historical values.
datacenter:
description:
- Retrieve the key from a consul datacenter other than the default for the consul host.
- Retrieve the key from a consul datatacenter other than the default for the consul host.
token:
description: The acl token to allow access to restricted values.
host:
@@ -70,7 +69,7 @@ DOCUMENTATION = '''
url:
description: "The target to connect to, should look like this: C(https://my.consul.server:8500)."
type: str
version_added: 1.0.0
version_added: 0.3.0
env:
- name: ANSIBLE_CONSUL_URL
ini:
@@ -79,27 +78,26 @@ DOCUMENTATION = '''
'''
EXAMPLES = """
- ansible.builtin.debug:
- debug:
msg: 'key contains {{item}}'
with_community.general.consul_kv:
with_consul_kv:
- 'key/to/retrieve'
- name: Parameters can be provided after the key be more specific about what to retrieve
ansible.builtin.debug:
debug:
msg: 'key contains {{item}}'
with_community.general.consul_kv:
with_consul_kv:
- 'key/to recurse=true token=E6C060A9-26FB-407A-B83E-12DDAFCB4D98'
- name: retrieving a KV from a remote cluster on non default port
ansible.builtin.debug:
msg: "{{ lookup('community.general.consul_kv', 'my/key', host='10.10.10.10', port='2000') }}"
debug:
msg: "{{ lookup('consul_kv', 'my/key', host='10.10.10.10', port='2000') }}"
"""
RETURN = """
_raw:
description:
- Value(s) stored in consul.
type: dict
"""
import os

View File

@@ -5,7 +5,6 @@ from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
DOCUMENTATION = '''
author: Unknown (!UNKNOWN)
lookup: credstash
short_description: retrieve secrets from Credstash on AWS
requirements:
@@ -45,19 +44,16 @@ DOCUMENTATION = '''
EXAMPLES = """
- name: first use credstash to store your secrets
ansible.builtin.shell: credstash put my-github-password secure123
shell: credstash put my-github-password secure123
- name: "Test credstash lookup plugin -- get my github password"
ansible.builtin.debug:
msg: "Credstash lookup! {{ lookup('community.general.credstash', 'my-github-password') }}"
debug: msg="Credstash lookup! {{ lookup('credstash', 'my-github-password') }}"
- name: "Test credstash lookup plugin -- get my other password from us-west-1"
ansible.builtin.debug:
msg: "Credstash lookup! {{ lookup('community.general.credstash', 'my-other-password', region='us-west-1') }}"
debug: msg="Credstash lookup! {{ lookup('credstash', 'my-other-password', region='us-west-1') }}"
- name: "Test credstash lookup plugin -- get the company's github password"
ansible.builtin.debug:
msg: "Credstash lookup! {{ lookup('community.general.credstash', 'company-github-password', table='company-passwords') }}"
debug: msg="Credstash lookup! {{ lookup('credstash', 'company-github-password', table='company-passwords') }}"
- name: Example play using the 'context' feature
hosts: localhost
@@ -68,19 +64,16 @@ EXAMPLES = """
tasks:
- name: "Test credstash lookup plugin -- get the password with a context passed as a variable"
ansible.builtin.debug:
msg: "{{ lookup('community.general.credstash', 'some-password', context=context) }}"
debug: msg="{{ lookup('credstash', 'some-password', context=context) }}"
- name: "Test credstash lookup plugin -- get the password with a context defined here"
ansible.builtin.debug:
msg: "{{ lookup('community.general.credstash', 'some-password', context=dict(app='my_app', environment='production')) }}"
debug: msg="{{ lookup('credstash', 'some-password', context=dict(app='my_app', environment='production')) }}"
"""
RETURN = """
_raw:
description:
- Value(s) stored in Credstash.
type: str
- value(s) stored in Credstash
"""
import os

View File

@@ -5,7 +5,6 @@ from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
DOCUMENTATION = '''
author: Unknown (!UNKNOWN)
lookup: cyberarkpassword
short_description: get secrets from CyberArk AIM
requirements:
@@ -30,15 +29,14 @@ DOCUMENTATION = '''
- "They could be: Password, PassProps.<property>, PasswordChangeInProcess"
default: 'password'
_extra:
description: for extra_params values please check parameters for clipasswordsdk in CyberArk's "Credential Provider and ASCP Implementation Guide"
notes:
- For Ansible on Windows, please change the -parameters (-p, -d, and -o) to /parameters (/p, /d, and /o) and change the location of CLIPasswordSDK.exe.
description: for extra_parms values please check parameters for clipasswordsdk in CyberArk's "Credential Provider and ASCP Implementation Guide"
note:
- For Ansible on windows, please change the -parameters (-p, -d, and -o) to /parameters (/p, /d, and /o) and change the location of CLIPasswordSDK.exe
'''
EXAMPLES = """
- name: passing options to the lookup
ansible.builtin.debug:
msg: '{{ lookup("community.general.cyberarkpassword", cyquery) }}'
debug: msg={{ lookup("cyberarkpassword", cyquery)}}
vars:
cyquery:
appid: "app_ansible"
@@ -47,9 +45,8 @@ EXAMPLES = """
- name: used in a loop
ansible.builtin.debug:
msg: "{{item}}"
with_community.general.cyberarkpassword:
debug: msg={{item}}
with_cyberarkpassword:
appid: 'app_ansible'
query: 'safe=CyberArk_Passwords;folder=root;object=AdminPass'
output: 'Password,PassProps.UserName,PassProps.Address,PasswordChangeInProcess'

View File

@@ -44,43 +44,32 @@ DOCUMENTATION = '''
EXAMPLES = """
- name: Simple A record (IPV4 address) lookup for example.com
ansible.builtin.debug:
msg: "{{ lookup('community.general.dig', 'example.com.')}}"
debug: msg="{{ lookup('dig', 'example.com.')}}"
- name: "The TXT record for example.org."
ansible.builtin.debug:
msg: "{{ lookup('community.general.dig', 'example.org.', 'qtype=TXT') }}"
debug: msg="{{ lookup('dig', 'example.org.', 'qtype=TXT') }}"
- name: "The TXT record for example.org, alternative syntax."
ansible.builtin.debug:
msg: "{{ lookup('community.general.dig', 'example.org./TXT') }}"
debug: msg="{{ lookup('dig', 'example.org./TXT') }}"
- name: use in a loop
ansible.builtin.debug:
msg: "MX record for gmail.com {{ item }}"
with_items: "{{ lookup('community.general.dig', 'gmail.com./MX', wantlist=True) }}"
debug: msg="MX record for gmail.com {{ item }}"
with_items: "{{ lookup('dig', 'gmail.com./MX', wantlist=True) }}"
- ansible.builtin.debug:
msg: "Reverse DNS for 192.0.2.5 is {{ lookup('community.general.dig', '192.0.2.5/PTR') }}"
- ansible.builtin.debug:
msg: "Reverse DNS for 192.0.2.5 is {{ lookup('community.general.dig', '5.2.0.192.in-addr.arpa./PTR') }}"
- ansible.builtin.debug:
msg: "Reverse DNS for 192.0.2.5 is {{ lookup('community.general.dig', '5.2.0.192.in-addr.arpa.', 'qtype=PTR') }}"
- ansible.builtin.debug:
msg: "Querying 198.51.100.23 for IPv4 address for example.com. produces {{ lookup('dig', 'example.com', '@198.51.100.23') }}"
- debug: msg="Reverse DNS for 192.0.2.5 is {{ lookup('dig', '192.0.2.5/PTR') }}"
- debug: msg="Reverse DNS for 192.0.2.5 is {{ lookup('dig', '5.2.0.192.in-addr.arpa./PTR') }}"
- debug: msg="Reverse DNS for 192.0.2.5 is {{ lookup('dig', '5.2.0.192.in-addr.arpa.', 'qtype=PTR') }}"
- debug: msg="Querying 198.51.100.23 for IPv4 address for example.com. produces {{ lookup('dig', 'example.com', '@198.51.100.23') }}"
- ansible.builtin.debug:
msg: "XMPP service for gmail.com. is available at {{ item.target }} on port {{ item.port }}"
with_items: "{{ lookup('community.general.dig', '_xmpp-server._tcp.gmail.com./SRV', 'flat=0', wantlist=True) }}"
- debug: msg="XMPP service for gmail.com. is available at {{ item.target }} on port {{ item.port }}"
with_items: "{{ lookup('dig', '_xmpp-server._tcp.gmail.com./SRV', 'flat=0', wantlist=True) }}"
"""
RETURN = """
_list:
description:
- List of composed strings or dictionaries with key and value
- list of composed strings or dictonaries with key and value
If a dictionary, fields shows the keys returned depending on query type
type: list
elements: raw
contains:
ALL:
description:

View File

@@ -21,21 +21,18 @@ DOCUMENTATION = '''
EXAMPLES = """
- name: show txt entry
ansible.builtin.debug:
msg: "{{lookup('community.general.dnstxt', ['test.example.com'])}}"
debug: msg="{{lookup('dnstxt', ['test.example.com'])}}"
- name: iterate over txt entries
ansible.builtin.debug:
msg: "{{item}}"
with_community.general.dnstxt:
debug: msg="{{item}}"
with_dnstxt:
- 'test.example.com'
- 'other.example.com'
- 'last.example.com'
- name: iterate of a comma delimited DNS TXT entry
ansible.builtin.debug:
msg: "{{item}}"
with_community.general.dnstxt: "{{lookup('community.general.dnstxt', ['test.example.com']).split(',')}}"
debug: msg="{{item}}"
with_dnstxt: "{{lookup('dnstxt', ['test.example.com']).split(',')}}"
"""
RETURN = """

View File

@@ -1,140 +0,0 @@
# -*- coding: utf-8 -*-
# Copyright: (c) 2020, Adam Migus <adam@migus.org>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
DOCUMENTATION = r"""
lookup: dsv
author: Adam Migus (@amigus) <adam@migus.org>
short_description: Get secrets from Thycotic DevOps Secrets Vault
version_added: 1.0.0
description:
- Uses the Thycotic DevOps Secrets Vault Python SDK to get Secrets from a
DSV I(tenant) using a I(client_id) and I(client_secret).
requirements:
- python-dsv-sdk - https://pypi.org/project/python-dsv-sdk/
options:
_terms:
description: The path to the secret, e.g. C(/staging/servers/web1).
required: true
tenant:
description: The first format parameter in the default I(url_template).
env:
- name: DSV_TENANT
ini:
- section: dsv_lookup
key: tenant
required: true
tld:
default: com
description: The top-level domain of the tenant; the second format
parameter in the default I(url_template).
env:
- name: DSV_TLD
ini:
- section: dsv_lookup
key: tld
required: false
client_id:
description: The client_id with which to request the Access Grant.
env:
- name: DSV_CLIENT_ID
ini:
- section: dsv_lookup
key: client_id
required: true
client_secret:
description: The client secret associated with the specific I(client_id).
env:
- name: DSV_CLIENT_SECRET
ini:
- section: dsv_lookup
key: client_secret
required: true
url_template:
default: https://{}.secretsvaultcloud.{}/v1
description: The path to prepend to the base URL to form a valid REST
API request.
env:
- name: DSV_URL_TEMPLATE
ini:
- section: dsv_lookup
key: url_template
required: false
"""
RETURN = r"""
_list:
description:
- One or more JSON responses to C(GET /secrets/{path}).
- See U(https://dsv.thycotic.com/api/index.html#operation/getSecret).
type: list
elements: dict
"""
EXAMPLES = r"""
- hosts: localhost
vars:
secret: "{{ lookup('community.general.dsv', '/test/secret') }}"
tasks:
- ansible.builtin.debug:
msg: 'the password is {{ secret["data"]["password"] }}'
"""
from ansible.errors import AnsibleError, AnsibleOptionsError
sdk_is_missing = False
try:
from thycotic.secrets.vault import (
SecretsVault,
SecretsVaultError,
)
except ImportError:
sdk_is_missing = True
from ansible.utils.display import Display
from ansible.plugins.lookup import LookupBase
display = Display()
class LookupModule(LookupBase):
@staticmethod
def Client(vault_parameters):
return SecretsVault(**vault_parameters)
def run(self, terms, variables, **kwargs):
if sdk_is_missing:
raise AnsibleError("python-dsv-sdk must be installed to use this plugin")
self.set_options(var_options=variables, direct=kwargs)
vault = LookupModule.Client(
{
"tenant": self.get_option("tenant"),
"client_id": self.get_option("client_id"),
"client_secret": self.get_option("client_secret"),
"url_template": self.get_option("url_template"),
}
)
result = []
for term in terms:
display.debug("dsv_lookup term: %s" % term)
try:
path = term.lstrip("[/:]")
if path == "":
raise AnsibleOptionsError("Invalid secret path: %s" % term)
display.vvv(u"DevOps Secrets Vault GET /secrets/%s" % path)
result.append(vault.get_secret_json(path))
except SecretsVaultError as error:
raise AnsibleError(
"DevOps Secrets Vault lookup failure: %s" % error.message
)
return result

View File

@@ -53,17 +53,14 @@ DOCUMENTATION = '''
'''
EXAMPLES = '''
- name: "a value from a locally running etcd"
ansible.builtin.debug:
msg: "{{ lookup('community.general.etcd', 'foo/bar') }}"
- name: "a value from a locally running etcd"
debug: msg={{ lookup('etcd', 'foo/bar') }}
- name: "values from multiple folders on a locally running etcd"
ansible.builtin.debug:
msg: "{{ lookup('community.general.etcd', 'foo', 'bar', 'baz') }}"
- name: "values from multiple folders on a locally running etcd"
debug: msg={{ lookup('etcd', 'foo', 'bar', 'baz') }}
- name: "since Ansible 2.5 you can set server options inline"
ansible.builtin.debug:
msg: "{{ lookup('community.general.etcd', 'foo', version='v2', url='http://192.168.0.27:4001') }}"
- name: "since Ansible 2.5 you can set server options inline"
debug: msg="{{ lookup('etcd', 'foo', version='v2', url='http://192.168.0.27:4001') }}"
'''
RETURN = '''
@@ -71,7 +68,7 @@ RETURN = '''
description:
- list of values associated with input keys
type: list
elements: string
elements: strings
'''
import json
@@ -85,7 +82,7 @@ from ansible.module_utils.urls import open_url
# If etcd v2 running on host 192.168.1.21 on port 2379
# we can use the following in a playbook to retrieve /tfm/network/config key
#
# - ansible.builtin.debug: msg={{lookup('etcd','/tfm/network/config', url='http://192.168.1.21:2379' , version='v2')}}
# - debug: msg={{lookup('etcd','/tfm/network/config', url='http://192.168.1.21:2379' , version='v2')}}
#
# Example Output:
#

View File

@@ -8,7 +8,7 @@ __metaclass__ = type
DOCUMENTATION = '''
author:
- Eric Belhomme (@eric-belhomme) <ebelhomme@fr.scc.com>
- Eric Belhomme <ebelhomme@fr.scc.com>
version_added: '0.2.0'
lookup: etcd3
short_description: Get key values from etcd3 server
@@ -31,7 +31,7 @@ DOCUMENTATION = '''
default: False
endpoints:
description:
- Counterpart of C(ETCDCTL_ENDPOINTS) environment variable.
- Counterpart of C(ETCDCTL_ENDPOINTS) enviroment variable.
Specify the etcd3 connection with and URL form eg. C(https://hostname:2379) or C(<host>:<port>) form.
- The C(host) part is overwritten by I(host) option, if defined.
- The C(port) part is overwritten by I(port) option, if defined.
@@ -76,46 +76,45 @@ DOCUMENTATION = '''
type: int
user:
description:
- Authenticated user name.
- Authentified user name.
env:
- name: ETCDCTL_USER
type: str
password:
description:
- Authenticated user password.
- Authentified user password.
env:
- name: ETCDCTL_PASSWORD
type: str
notes:
- I(host) and I(port) options take precedence over (endpoints) option.
- The recommended way to connect to etcd3 server is using C(ETCDCTL_ENDPOINT)
- The recommanded way to connect to etcd3 server is using C(ETCDCTL_ENDPOINT)
environment variable and keep I(endpoints), I(host), and I(port) unused.
seealso:
- module: community.general.etcd3
- ref: etcd_lookup
description: The etcd v2 lookup.
requirements:
- "etcd3 >= 0.10"
'''
EXAMPLES = '''
- name: "a value from a locally running etcd"
ansible.builtin.debug:
msg: "{{ lookup('community.general.etcd3', 'foo/bar') }}"
- name: "a value from a locally running etcd"
debug:
msg: "{{ lookup('community.general.etcd3', 'foo/bar') }}"
- name: "values from multiple folders on a locally running etcd"
ansible.builtin.debug:
msg: "{{ lookup('community.general.etcd3', 'foo', 'bar', 'baz') }}"
- name: "values from multiple folders on a locally running etcd"
debug:
msg: "{{ lookup('community.general.etcd3', 'foo', 'bar', 'baz') }}"
- name: "look for a key prefix"
ansible.builtin.debug:
msg: "{{ lookup('community.general.etcd3', '/foo/bar', prefix=True) }}"
- name: "look for a key prefix"
debug:
msg: "{{ lookup('community.general.etcd3', '/foo/bar', prefix=True) }}"
- name: "connect to etcd3 with a client certificate"
ansible.builtin.debug:
msg: "{{ lookup('community.general.etcd3', 'foo/bar', cert_cert='/etc/ssl/etcd/client.pem', cert_key='/etc/ssl/etcd/client.key') }}"
- name: "connect to etcd3 with a client certificate"
debug:
msg: "{{ lookup('community.general.etcd3', 'foo/bar', cert_cert='/etc/ssl/etcd/client.pem', cert_key='/etc/ssl/etcd/client.key') }}"
'''
RETURN = '''

View File

@@ -4,7 +4,7 @@
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
DOCUMENTATION = r'''
DOCUMENTATION = '''
lookup: filetree
author: Dag Wieers (@dagwieers) <dag@wieers.com>
short_description: recursively match all files in a directory tree
@@ -19,94 +19,72 @@ options:
required: True
'''
EXAMPLES = r"""
EXAMPLES = """
- name: Create directories
ansible.builtin.file:
file:
path: /web/{{ item.path }}
state: directory
mode: '{{ item.mode }}'
with_community.general.filetree: web/
with_filetree: web/
when: item.state == 'directory'
- name: Template files (explicitly skip directories in order to use the 'src' attribute)
ansible.builtin.template:
template:
src: '{{ item.src }}'
dest: /web/{{ item.path }}
mode: '{{ item.mode }}'
with_community.general.filetree: web/
with_filetree: web/
when: item.state == 'file'
- name: Recreate symlinks
ansible.builtin.file:
file:
src: '{{ item.src }}'
dest: /web/{{ item.path }}
state: link
force: yes
mode: '{{ item.mode }}'
with_community.general.filetree: web/
with_filetree: web/
when: item.state == 'link'
- name: list all files under web/
ansible.builtin.debug:
msg: "{{ lookup('community.general.filetree', 'web/') }}"
"""
RETURN = r"""
RETURN = """
_raw:
description: List of dictionaries with file information.
type: list
elements: dict
description: list of dictionaries with file information
contains:
src:
description:
- Full path to file.
- Not returned when I(item.state) is set to C(directory).
type: path
- full path to file
- not returned when C(item.state) is set to C(directory)
root:
description: Allows filtering by original location.
type: path
description: allows filtering by original location
path:
description: Contains the relative path to root.
type: path
description: contains the relative path to root
mode:
description: The permissions of the resulting file or directory.
type: str
description: TODO
state:
description: TODO
type: str
owner:
description: Name of the user that owns the file/directory.
type: raw
description: TODO
group:
description: Name of the group that owns the file/directory.
type: raw
description: TODO
seuser:
description: The user part of the SELinux file context.
type: raw
description: TODO
serole:
description: The role part of the SELinux file context.
type: raw
description: TODO
setype:
description: The type part of the SELinux file context.
type: raw
description: TODO
selevel:
description: The level part of the SELinux file context.
type: raw
description: TODO
uid:
description: Owner ID of the file/directory.
type: int
description: TODO
gid:
description: Group ID of the file/directory.
type: int
description: TODO
size:
description: Size of the target.
type: int
description: TODO
mtime:
description: Time of last modification.
type: float
description: TODO
ctime:
description: Time of last metadata update or creation (depends on OS).
type: float
description: TODO
"""
import os
import pwd

View File

@@ -6,7 +6,7 @@ __metaclass__ = type
DOCUMENTATION = '''
lookup: flattened
author: Serge van Ginderachter (!UNKNOWN) <serge@vanginderachter.be>
author: Serge van Ginderachter <serge@vanginderachter.be>
short_description: return single list completely flattened
description:
- given one or more lists, this lookup will flatten any list elements found recursively until only 1 list is left.
@@ -21,8 +21,7 @@ DOCUMENTATION = '''
EXAMPLES = """
- name: "'unnest' all elements into single list"
ansible.builtin.debug:
msg: "all in one list {{lookup('community.general.flattened', [1,2,3,[5,6]], [a,b,c], [[5,6,1,3], [34,a,b,c]])}}"
debug: msg="all in one list {{lookup('flattened', [1,2,3,[5,6]], [a,b,c], [[5,6,1,3], [34,a,b,c]])}}"
"""
RETURN = """

View File

@@ -9,7 +9,7 @@ lookup: gcp_storage_file
description:
- This lookup returns the contents from a file residing on Google Cloud Storage
short_description: Return GC Storage content
author: Eric Anderson (!UNKNOWN) <eanderson@avinetworks.com>
author: Eric Anderson <eanderson@avinetworks.com>
requirements:
- python >= 2.6
- requests >= 2.18.4
@@ -29,19 +29,15 @@ extends_documentation_fragment:
'''
EXAMPLES = '''
- ansible.builtin.debug:
msg: |
the value of foo.txt is {{ lookup('community.general.gcp_storage_file',
bucket='gcp-bucket', src='mydir/foo.txt', project='project-name',
auth_kind='serviceaccount', service_account_file='/tmp/myserviceaccountfile.json') }}
- debug: msg="the value of foo.txt is {{ lookup('gcp_storage_file',
bucket='gcp-bucket', src='mydir/foo.txt', project='project-name',
auth_kind='serviceaccount', service_account_file='/tmp/myserviceaccountfile.json') }}"
'''
RETURN = '''
_raw:
description:
- base64 encoded file content
type: list
elements: str
'''
import base64
@@ -145,5 +141,5 @@ class GcpFileLookup():
class LookupModule(LookupBase):
def run(self, terms, variables=None, **kwargs):
if not HAS_GOOGLE_CLOUD_COLLECTION:
raise AnsibleError("community.general.gcp_storage_file needs a supported version of the google.cloud collection installed")
raise AnsibleError("community.general.gcp_storage_files needs a supported version of the google.cloud collection installed")
return GcpFileLookup().run(terms, variables=variables, **kwargs)

View File

@@ -9,7 +9,7 @@ __metaclass__ = type
DOCUMENTATION = """
lookup: hashi_vault
author:
- Jonathan Davila (!UNKNOWN) <jdavila(at)ansible.com>
- Jonathan Davila <jdavila(at)ansible.com>
- Brian Scholer (@briantist)
short_description: Retrieve secrets from HashiCorp's vault
requirements:
@@ -38,17 +38,13 @@ DOCUMENTATION = """
token_path:
description: If no token is specified, will try to read the token file from this path.
env:
- name: VAULT_TOKEN_PATH
version_added: 1.2.0
- name: HOME
ini:
- section: lookup_hashi_vault
key: token_path
version_added: '0.2.0'
token_file:
description: If no token is specified, will try to read the token from this file in C(token_path).
env:
- name: VAULT_TOKEN_FILE
version_added: 1.2.0
ini:
- section: lookup_hashi_vault
key: token_file
@@ -121,9 +117,6 @@ DOCUMENTATION = """
default: True
namespace:
description: Namespace where secrets reside. Requires HVAC 0.7.0+ and Vault 0.11+.
env:
- name: VAULT_NAMESPACE
version_added: 1.2.0
aws_profile:
description: The AWS profile
type: str
@@ -168,79 +161,79 @@ DOCUMENTATION = """
"""
EXAMPLES = """
- ansible.builtin.debug:
- debug:
msg: "{{ lookup('community.general.hashi_vault', 'secret=secret/hello:value token=c975b780-d1be-8016-866b-01d0f9b688a5 url=http://myvault:8200') }}"
- name: Return all secrets from a path
ansible.builtin.debug:
debug:
msg: "{{ lookup('community.general.hashi_vault', 'secret=secret/hello token=c975b780-d1be-8016-866b-01d0f9b688a5 url=http://myvault:8200') }}"
- name: Vault that requires authentication via LDAP
ansible.builtin.debug:
debug:
msg: "{{ lookup('community.general.hashi_vault', 'secret/hello:value auth_method=ldap mount_point=ldap username=myuser password=mypas') }}"
- name: Vault that requires authentication via username and password
ansible.builtin.debug:
debug:
msg: "{{ lookup('community.general.hashi_vault', 'secret=secret/hello:value auth_method=userpass username=myuser password=psw url=http://myvault:8200') }}"
- name: Using an ssl vault
ansible.builtin.debug:
debug:
msg: "{{ lookup('community.general.hashi_vault', 'secret=secret/hola:value token=c975b780-d1be-8016-866b-01d0f9b688a5 validate_certs=False') }}"
- name: using certificate auth
ansible.builtin.debug:
debug:
msg: "{{ lookup('community.general.hashi_vault', 'secret/hi:value token=xxxx url=https://myvault:8200 validate_certs=True cacert=/cacert/path/ca.pem') }}"
- name: authenticate with a Vault app role
ansible.builtin.debug:
debug:
msg: "{{ lookup('community.general.hashi_vault', 'secret=secret/hello:value auth_method=approle role_id=myroleid secret_id=mysecretid') }}"
- name: Return all secrets from a path in a namespace
ansible.builtin.debug:
debug:
msg: "{{ lookup('community.general.hashi_vault', 'secret=secret/hello token=c975b780-d1be-8016-866b-01d0f9b688a5 namespace=teama/admins') }}"
# When using KV v2 the PATH should include "data" between the secret engine mount and path (e.g. "secret/data/:path")
# see: https://www.vaultproject.io/api/secret/kv/kv-v2.html#read-secret-version
- name: Return latest KV v2 secret from path
ansible.builtin.debug:
debug:
msg: "{{ lookup('community.general.hashi_vault', 'secret=secret/data/hello token=my_vault_token url=http://myvault_url:8200') }}"
# The following examples work in collection releases after community.general 0.2.0
- name: secret= is not required if secret is first
ansible.builtin.debug:
debug:
msg: "{{ lookup('community.general.hashi_vault', 'secret/data/hello token=<token> url=http://myvault_url:8200') }}"
- name: options can be specified as parameters rather than put in term string
ansible.builtin.debug:
debug:
msg: "{{ lookup('community.general.hashi_vault', 'secret/data/hello', token=my_token_var, url='http://myvault_url:8200') }}"
# return_format (or its alias 'as') can control how secrets are returned to you
- name: return secrets as a dict (default)
ansible.builtin.set_fact:
set_fact:
my_secrets: "{{ lookup('community.general.hashi_vault', 'secret/data/manysecrets', token=my_token_var, url='http://myvault_url:8200') }}"
- ansible.builtin.debug:
- debug:
msg: "{{ my_secrets['secret_key'] }}"
- ansible.builtin.debug:
- debug:
msg: "Secret '{{ item.key }}' has value '{{ item.value }}'"
loop: "{{ my_secrets | dict2items }}"
- name: return secrets as values only
ansible.builtin.debug:
debug:
msg: "A secret value: {{ item }}"
loop: "{{ query('community.general.hashi_vault', 'secret/data/manysecrets', token=my_token_var, url='http://myvault_url:8200', return_format='values') }}"
- name: return raw secret from API, including metadata
ansible.builtin.set_fact:
set_fact:
my_secret: "{{ lookup('community.general.hashi_vault', 'secret/data/hello:value', token=my_token_var, url='http://myvault_url:8200', as='raw') }}"
- ansible.builtin.debug:
- debug:
msg: "This is version {{ my_secret['metadata']['version'] }} of hello:value. The secret data is {{ my_secret['data']['data']['value'] }}"
# AWS IAM authentication method
# uses Ansible standard AWS options
- name: authenticate with aws_iam_login
ansible.builtin.debug:
debug:
msg: "{{ lookup('community.general.hashi_vault', 'secret/hello:value', auth_method='aws_iam_login', role_id='myroleid', profile=my_boto_profile) }}"
"""
@@ -248,8 +241,6 @@ RETURN = """
_raw:
description:
- secret(s) requested
type: list
elements: dict
"""
import os
@@ -413,7 +404,7 @@ class HashiVault:
self.client.auth_ldap(**params)
def auth_approle(self):
params = self.get_options('role_id', 'secret_id', 'mount_point')
params = self.get_options('role_id', 'secret_id')
self.client.auth_approle(**params)
def auth_aws_iam_login(self):
@@ -541,11 +532,6 @@ class LookupModule(LookupBase):
def validate_auth_token(self, auth_method):
if auth_method == 'token':
if not self.get_option('token_path'):
# generally we want env vars defined in the spec, but in this case we want
# the env var HOME to have lower precedence than any other value source,
# including ini, so we're doing it here after all other processing has taken place
self.set_option('token_path', os.environ.get('HOME'))
if not self.get_option('token') and self.get_option('token_path'):
token_filename = os.path.join(
self.get_option('token_path'),

View File

@@ -18,7 +18,7 @@ DOCUMENTATION = '''
description:
- The list of keys to lookup on the Puppetmaster
type: list
elements: string
element_type: string
required: True
_bin_file:
description:
@@ -39,24 +39,20 @@ EXAMPLES = """
# All this examples depends on hiera.yml that describes the hierarchy
- name: "a value from Hiera 'DB'"
ansible.builtin.debug:
msg: "{{ lookup('community.general.hiera', 'foo') }}"
debug: msg={{ lookup('hiera', 'foo') }}
- name: "a value from a Hiera 'DB' on other environment"
ansible.builtin.debug:
msg: "{{ lookup('community.general.hiera', 'foo environment=production') }}"
debug: msg={{ lookup('hiera', 'foo environment=production') }}
- name: "a value from a Hiera 'DB' for a concrete node"
ansible.builtin.debug:
msg: "{{ lookup('community.general.hiera', 'foo fqdn=puppet01.localdomain') }}"
debug: msg={{ lookup('hiera', 'foo fqdn=puppet01.localdomain') }}
"""
RETURN = """
_raw:
description:
- a value associated with input key
type: list
elements: str
type: strings
"""
import os

View File

@@ -8,7 +8,7 @@ __metaclass__ = type
DOCUMENTATION = '''
lookup: keyring
author:
- Samuel Boucher (!UNKNOWN) <boucher.samuel.c@gmail.com>
- Samuel Boucher <boucher.samuel.c@gmail.com>
requirements:
- keyring (python library)
short_description: grab secrets from the OS keyring
@@ -18,20 +18,18 @@ DOCUMENTATION = '''
EXAMPLES = """
- name: output secrets to screen (BAD IDEA)
ansible.builtin.debug:
debug:
msg: "Password: {{item}}"
with_community.general.keyring:
with_keyring:
- 'servicename username'
- name: access mysql with password from keyring
mysql_db: login_password={{lookup('community.general.keyring','mysql joe')}} login_user=joe
mysql_db: login_password={{lookup('keyring','mysql joe')}} login_user=joe
"""
RETURN = """
_raw:
description: Secrets stored.
type: list
elements: str
description: secrets stored
"""
HAS_KEYRING = True

View File

@@ -7,7 +7,7 @@ __metaclass__ = type
DOCUMENTATION = '''
lookup: lastpass
author:
- Andrew Zenk (!UNKNOWN) <azenk@umn.edu>
- Andrew Zenk <azenk@umn.edu>
requirements:
- lpass (command line utility)
- must have already logged into lastpass
@@ -25,15 +25,13 @@ DOCUMENTATION = '''
EXAMPLES = """
- name: get 'custom_field' from lastpass entry 'entry-name'
ansible.builtin.debug:
msg: "{{ lookup('community.general.lastpass', 'entry-name', field='custom_field') }}"
debug:
msg: "{{ lookup('lastpass', 'entry-name', field='custom_field') }}"
"""
RETURN = """
_raw:
description: secrets stored
type: list
elements: str
"""
from subprocess import Popen, PIPE

View File

@@ -24,32 +24,30 @@ DOCUMENTATION = '''
EXAMPLES = """
- name: query LMDB for a list of country codes
ansible.builtin.debug:
msg: "{{ query('community.general.lmdb_kv', 'nl', 'be', 'lu', db='jp.mdb') }}"
debug:
msg: "{{ query('lmdb_kv', 'nl', 'be', 'lu', db='jp.mdb') }}"
- name: use list of values in a loop by key wildcard
ansible.builtin.debug:
debug:
msg: "Hello from {{ item.0 }} a.k.a. {{ item.1 }}"
vars:
- lmdb_kv_db: jp.mdb
with_community.general.lmdb_kv:
with_lmdb_kv:
- "n*"
- name: get an item by key
ansible.builtin.assert:
assert:
that:
- item == 'Belgium'
vars:
- lmdb_kv_db: jp.mdb
with_community.general.lmdb_kv:
with_lmdb_kv:
- be
"""
RETURN = """
_raw:
description: value(s) stored in LMDB
type: list
elements: raw
"""

View File

@@ -6,7 +6,7 @@ __metaclass__ = type
DOCUMENTATION = '''
author:
- Kyrylo Galanov (!UNKNOWN) <galanoff@gmail.com>
- Kyrylo Galanov (galanoff@gmail.com)
lookup: manifold
short_description: get credentials from Manifold.co
description:
@@ -40,14 +40,11 @@ DOCUMENTATION = '''
EXAMPLES = '''
- name: all available resources
ansible.builtin.debug:
msg: "{{ lookup('community.general.manifold', api_token='SecretToken') }}"
debug: msg="{{ lookup('manifold', api_token='SecretToken') }}"
- name: all available resources for a specific project in specific team
ansible.builtin.debug:
msg: "{{ lookup('community.general.manifold', api_token='SecretToken', project='poject-1', team='team-2') }}"
debug: msg="{{ lookup('manifold', api_token='SecretToken', project='poject-1', team='team-2') }}"
- name: two specific resources
ansible.builtin.debug:
msg: "{{ lookup('community.general.manifold', 'resource-1', 'resource-2') }}"
debug: msg="{{ lookup('manifold', 'resource-1', 'resource-2') }}"
'''
RETURN = '''

View File

@@ -22,7 +22,6 @@ __metaclass__ = type
DOCUMENTATION = '''
---
author: Unknown (!UNKNOWN)
lookup: nios
short_description: Query Infoblox NIOS objects
description:
@@ -48,14 +47,12 @@ options:
EXAMPLES = """
- name: fetch all networkview objects
ansible.builtin.set_fact:
networkviews: "{{ lookup('community.general.nios', 'networkview',
provider={'host': 'nios01', 'username': 'admin', 'password': 'password'}) }}"
set_fact:
networkviews: "{{ lookup('nios', 'networkview', provider={'host': 'nios01', 'username': 'admin', 'password': 'password'}) }}"
- name: fetch the default dns view
ansible.builtin.set_fact:
dns_views: "{{ lookup('community.general.nios', 'view', filter={'name': 'default'},
provider={'host': 'nios01', 'username': 'admin', 'password': 'password'}) }}"
set_fact:
dns_views: "{{ lookup('nios', 'view', filter={'name': 'default'}, provider={'host': 'nios01', 'username': 'admin', 'password': 'password'}) }}"
# all of the examples below use credentials that are set using env variables
# export INFOBLOX_HOST=nios01
@@ -63,28 +60,29 @@ EXAMPLES = """
# export INFOBLOX_PASSWORD=admin
- name: fetch all host records and include extended attributes
ansible.builtin.set_fact:
host_records: "{{ lookup('community.general.nios', 'record:host', return_fields=['extattrs', 'name', 'view', 'comment']}) }}"
set_fact:
host_records: "{{ lookup('nios', 'record:host', return_fields=['extattrs', 'name', 'view', 'comment']}) }}"
- name: use env variables to pass credentials
ansible.builtin.set_fact:
networkviews: "{{ lookup('community.general.nios', 'networkview') }}"
set_fact:
networkviews: "{{ lookup('nios', 'networkview') }}"
- name: get a host record
ansible.builtin.set_fact:
host: "{{ lookup('community.general.nios', 'record:host', filter={'name': 'hostname.ansible.com'}) }}"
set_fact:
host: "{{ lookup('nios', 'record:host', filter={'name': 'hostname.ansible.com'}) }}"
- name: get the authoritative zone from a non default dns view
ansible.builtin.set_fact:
host: "{{ lookup('community.general.nios', 'zone_auth', filter={'fqdn': 'ansible.com', 'view': 'ansible-dns'}) }}"
set_fact:
host: "{{ lookup('nios', 'zone_auth', filter={'fqdn': 'ansible.com', 'view': 'ansible-dns'}) }}"
"""
RETURN = """
obj_type:
description:
- The object type specified in the terms argument
type: dictionary
returned: always
type: complex
contains:
obj_field:
description:

View File

@@ -22,7 +22,6 @@ __metaclass__ = type
DOCUMENTATION = '''
---
author: Unknown (!UNKNOWN)
lookup: nios_next_ip
short_description: Return the next available IP address for a network
description:
@@ -48,16 +47,16 @@ options:
EXAMPLES = """
- name: return next available IP address for network 192.168.10.0/24
ansible.builtin.set_fact:
ipaddr: "{{ lookup('community.general.nios_next_ip', '192.168.10.0/24', provider={'host': 'nios01', 'username': 'admin', 'password': 'password'}) }}"
set_fact:
ipaddr: "{{ lookup('nios_next_ip', '192.168.10.0/24', provider={'host': 'nios01', 'username': 'admin', 'password': 'password'}) }}"
- name: return the next 3 available IP addresses for network 192.168.10.0/24
ansible.builtin.set_fact:
ipaddr: "{{ lookup('community.general.nios_next_ip', '192.168.10.0/24', num=3, provider={'host': 'nios01', 'username': 'admin', 'password': 'password'}) }}"
set_fact:
ipaddr: "{{ lookup('nios_next_ip', '192.168.10.0/24', num=3, provider={'host': 'nios01', 'username': 'admin', 'password': 'password'}) }}"
- name: return the next 3 available IP addresses for network 192.168.10.0/24 excluding ip addresses - ['192.168.10.1', '192.168.10.2']
ansible.builtin.set_fact:
ipaddr: "{{ lookup('community.general.nios_next_ip', '192.168.10.0/24', num=3, exclude=['192.168.10.1', '192.168.10.2'],
set_fact:
ipaddr: "{{ lookup('nios_next_ip', '192.168.10.0/24', num=3, exclude=['192.168.10.1', '192.168.10.2'],
provider={'host': 'nios01', 'username': 'admin', 'password': 'password'}) }}"
"""
@@ -65,6 +64,7 @@ RETURN = """
_list:
description:
- The list of next IP addresses available
returned: always
type: list
"""

View File

@@ -22,7 +22,6 @@ __metaclass__ = type
DOCUMENTATION = '''
---
author: Unknown (!UNKNOWN)
lookup: nios_next_network
short_description: Return the next available network range for a network-container
description:
@@ -56,18 +55,17 @@ options:
EXAMPLES = """
- name: return next available network for network-container 192.168.10.0/24
ansible.builtin.set_fact:
networkaddr: "{{ lookup('community.general.nios_next_network', '192.168.10.0/24', cidr=25,
provider={'host': 'nios01', 'username': 'admin', 'password': 'password'}) }}"
set_fact:
networkaddr: "{{ lookup('nios_next_network', '192.168.10.0/24', cidr=25, provider={'host': 'nios01', 'username': 'admin', 'password': 'password'}) }}"
- name: return the next 2 available network addresses for network-container 192.168.10.0/24
ansible.builtin.set_fact:
networkaddr: "{{ lookup('community.general.nios_next_network', '192.168.10.0/24', cidr=25, num=2,
set_fact:
networkaddr: "{{ lookup('nios_next_network', '192.168.10.0/24', cidr=25, num=2,
provider={'host': 'nios01', 'username': 'admin', 'password': 'password'}) }}"
- name: return the available network addresses for network-container 192.168.10.0/24 excluding network range '192.168.10.0/25'
ansible.builtin.set_fact:
networkaddr: "{{ lookup('community.general.nios_next_network', '192.168.10.0/24', cidr=25, exclude=['192.168.10.0/25'],
set_fact:
networkaddr: "{{ lookup('nios_next_network', '192.168.10.0/24', cidr=25, exclude=['192.168.10.0/25'],
provider={'host': 'nios01', 'username': 'admin', 'password': 'password'}) }}"
"""
@@ -75,6 +73,7 @@ RETURN = """
_list:
description:
- The list of next network addresses available
returned: always
type: list
"""

View File

@@ -55,27 +55,27 @@ DOCUMENTATION = '''
EXAMPLES = """
# These examples only work when already signed in to 1Password
- name: Retrieve password for KITT when already signed in to 1Password
ansible.builtin.debug:
var: lookup('community.general.onepassword', 'KITT')
debug:
var: lookup('onepassword', 'KITT')
- name: Retrieve password for Wintermute when already signed in to 1Password
ansible.builtin.debug:
var: lookup('community.general.onepassword', 'Tessier-Ashpool', section='Wintermute')
debug:
var: lookup('onepassword', 'Tessier-Ashpool', section='Wintermute')
- name: Retrieve username for HAL when already signed in to 1Password
ansible.builtin.debug:
var: lookup('community.general.onepassword', 'HAL 9000', field='username', vault='Discovery')
debug:
var: lookup('onepassword', 'HAL 9000', field='username', vault='Discovery')
- name: Retrieve password for HAL when not signed in to 1Password
ansible.builtin.debug:
var: lookup('community.general.onepassword'
debug:
var: lookup('onepassword'
'HAL 9000'
subdomain='Discovery'
master_password=vault_master_password)
- name: Retrieve password for HAL when never signed in to 1Password
ansible.builtin.debug:
var: lookup('community.general.onepassword'
debug:
var: lookup('onepassword'
'HAL 9000'
subdomain='Discovery'
master_password=vault_master_password
@@ -86,8 +86,6 @@ EXAMPLES = """
RETURN = """
_raw:
description: field data requested
type: list
elements: str
"""
import errno

View File

@@ -51,19 +51,17 @@ DOCUMENTATION = '''
EXAMPLES = """
- name: Retrieve all data about Wintermute
ansible.builtin.debug:
var: lookup('community.general.onepassword_raw', 'Wintermute')
debug:
var: lookup('onepassword_raw', 'Wintermute')
- name: Retrieve all data about Wintermute when not signed in to 1Password
ansible.builtin.debug:
var: lookup('community.general.onepassword_raw', 'Wintermute', subdomain='Turing', vault_password='DmbslfLvasjdl')
debug:
var: lookup('onepassword_raw', 'Wintermute', subdomain='Turing', vault_password='DmbslfLvasjdl')
"""
RETURN = """
_raw:
description: field data requested
type: list
elements: dict
"""
import json

View File

@@ -8,7 +8,7 @@ __metaclass__ = type
DOCUMENTATION = '''
lookup: passwordstore
author:
- Patrick Deelman (!UNKNOWN) <patrick@patrickdeelman.nl>
- Patrick Deelman <patrick@patrickdeelman.nl>
short_description: manage passwords with passwordstore.org's pass utility
description:
- Enables Ansible to retrieve, create or update passwords from the passwordstore.org pass utility.
@@ -57,42 +57,39 @@ DOCUMENTATION = '''
EXAMPLES = """
# Debug is used for examples, BAD IDEA to show passwords on screen
- name: Basic lookup. Fails if example/test doesn't exist
ansible.builtin.debug:
msg: "{{ lookup('community.general.passwordstore', 'example/test')}}"
debug:
msg: "{{ lookup('passwordstore', 'example/test')}}"
- name: Create pass with random 16 character password. If password exists just give the password
ansible.builtin.debug:
debug:
var: mypassword
vars:
mypassword: "{{ lookup('community.general.passwordstore', 'example/test create=true')}}"
mypassword: "{{ lookup('passwordstore', 'example/test create=true')}}"
- name: Different size password
ansible.builtin.debug:
msg: "{{ lookup('community.general.passwordstore', 'example/test create=true length=42')}}"
debug:
msg: "{{ lookup('passwordstore', 'example/test create=true length=42')}}"
- name: Create password and overwrite the password if it exists. As a bonus, this module includes the old password inside the pass file
ansible.builtin.debug:
msg: "{{ lookup('community.general.passwordstore', 'example/test create=true overwrite=true')}}"
debug:
msg: "{{ lookup('passwordstore', 'example/test create=true overwrite=true')}}"
- name: Create an alphanumeric password
ansible.builtin.debug:
msg: "{{ lookup('community.general.passwordstore', 'example/test create=true nosymbols=true') }}"
debug: msg="{{ lookup('passwordstore', 'example/test create=true nosymbols=true') }}"
- name: Return the value for user in the KV pair user, username
ansible.builtin.debug:
msg: "{{ lookup('community.general.passwordstore', 'example/test subkey=user')}}"
debug:
msg: "{{ lookup('passwordstore', 'example/test subkey=user')}}"
- name: Return the entire password file content
ansible.builtin.set_fact:
passfilecontent: "{{ lookup('community.general.passwordstore', 'example/test returnall=true')}}"
set_fact:
passfilecontent: "{{ lookup('passwordstore', 'example/test returnall=true')}}"
"""
RETURN = """
_raw:
description:
- a password
type: list
elements: str
"""
import os

View File

@@ -8,7 +8,7 @@ DOCUMENTATION = '''
lookup: redis
author:
- Jan-Piet Mens (@jpmens) <jpmens(at)gmail.com>
- Ansible Core Team
- Ansible Core
short_description: fetch data from Redis
description:
- This lookup returns a list of results from a Redis DB corresponding to a list of items given to it
@@ -46,29 +46,23 @@ DOCUMENTATION = '''
EXAMPLES = """
- name: query redis for somekey (default or configured settings used)
ansible.builtin.debug:
msg: "{{ lookup('community.general.redis', 'somekey') }}"
debug: msg="{{ lookup('redis', 'somekey') }}"
- name: query redis for list of keys and non-default host and port
ansible.builtin.debug:
msg: "{{ lookup('community.general.redis', item, host='myredis.internal.com', port=2121) }}"
debug: msg="{{ lookup('redis', item, host='myredis.internal.com', port=2121) }}"
loop: '{{list_of_redis_keys}}'
- name: use list directly
ansible.builtin.debug:
msg: "{{ lookup('community.general.redis', 'key1', 'key2', 'key3') }}"
debug: msg="{{ lookup('redis', 'key1', 'key2', 'key3') }}"
- name: use list directly with a socket
ansible.builtin.debug:
msg: "{{ lookup('community.general.redis', 'key1', 'key2', socket='/var/tmp/redis.sock') }}"
debug: msg="{{ lookup('redis', 'key1', 'key2', socket='/var/tmp/redis.sock') }}"
"""
RETURN = """
_raw:
description: value(s) stored in Redis
type: list
elements: str
"""
import os

Some files were not shown because too many files have changed in this diff Show More