mirror of
https://github.com/ansible-collections/community.general.git
synced 2026-04-29 18:06:53 +00:00
Compare commits
197 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
eac1dfdc78 | ||
|
|
930b64226c | ||
|
|
885bb73cc6 | ||
|
|
6afde82f2c | ||
|
|
62d53eb3cf | ||
|
|
1ae9bcc2dd | ||
|
|
1533b5b244 | ||
|
|
99295fac75 | ||
|
|
434ff80ec6 | ||
|
|
be6d3e9aa1 | ||
|
|
b06d46f4d1 | ||
|
|
acea90ceec | ||
|
|
034d09532e | ||
|
|
f815bef3d1 | ||
|
|
e205494c8c | ||
|
|
1541eecd0e | ||
|
|
6cd87580da | ||
|
|
d227ace4a0 | ||
|
|
e7770b9132 | ||
|
|
d0f1d9efd9 | ||
|
|
22f0747e03 | ||
|
|
2ee7de681a | ||
|
|
2e20e9bb8f | ||
|
|
9d9e2bd797 | ||
|
|
d9aa9e8021 | ||
|
|
a9eeced6d7 | ||
|
|
066b306deb | ||
|
|
7426c3839e | ||
|
|
4d6735bebf | ||
|
|
1ceed02048 | ||
|
|
6dc31b13c3 | ||
|
|
c7899e384a | ||
|
|
35c8bbec8a | ||
|
|
87c37ea441 | ||
|
|
0f7e39fa1a | ||
|
|
cde48c3c03 | ||
|
|
778c91caa7 | ||
|
|
61258c9216 | ||
|
|
99e0f8a3a0 | ||
|
|
2dd159493b | ||
|
|
28caeff7bd | ||
|
|
76cf21a05b | ||
|
|
64248acce6 | ||
|
|
8ed5beb978 | ||
|
|
9b7194be53 | ||
|
|
984d9d91b8 | ||
|
|
071d89acac | ||
|
|
afc620fc74 | ||
|
|
acae2a11aa | ||
|
|
f17690e7d0 | ||
|
|
4ca716a1cf | ||
|
|
92f1a33d80 | ||
|
|
c9e07d19d8 | ||
|
|
c7bffaf270 | ||
|
|
1b0f4fdd28 | ||
|
|
02ea90f680 | ||
|
|
6d08dcdef3 | ||
|
|
8dd00a2b9b | ||
|
|
a090e2ff85 | ||
|
|
ca0b1efa5b | ||
|
|
b2f01f4c20 | ||
|
|
81b390b7dc | ||
|
|
9c7eed43a8 | ||
|
|
6999881985 | ||
|
|
d746293884 | ||
|
|
7b2853d9aa | ||
|
|
e05e7babbe | ||
|
|
01773c5338 | ||
|
|
e46e6e4dd0 | ||
|
|
270e3df416 | ||
|
|
d9524bae93 | ||
|
|
77fc407a73 | ||
|
|
2d73089ddc | ||
|
|
7a185cef08 | ||
|
|
6158b5f56b | ||
|
|
1b05e03384 | ||
|
|
0f0eb53efa | ||
|
|
6b58e784af | ||
|
|
7d644ef3d4 | ||
|
|
9db69a62b2 | ||
|
|
1c23ab8d44 | ||
|
|
96c5ceee97 | ||
|
|
768512645d | ||
|
|
66656abe17 | ||
|
|
022a7834df | ||
|
|
09de2dfd77 | ||
|
|
bb910f6aa1 | ||
|
|
56d8554b70 | ||
|
|
4fa140d896 | ||
|
|
5fc3f9c766 | ||
|
|
0f53fba20a | ||
|
|
d6c3661e3e | ||
|
|
b18c88248b | ||
|
|
fd5e05cc77 | ||
|
|
56a1d3ffd6 | ||
|
|
f1477ec8db | ||
|
|
2fb1dc0cf7 | ||
|
|
b9b4837d72 | ||
|
|
9ea8f41ebb | ||
|
|
b1fe3e34f3 | ||
|
|
841286444e | ||
|
|
4c13f10a05 | ||
|
|
9b844fc8d5 | ||
|
|
92514ee143 | ||
|
|
6621eb8b87 | ||
|
|
f4b4a2813a | ||
|
|
6f2cb85fae | ||
|
|
5cdc70bda9 | ||
|
|
89498d3650 | ||
|
|
c553351563 | ||
|
|
72c1a17bd9 | ||
|
|
694584f907 | ||
|
|
73e2c2eb85 | ||
|
|
f3ddc8757d | ||
|
|
9241b853c0 | ||
|
|
1053b3c658 | ||
|
|
d9daa6b851 | ||
|
|
a876fa0262 | ||
|
|
f64ace97af | ||
|
|
b701b5893f | ||
|
|
24667e12d0 | ||
|
|
9d93760564 | ||
|
|
ec78558559 | ||
|
|
d5c8d7ddcc | ||
|
|
6338048c73 | ||
|
|
92b388817f | ||
|
|
c72b337327 | ||
|
|
e5080b7847 | ||
|
|
079925fe66 | ||
|
|
19a87874f7 | ||
|
|
809cdda9ef | ||
|
|
bec6f732ad | ||
|
|
d2cdca416c | ||
|
|
0f1ccc07c5 | ||
|
|
deb1071666 | ||
|
|
eb9c5eb796 | ||
|
|
5c8504323e | ||
|
|
ab391c2cfa | ||
|
|
a14b525bdc | ||
|
|
996ef6ab49 | ||
|
|
055c8dac9c | ||
|
|
f4a9c7cc8b | ||
|
|
0c1f96290a | ||
|
|
d260f7ffda | ||
|
|
35d81adabf | ||
|
|
10a61c9dc3 | ||
|
|
6f47bcc399 | ||
|
|
7140b456ae | ||
|
|
8c67a5bda9 | ||
|
|
4ae436a8cc | ||
|
|
5f5c07a942 | ||
|
|
1cef1359d0 | ||
|
|
0d28bfb67e | ||
|
|
ef304ed824 | ||
|
|
bf17f289b3 | ||
|
|
0eff87d0be | ||
|
|
f00fabfa48 | ||
|
|
426cbafa06 | ||
|
|
93fe1f9a3e | ||
|
|
4e944772d5 | ||
|
|
50abeee579 | ||
|
|
eccc8d88b6 | ||
|
|
6d2d364a00 | ||
|
|
e781dd3c9b | ||
|
|
362f899a99 | ||
|
|
b44f6b8114 | ||
|
|
53a145ecb0 | ||
|
|
b22b44088f | ||
|
|
e0a1aa2f46 | ||
|
|
53e7e48834 | ||
|
|
62e3a2ed2f | ||
|
|
ecede6ca99 | ||
|
|
e1ac1fa6db | ||
|
|
81cef0bd05 | ||
|
|
a2bb118e95 | ||
|
|
bf9bcd9bb4 | ||
|
|
9bfd61e117 | ||
|
|
ca81a5cf2f | ||
|
|
853dd21eab | ||
|
|
6f267d8f35 | ||
|
|
1f975eff56 | ||
|
|
0ca922248f | ||
|
|
ef7ade6a56 | ||
|
|
d721283846 | ||
|
|
af410f5572 | ||
|
|
442dabbcc6 | ||
|
|
bbb155409e | ||
|
|
a83556af80 | ||
|
|
13a5e5a1ba | ||
|
|
466bd89bd4 | ||
|
|
bd4d5fe9db | ||
|
|
cf889faf42 | ||
|
|
ea313503dd | ||
|
|
57fa6526c4 | ||
|
|
ae4bee2627 | ||
|
|
87000ae491 | ||
|
|
46e221cbc6 |
@@ -13,13 +13,25 @@ pr:
|
|||||||
- stable-*
|
- stable-*
|
||||||
|
|
||||||
schedules:
|
schedules:
|
||||||
- cron: 0 9 * * *
|
- cron: 0 8 * * *
|
||||||
displayName: Nightly
|
displayName: Nightly (main)
|
||||||
always: true
|
always: true
|
||||||
branches:
|
branches:
|
||||||
include:
|
include:
|
||||||
- main
|
- main
|
||||||
- stable-*
|
- cron: 0 10 * * *
|
||||||
|
displayName: Nightly (active stable branches)
|
||||||
|
always: true
|
||||||
|
branches:
|
||||||
|
include:
|
||||||
|
- stable-2
|
||||||
|
- stable-3
|
||||||
|
- cron: 0 11 * * 0
|
||||||
|
displayName: Weekly (old stable branches)
|
||||||
|
always: true
|
||||||
|
branches:
|
||||||
|
include:
|
||||||
|
- stable-1
|
||||||
|
|
||||||
variables:
|
variables:
|
||||||
- name: checkoutPath
|
- name: checkoutPath
|
||||||
@@ -36,7 +48,7 @@ variables:
|
|||||||
resources:
|
resources:
|
||||||
containers:
|
containers:
|
||||||
- container: default
|
- container: default
|
||||||
image: quay.io/ansible/azure-pipelines-test-container:1.8.0
|
image: quay.io/ansible/azure-pipelines-test-container:1.9.0
|
||||||
|
|
||||||
pool: Standard
|
pool: Standard
|
||||||
|
|
||||||
@@ -56,6 +68,19 @@ stages:
|
|||||||
- test: 3
|
- test: 3
|
||||||
- test: 4
|
- test: 4
|
||||||
- test: extra
|
- test: extra
|
||||||
|
- stage: Sanity_2_11
|
||||||
|
displayName: Sanity 2.11
|
||||||
|
dependsOn: []
|
||||||
|
jobs:
|
||||||
|
- template: templates/matrix.yml
|
||||||
|
parameters:
|
||||||
|
nameFormat: Test {0}
|
||||||
|
testFormat: 2.11/sanity/{0}
|
||||||
|
targets:
|
||||||
|
- test: 1
|
||||||
|
- test: 2
|
||||||
|
- test: 3
|
||||||
|
- test: 4
|
||||||
- stage: Sanity_2_10
|
- stage: Sanity_2_10
|
||||||
displayName: Sanity 2.10
|
displayName: Sanity 2.10
|
||||||
dependsOn: []
|
dependsOn: []
|
||||||
@@ -99,6 +124,23 @@ stages:
|
|||||||
- test: 3.7
|
- test: 3.7
|
||||||
- test: 3.8
|
- test: 3.8
|
||||||
- test: 3.9
|
- test: 3.9
|
||||||
|
- test: '3.10'
|
||||||
|
- stage: Units_2_11
|
||||||
|
displayName: Units 2.11
|
||||||
|
dependsOn: []
|
||||||
|
jobs:
|
||||||
|
- template: templates/matrix.yml
|
||||||
|
parameters:
|
||||||
|
nameFormat: Python {0}
|
||||||
|
testFormat: 2.11/units/{0}/1
|
||||||
|
targets:
|
||||||
|
- test: 2.6
|
||||||
|
- test: 2.7
|
||||||
|
- test: 3.5
|
||||||
|
- test: 3.6
|
||||||
|
- test: 3.7
|
||||||
|
- test: 3.8
|
||||||
|
- test: 3.9
|
||||||
- stage: Units_2_10
|
- stage: Units_2_10
|
||||||
displayName: Units 2.10
|
displayName: Units 2.10
|
||||||
dependsOn: []
|
dependsOn: []
|
||||||
@@ -146,14 +188,33 @@ stages:
|
|||||||
test: rhel/7.9
|
test: rhel/7.9
|
||||||
- name: RHEL 8.3
|
- name: RHEL 8.3
|
||||||
test: rhel/8.3
|
test: rhel/8.3
|
||||||
- name: FreeBSD 11.4
|
- name: FreeBSD 12.2
|
||||||
test: freebsd/11.4
|
test: freebsd/12.2
|
||||||
|
- name: FreeBSD 13.0
|
||||||
|
test: freebsd/13.0
|
||||||
|
groups:
|
||||||
|
- 1
|
||||||
|
- 2
|
||||||
|
- 3
|
||||||
|
- stage: Remote_2_11
|
||||||
|
displayName: Remote 2.11
|
||||||
|
dependsOn: []
|
||||||
|
jobs:
|
||||||
|
- template: templates/matrix.yml
|
||||||
|
parameters:
|
||||||
|
testFormat: 2.11/{0}
|
||||||
|
targets:
|
||||||
|
- name: macOS 11.1
|
||||||
|
test: macos/11.1
|
||||||
|
- name: RHEL 7.9
|
||||||
|
test: rhel/7.9
|
||||||
|
- name: RHEL 8.3
|
||||||
|
test: rhel/8.3
|
||||||
- name: FreeBSD 12.2
|
- name: FreeBSD 12.2
|
||||||
test: freebsd/12.2
|
test: freebsd/12.2
|
||||||
groups:
|
groups:
|
||||||
- 1
|
- 1
|
||||||
- 2
|
- 2
|
||||||
- 3
|
|
||||||
- stage: Remote_2_10
|
- stage: Remote_2_10
|
||||||
displayName: Remote 2.10
|
displayName: Remote 2.10
|
||||||
dependsOn: []
|
dependsOn: []
|
||||||
@@ -208,10 +269,10 @@ stages:
|
|||||||
test: centos7
|
test: centos7
|
||||||
- name: CentOS 8
|
- name: CentOS 8
|
||||||
test: centos8
|
test: centos8
|
||||||
- name: Fedora 32
|
|
||||||
test: fedora32
|
|
||||||
- name: Fedora 33
|
- name: Fedora 33
|
||||||
test: fedora33
|
test: fedora33
|
||||||
|
- name: Fedora 34
|
||||||
|
test: fedora34
|
||||||
- name: openSUSE 15 py2
|
- name: openSUSE 15 py2
|
||||||
test: opensuse15py2
|
test: opensuse15py2
|
||||||
- name: openSUSE 15 py3
|
- name: openSUSE 15 py3
|
||||||
@@ -224,6 +285,25 @@ stages:
|
|||||||
- 1
|
- 1
|
||||||
- 2
|
- 2
|
||||||
- 3
|
- 3
|
||||||
|
- stage: Docker_2_11
|
||||||
|
displayName: Docker 2.11
|
||||||
|
dependsOn: []
|
||||||
|
jobs:
|
||||||
|
- template: templates/matrix.yml
|
||||||
|
parameters:
|
||||||
|
testFormat: 2.11/linux/{0}
|
||||||
|
targets:
|
||||||
|
- name: CentOS 8
|
||||||
|
test: centos8
|
||||||
|
- name: Fedora 33
|
||||||
|
test: fedora33
|
||||||
|
- name: openSUSE 15 py3
|
||||||
|
test: opensuse15
|
||||||
|
- name: Ubuntu 20.04
|
||||||
|
test: ubuntu2004
|
||||||
|
groups:
|
||||||
|
- 2
|
||||||
|
- 3
|
||||||
- stage: Docker_2_10
|
- stage: Docker_2_10
|
||||||
displayName: Docker 2.10
|
displayName: Docker 2.10
|
||||||
dependsOn: []
|
dependsOn: []
|
||||||
@@ -270,6 +350,16 @@ stages:
|
|||||||
parameters:
|
parameters:
|
||||||
nameFormat: Python {0}
|
nameFormat: Python {0}
|
||||||
testFormat: devel/cloud/{0}/1
|
testFormat: devel/cloud/{0}/1
|
||||||
|
targets:
|
||||||
|
- test: 3.8
|
||||||
|
- stage: Cloud_2_11
|
||||||
|
displayName: Cloud 2.11
|
||||||
|
dependsOn: []
|
||||||
|
jobs:
|
||||||
|
- template: templates/matrix.yml
|
||||||
|
parameters:
|
||||||
|
nameFormat: Python {0}
|
||||||
|
testFormat: 2.11/cloud/{0}/1
|
||||||
targets:
|
targets:
|
||||||
- test: 2.7
|
- test: 2.7
|
||||||
- test: 3.6
|
- test: 3.6
|
||||||
@@ -299,17 +389,22 @@ stages:
|
|||||||
- Sanity_devel
|
- Sanity_devel
|
||||||
- Sanity_2_9
|
- Sanity_2_9
|
||||||
- Sanity_2_10
|
- Sanity_2_10
|
||||||
|
- Sanity_2_11
|
||||||
- Units_devel
|
- Units_devel
|
||||||
- Units_2_9
|
- Units_2_9
|
||||||
- Units_2_10
|
- Units_2_10
|
||||||
|
- Units_2_11
|
||||||
- Remote_devel
|
- Remote_devel
|
||||||
- Remote_2_9
|
- Remote_2_9
|
||||||
- Remote_2_10
|
- Remote_2_10
|
||||||
|
- Remote_2_11
|
||||||
- Docker_devel
|
- Docker_devel
|
||||||
- Docker_2_9
|
- Docker_2_9
|
||||||
- Docker_2_10
|
- Docker_2_10
|
||||||
|
- Docker_2_11
|
||||||
- Cloud_devel
|
- Cloud_devel
|
||||||
- Cloud_2_9
|
- Cloud_2_9
|
||||||
- Cloud_2_10
|
- Cloud_2_10
|
||||||
|
- Cloud_2_11
|
||||||
jobs:
|
jobs:
|
||||||
- template: templates/coverage.yml
|
- template: templates/coverage.yml
|
||||||
|
|||||||
@@ -7,7 +7,7 @@ set -o pipefail -eu
|
|||||||
|
|
||||||
output_path="$1"
|
output_path="$1"
|
||||||
|
|
||||||
curl --silent --show-error https://codecov.io/bash > codecov.sh
|
curl --silent --show-error https://ansible-ci-files.s3.us-east-1.amazonaws.com/codecov/codecov.sh > codecov.sh
|
||||||
|
|
||||||
for file in "${output_path}"/reports/coverage*.xml; do
|
for file in "${output_path}"/reports/coverage*.xml; do
|
||||||
name="${file}"
|
name="${file}"
|
||||||
|
|||||||
65
.github/BOTMETA.yml
vendored
65
.github/BOTMETA.yml
vendored
@@ -4,17 +4,16 @@ files:
|
|||||||
support: community
|
support: community
|
||||||
$actions:
|
$actions:
|
||||||
labels: action
|
labels: action
|
||||||
$actions/aireos.py:
|
$actions/system/iptables_state.py:
|
||||||
labels: aireos cisco networking
|
maintainers: quidame
|
||||||
$actions/ironware.py:
|
$actions/system/shutdown.py:
|
||||||
maintainers: paulquack
|
|
||||||
labels: ironware networking
|
|
||||||
$actions/shutdown.py:
|
|
||||||
maintainers: nitzmahone samdoran aminvakil
|
maintainers: nitzmahone samdoran aminvakil
|
||||||
$becomes/:
|
$becomes/:
|
||||||
labels: become
|
labels: become
|
||||||
$callbacks/:
|
$callbacks/:
|
||||||
labels: callbacks
|
labels: callbacks
|
||||||
|
$callbacks/loganalytics.py:
|
||||||
|
maintainers: zhcli
|
||||||
$callbacks/logstash.py:
|
$callbacks/logstash.py:
|
||||||
maintainers: ujenmr
|
maintainers: ujenmr
|
||||||
$callbacks/say.py:
|
$callbacks/say.py:
|
||||||
@@ -53,14 +52,22 @@ files:
|
|||||||
$doc_fragments/xenserver.py:
|
$doc_fragments/xenserver.py:
|
||||||
maintainers: bvitnik
|
maintainers: bvitnik
|
||||||
labels: xenserver
|
labels: xenserver
|
||||||
|
$filters/dict.py:
|
||||||
|
maintainers: felixfontein
|
||||||
$filters/dict_kv.py:
|
$filters/dict_kv.py:
|
||||||
maintainers: giner
|
maintainers: giner
|
||||||
|
$filters/from_csv.py:
|
||||||
|
maintainers: Ajpantuso
|
||||||
$filters/jc.py:
|
$filters/jc.py:
|
||||||
maintainers: kellyjonbrazil
|
maintainers: kellyjonbrazil
|
||||||
$filters/list.py:
|
$filters/list.py:
|
||||||
maintainers: vbotka
|
maintainers: vbotka
|
||||||
|
$filters/path_join_shim.py:
|
||||||
|
maintainers: felixfontein
|
||||||
$filters/time.py:
|
$filters/time.py:
|
||||||
maintainers: resmo
|
maintainers: resmo
|
||||||
|
$filters/version_sort.py:
|
||||||
|
maintainers: ericzolf
|
||||||
$httpapis/:
|
$httpapis/:
|
||||||
maintainers: $team_networking
|
maintainers: $team_networking
|
||||||
labels: networking
|
labels: networking
|
||||||
@@ -74,6 +81,8 @@ files:
|
|||||||
maintainers: $team_linode
|
maintainers: $team_linode
|
||||||
labels: cloud linode
|
labels: cloud linode
|
||||||
keywords: linode dynamic inventory script
|
keywords: linode dynamic inventory script
|
||||||
|
$inventories/proxmox.py:
|
||||||
|
maintainers: $team_virt ilijamt
|
||||||
$inventories/scaleway.py:
|
$inventories/scaleway.py:
|
||||||
maintainers: $team_scaleway
|
maintainers: $team_scaleway
|
||||||
labels: cloud scaleway
|
labels: cloud scaleway
|
||||||
@@ -175,6 +184,8 @@ files:
|
|||||||
maintainers: zbal
|
maintainers: zbal
|
||||||
$modules/cloud/lxc/lxc_container.py:
|
$modules/cloud/lxc/lxc_container.py:
|
||||||
maintainers: cloudnull
|
maintainers: cloudnull
|
||||||
|
$modules/cloud/lxc/lxc_profile.py:
|
||||||
|
maintainers: conloos
|
||||||
$modules/cloud/lxd/:
|
$modules/cloud/lxd/:
|
||||||
ignore: hnakamur
|
ignore: hnakamur
|
||||||
$modules/cloud/memset/:
|
$modules/cloud/memset/:
|
||||||
@@ -210,7 +221,7 @@ files:
|
|||||||
$modules/cloud/misc/:
|
$modules/cloud/misc/:
|
||||||
ignore: ryansb
|
ignore: ryansb
|
||||||
$modules/cloud/misc/terraform.py:
|
$modules/cloud/misc/terraform.py:
|
||||||
maintainers: m-yosefpor
|
maintainers: m-yosefpor rainerleber
|
||||||
$modules/cloud/misc/xenserver_facts.py:
|
$modules/cloud/misc/xenserver_facts.py:
|
||||||
maintainers: caphrim007 cheese
|
maintainers: caphrim007 cheese
|
||||||
labels: xenserver_facts
|
labels: xenserver_facts
|
||||||
@@ -294,6 +305,7 @@ files:
|
|||||||
maintainers: bvitnik
|
maintainers: bvitnik
|
||||||
$modules/clustering/consul/:
|
$modules/clustering/consul/:
|
||||||
maintainers: $team_consul
|
maintainers: $team_consul
|
||||||
|
ignore: colin-nolan
|
||||||
$modules/clustering/etcd3.py:
|
$modules/clustering/etcd3.py:
|
||||||
maintainers: evrardjp
|
maintainers: evrardjp
|
||||||
ignore: vfauth
|
ignore: vfauth
|
||||||
@@ -418,6 +430,8 @@ files:
|
|||||||
maintainers: andsens
|
maintainers: andsens
|
||||||
$modules/monitoring/spectrum_device.py:
|
$modules/monitoring/spectrum_device.py:
|
||||||
maintainers: orgito
|
maintainers: orgito
|
||||||
|
$modules/monitoring/spectrum_model_attrs.py:
|
||||||
|
maintainers: tgates81
|
||||||
$modules/monitoring/stackdriver.py:
|
$modules/monitoring/stackdriver.py:
|
||||||
maintainers: bwhaley
|
maintainers: bwhaley
|
||||||
$modules/monitoring/statsd.py:
|
$modules/monitoring/statsd.py:
|
||||||
@@ -434,7 +448,7 @@ files:
|
|||||||
$modules/net_tools/dnsmadeeasy.py:
|
$modules/net_tools/dnsmadeeasy.py:
|
||||||
maintainers: briceburg
|
maintainers: briceburg
|
||||||
$modules/net_tools/haproxy.py:
|
$modules/net_tools/haproxy.py:
|
||||||
maintainers: ravibhure
|
maintainers: ravibhure Normo
|
||||||
$modules/net_tools/:
|
$modules/net_tools/:
|
||||||
maintainers: nerzhul
|
maintainers: nerzhul
|
||||||
$modules/net_tools/infinity/infinity.py:
|
$modules/net_tools/infinity/infinity.py:
|
||||||
@@ -544,7 +558,8 @@ files:
|
|||||||
$modules/packaging/language/bundler.py:
|
$modules/packaging/language/bundler.py:
|
||||||
maintainers: thoiberg
|
maintainers: thoiberg
|
||||||
$modules/packaging/language/composer.py:
|
$modules/packaging/language/composer.py:
|
||||||
maintainers: dmtrs resmo
|
maintainers: dmtrs
|
||||||
|
ignore: resmo
|
||||||
$modules/packaging/language/cpanm.py:
|
$modules/packaging/language/cpanm.py:
|
||||||
maintainers: fcuny
|
maintainers: fcuny
|
||||||
$modules/packaging/language/easy_install.py:
|
$modules/packaging/language/easy_install.py:
|
||||||
@@ -691,7 +706,9 @@ files:
|
|||||||
labels: zypper
|
labels: zypper
|
||||||
ignore: dirtyharrycallahan robinro
|
ignore: dirtyharrycallahan robinro
|
||||||
$modules/packaging/os/zypper_repository.py:
|
$modules/packaging/os/zypper_repository.py:
|
||||||
maintainers: matze
|
maintainers: $team_suse
|
||||||
|
labels: zypper
|
||||||
|
ignore: matze
|
||||||
$modules/remote_management/cobbler/:
|
$modules/remote_management/cobbler/:
|
||||||
maintainers: dagwieers
|
maintainers: dagwieers
|
||||||
$modules/remote_management/dellemc/:
|
$modules/remote_management/dellemc/:
|
||||||
@@ -708,6 +725,8 @@ files:
|
|||||||
labels: cisco
|
labels: cisco
|
||||||
$modules/remote_management/ipmi/:
|
$modules/remote_management/ipmi/:
|
||||||
maintainers: bgaifullin cloudnull
|
maintainers: bgaifullin cloudnull
|
||||||
|
$modules/remote_management/lenovoxcc/:
|
||||||
|
maintainers: panyy3 renxulei
|
||||||
$modules/remote_management/lxca/:
|
$modules/remote_management/lxca/:
|
||||||
maintainers: navalkp prabhosa
|
maintainers: navalkp prabhosa
|
||||||
$modules/remote_management/manageiq/:
|
$modules/remote_management/manageiq/:
|
||||||
@@ -728,7 +747,7 @@ files:
|
|||||||
$modules/remote_management/oneview/oneview_fcoe_network.py:
|
$modules/remote_management/oneview/oneview_fcoe_network.py:
|
||||||
maintainers: fgbulsoni
|
maintainers: fgbulsoni
|
||||||
$modules/remote_management/redfish/:
|
$modules/remote_management/redfish/:
|
||||||
maintainers: $team_redfish billdodd
|
maintainers: $team_redfish
|
||||||
ignore: jose-delarosa
|
ignore: jose-delarosa
|
||||||
$modules/remote_management/stacki/stacki_host.py:
|
$modules/remote_management/stacki/stacki_host.py:
|
||||||
maintainers: bsanders bbyhuy
|
maintainers: bsanders bbyhuy
|
||||||
@@ -751,6 +770,8 @@ files:
|
|||||||
ignore: erydo
|
ignore: erydo
|
||||||
$modules/source_control/github/github_release.py:
|
$modules/source_control/github/github_release.py:
|
||||||
maintainers: adrianmoisey
|
maintainers: adrianmoisey
|
||||||
|
$modules/source_control/github/github_repo.py:
|
||||||
|
maintainers: atorrescogollo
|
||||||
$modules/source_control/github/:
|
$modules/source_control/github/:
|
||||||
maintainers: stpierre
|
maintainers: stpierre
|
||||||
$modules/source_control/gitlab/:
|
$modules/source_control/gitlab/:
|
||||||
@@ -833,8 +854,10 @@ files:
|
|||||||
labels: interfaces_file
|
labels: interfaces_file
|
||||||
$modules/system/iptables_state.py:
|
$modules/system/iptables_state.py:
|
||||||
maintainers: quidame
|
maintainers: quidame
|
||||||
|
$modules/system/shutdown.py:
|
||||||
|
maintainers: nitzmahone samdoran aminvakil
|
||||||
$modules/system/java_cert.py:
|
$modules/system/java_cert.py:
|
||||||
maintainers: haad
|
maintainers: haad absynth76
|
||||||
$modules/system/java_keystore.py:
|
$modules/system/java_keystore.py:
|
||||||
maintainers: Mogztter
|
maintainers: Mogztter
|
||||||
$modules/system/kernel_blacklist.py:
|
$modules/system/kernel_blacklist.py:
|
||||||
@@ -919,7 +942,7 @@ files:
|
|||||||
maintainers: ahtik ovcharenko pyykkis
|
maintainers: ahtik ovcharenko pyykkis
|
||||||
labels: ufw
|
labels: ufw
|
||||||
$modules/system/vdo.py:
|
$modules/system/vdo.py:
|
||||||
maintainers: bgurney-rh
|
maintainers: rhawalsh
|
||||||
$modules/system/xfconf.py:
|
$modules/system/xfconf.py:
|
||||||
maintainers: russoz jbenden
|
maintainers: russoz jbenden
|
||||||
labels: xfconf
|
labels: xfconf
|
||||||
@@ -1004,27 +1027,27 @@ macros:
|
|||||||
terminals: plugins/terminal
|
terminals: plugins/terminal
|
||||||
team_aix: MorrisA bcoca d-little flynn1973 gforster kairoaraujo marvin-sinister mator molekuul ramooncamacho wtcross
|
team_aix: MorrisA bcoca d-little flynn1973 gforster kairoaraujo marvin-sinister mator molekuul ramooncamacho wtcross
|
||||||
team_bsd: JoergFiedler MacLemon bcoca dch jasperla mekanix opoplawski overhacked tuxillo
|
team_bsd: JoergFiedler MacLemon bcoca dch jasperla mekanix opoplawski overhacked tuxillo
|
||||||
team_consul: colin-nolan sgargan
|
team_consul: sgargan
|
||||||
team_cyberark_conjur: jvanderhoof ryanprior
|
team_cyberark_conjur: jvanderhoof ryanprior
|
||||||
team_e_spirit: MatrixCrawler getjack
|
team_e_spirit: MatrixCrawler getjack
|
||||||
team_flatpak: JayKayy oolongbrothers
|
team_flatpak: JayKayy oolongbrothers
|
||||||
team_gitlab: Lunik Shaps dj-wasabi marwatk waheedi zanssa scodeman
|
team_gitlab: Lunik Shaps dj-wasabi marwatk waheedi zanssa scodeman metanovii
|
||||||
team_hpux: bcoca davx8342
|
team_hpux: bcoca davx8342
|
||||||
team_huawei: QijunPan TommyLike edisonxiang freesky-edward hwDCN niuzhenguo xuxiaowei0512 yanzhangi zengchen1024 zhongjun2
|
team_huawei: QijunPan TommyLike edisonxiang freesky-edward hwDCN niuzhenguo xuxiaowei0512 yanzhangi zengchen1024 zhongjun2
|
||||||
team_ipa: Akasurde Nosmoht fxfitz
|
team_ipa: Akasurde Nosmoht fxfitz justchris1
|
||||||
team_jboss: Wolfant jairojunior wbrefvem
|
team_jboss: Wolfant jairojunior wbrefvem
|
||||||
team_keycloak: eikef ndclt
|
team_keycloak: eikef ndclt
|
||||||
team_linode: InTheCloudDan decentral1se displague rmcintosh
|
team_linode: InTheCloudDan decentral1se displague rmcintosh Charliekenney23 LBGarber
|
||||||
team_macos: Akasurde kyleabenson martinm82 danieljaouen indrajitr
|
team_macos: Akasurde kyleabenson martinm82 danieljaouen indrajitr
|
||||||
team_manageiq: abellotti cben gtanzillo yaacov zgalor dkorn evertmulder
|
team_manageiq: abellotti cben gtanzillo yaacov zgalor dkorn evertmulder
|
||||||
team_netapp: amit0701 carchi8py hulquest lmprice lonico ndswartz schmots1
|
team_netapp: amit0701 carchi8py hulquest lmprice lonico ndswartz schmots1
|
||||||
team_networking: NilashishC Qalthos danielmellado ganeshrn justjais trishnaguha sganesh-infoblox privateip
|
team_networking: NilashishC Qalthos danielmellado ganeshrn justjais trishnaguha sganesh-infoblox privateip
|
||||||
team_opennebula: ilicmilan meerkampdvv rsmontero xorel
|
team_opennebula: ilicmilan meerkampdvv rsmontero xorel nilsding
|
||||||
team_oracle: manojmeda mross22 nalsaber
|
team_oracle: manojmeda mross22 nalsaber
|
||||||
team_purestorage: bannaych dnix101 genegr lionmax opslounge raekins sdodsley sile16
|
team_purestorage: bannaych dnix101 genegr lionmax opslounge raekins sdodsley sile16
|
||||||
team_redfish: billdodd mraineri tomasg2012
|
team_redfish: mraineri tomasg2012 xmadsen renxulei
|
||||||
team_rhn: FlossWare alikins barnabycourt vritant
|
team_rhn: FlossWare alikins barnabycourt vritant
|
||||||
team_scaleway: QuentinBrosse abarbare jerome-quere kindermoumoute remyleone sieben
|
team_scaleway: QuentinBrosse abarbare jerome-quere kindermoumoute remyleone sieben
|
||||||
team_solaris: bcoca fishman jasperla jpdasma mator scathatheworm troy2914 xen0l
|
team_solaris: bcoca fishman jasperla jpdasma mator scathatheworm troy2914 xen0l
|
||||||
team_suse: commel dcermak evrardjp lrupp toabctl AnderEnder alxgu andytom
|
team_suse: commel dcermak evrardjp lrupp toabctl AnderEnder alxgu andytom sealor
|
||||||
team_virt: joshainglis karmab Aversiste Thulium-Drake
|
team_virt: joshainglis karmab tleguern Thulium-Drake Ajpantuso
|
||||||
|
|||||||
299
CHANGELOG.rst
299
CHANGELOG.rst
@@ -6,6 +6,301 @@ Community General Release Notes
|
|||||||
|
|
||||||
This changelog describes changes after version 1.0.0.
|
This changelog describes changes after version 1.0.0.
|
||||||
|
|
||||||
|
v2.5.3
|
||||||
|
======
|
||||||
|
|
||||||
|
Release Summary
|
||||||
|
---------------
|
||||||
|
|
||||||
|
Regular bugfix release.
|
||||||
|
|
||||||
|
Bugfixes
|
||||||
|
--------
|
||||||
|
|
||||||
|
- consul_acl - update the hcl allowlist to include all supported options (https://github.com/ansible-collections/community.general/pull/2495).
|
||||||
|
- consul_kv lookup plugin - allow to set ``recurse``, ``index``, ``datacenter`` and ``token`` as keyword arguments (https://github.com/ansible-collections/community.general/issues/2124).
|
||||||
|
- influxdb_user - allow creation of admin users when InfluxDB authentication is enabled but no other user exists on the database. In this scenario, InfluxDB 1.x allows only ``CREATE USER`` queries and rejects any other query (https://github.com/ansible-collections/community.general/issues/2364).
|
||||||
|
- influxdb_user - fix bug where an influxdb user has no privileges for 2 or more databases (https://github.com/ansible-collections/community.general/pull/2499).
|
||||||
|
- influxdb_user - fix bug which removed current privileges instead of appending them to existing ones (https://github.com/ansible-collections/community.general/issues/2609, https://github.com/ansible-collections/community.general/pull/2614).
|
||||||
|
- iptables_state - call ``async_status`` action plugin rather than its module (https://github.com/ansible-collections/community.general/issues/2700).
|
||||||
|
- iptables_state - fix a 'FutureWarning' in a regex and do some basic code clean up (https://github.com/ansible-collections/community.general/pull/2525).
|
||||||
|
- iptables_state - fix a broken query of ``async_status`` result with current ansible-core development version (https://github.com/ansible-collections/community.general/issues/2627, https://github.com/ansible-collections/community.general/pull/2671).
|
||||||
|
- iptables_state - fix initialization of iptables from null state when addressing more than one table (https://github.com/ansible-collections/community.general/issues/2523).
|
||||||
|
- java_cert - fix issue with incorrect alias used on PKCS#12 certificate import (https://github.com/ansible-collections/community.general/pull/2560).
|
||||||
|
- jenkins_plugin - use POST method for sending request to jenkins API when ``state`` option is one of ``enabled``, ``disabled``, ``pinned``, ``unpinned``, or ``absent`` (https://github.com/ansible-collections/community.general/issues/2510).
|
||||||
|
- json_query filter plugin - avoid 'unknown type' errors for more Ansible internal types (https://github.com/ansible-collections/community.general/pull/2607).
|
||||||
|
- module_helper module utils - ``CmdMixin`` must also use ``LC_ALL`` to enforce locale choice (https://github.com/ansible-collections/community.general/pull/2731).
|
||||||
|
- netcup_dns - use ``str(ex)`` instead of unreliable ``ex.message`` in exception handling to fix ``AttributeError`` in error cases (https://github.com/ansible-collections/community.general/pull/2590).
|
||||||
|
- nmap inventory plugin - fix local variable error when cache is disabled (https://github.com/ansible-collections/community.general/issues/2512).
|
||||||
|
- ovir4 inventory script - improve configparser creation to avoid crashes for options without values (https://github.com/ansible-collections/community.general/issues/674).
|
||||||
|
- proxmox_kvm - fixed ``vmid`` return value when VM with ``name`` already exists (https://github.com/ansible-collections/community.general/issues/2648).
|
||||||
|
- redis cache - improved connection string parsing (https://github.com/ansible-collections/community.general/issues/497).
|
||||||
|
- rhsm_release - fix the issue that module considers 8, 7Client and 7Workstation as invalid releases (https://github.com/ansible-collections/community.general/pull/2571).
|
||||||
|
- ssh_config - reduce stormssh searches based on host (https://github.com/ansible-collections/community.general/pull/2568/).
|
||||||
|
- terraform - ensure the workspace is set back to its previous value when the apply fails (https://github.com/ansible-collections/community.general/pull/2634).
|
||||||
|
- xfconf - also use ``LC_ALL`` to enforce locale choice (https://github.com/ansible-collections/community.general/issues/2715).
|
||||||
|
- zypper_repository - fix idempotency on adding repository with ``$releasever`` and ``$basearch`` variables (https://github.com/ansible-collections/community.general/issues/1985).
|
||||||
|
|
||||||
|
v2.5.2
|
||||||
|
======
|
||||||
|
|
||||||
|
Release Summary
|
||||||
|
---------------
|
||||||
|
|
||||||
|
Regular bugfix release.
|
||||||
|
|
||||||
|
Bugfixes
|
||||||
|
--------
|
||||||
|
|
||||||
|
- composer - use ``no-interaction`` option when discovering available options to avoid an issue where composer hangs (https://github.com/ansible-collections/community.general/pull/2348).
|
||||||
|
- hiera lookup plugin - converts the return type of plugin to unicode string (https://github.com/ansible-collections/community.general/pull/2329).
|
||||||
|
- influxdb_retention_policy - ensure idempotent module execution with different duration and shard duration parameter values (https://github.com/ansible-collections/community.general/issues/2281).
|
||||||
|
- influxdb_retention_policy - fix bug where ``INF`` duration values failed parsing (https://github.com/ansible-collections/community.general/pull/2385).
|
||||||
|
- inventory and vault scripts - change file permissions to make vendored inventory and vault scripts executable (https://github.com/ansible-collections/community.general/pull/2337).
|
||||||
|
- jenkins_plugin - fixes Python 2 compatibility issue (https://github.com/ansible-collections/community.general/pull/2340).
|
||||||
|
- jira - fixed error when loading base64-encoded content as attachment (https://github.com/ansible-collections/community.general/pull/2349).
|
||||||
|
- linode_v4 - changed the error message to point to the correct bugtracker URL (https://github.com/ansible-collections/community.general/pull/2430).
|
||||||
|
- nmap inventory plugin - fix cache and constructed group support (https://github.com/ansible-collections/community.general/issues/2242).
|
||||||
|
- nmcli - compare MAC addresses case insensitively to fix idempotency issue (https://github.com/ansible-collections/community.general/issues/2409).
|
||||||
|
- nmcli - if type is ``bridge-slave`` add ``slave-type bridge`` to ``nmcli`` command (https://github.com/ansible-collections/community.general/issues/2408).
|
||||||
|
- one_vm - Allow missing NIC keys (https://github.com/ansible-collections/community.general/pull/2435).
|
||||||
|
- ovirt* modules - remove bad unnecessary import for current ansible-core development version (https://github.com/ansible-collections/community.general/pull/2381).
|
||||||
|
- proxmox inventory - added handling of commas in KVM agent configuration string (https://github.com/ansible-collections/community.general/pull/2245).
|
||||||
|
- puppet - replace ``console`` with ``stdout`` in ``logdest`` option when ``all`` has been chosen (https://github.com/ansible-collections/community.general/issues/1190).
|
||||||
|
- stackpath_compute inventory script - fix broken validation checks for client ID and client secret (https://github.com/ansible-collections/community.general/pull/2448).
|
||||||
|
- svr4pkg - convert string to a bytes-like object to avoid ``TypeError`` with Python 3 (https://github.com/ansible-collections/community.general/issues/2373).
|
||||||
|
- terraform - fix issue that caused the destroy to fail because from Terraform 0.15 on, the ``terraform destroy -force`` option is replaced with ``terraform destroy -auto-approve`` (https://github.com/ansible-collections/community.general/issues/2247).
|
||||||
|
- terraform - fix issue that caused the execution to fail because from Terraform 0.15 on, the ``-var`` and ``-var-file`` options are no longer available on ``terraform validate`` (https://github.com/ansible-collections/community.general/pull/2246).
|
||||||
|
- terraform - remove uses of ``use_unsafe_shell=True`` (https://github.com/ansible-collections/community.general/pull/2246).
|
||||||
|
- zfs - certain ZFS properties, especially sizes, would lead to a task being falsely marked as "changed" even when no actual change was made (https://github.com/ansible-collections/community.general/issues/975, https://github.com/ansible-collections/community.general/pull/2454).
|
||||||
|
|
||||||
|
v2.5.1
|
||||||
|
======
|
||||||
|
|
||||||
|
Release Summary
|
||||||
|
---------------
|
||||||
|
|
||||||
|
Bugfix release for some bugs discovered right after the 2.5.0 release.
|
||||||
|
|
||||||
|
Bugfixes
|
||||||
|
--------
|
||||||
|
|
||||||
|
- funcd connection plugin - can now load (https://github.com/ansible-collections/community.general/pull/2235).
|
||||||
|
- jira - fixed calling of ``isinstance`` (https://github.com/ansible-collections/community.general/issues/2234).
|
||||||
|
|
||||||
|
v2.5.0
|
||||||
|
======
|
||||||
|
|
||||||
|
Release Summary
|
||||||
|
---------------
|
||||||
|
|
||||||
|
Regular feature release. Will be the last 2.x.0 minor release.
|
||||||
|
|
||||||
|
Minor Changes
|
||||||
|
-------------
|
||||||
|
|
||||||
|
- apache2_mod_proxy - refactored/cleaned-up part of the code (https://github.com/ansible-collections/community.general/pull/2142).
|
||||||
|
- atomic_container - using ``get_bin_path()`` before calling ``run_command()`` (https://github.com/ansible-collections/community.general/pull/2144).
|
||||||
|
- atomic_host - using ``get_bin_path()`` before calling ``run_command()`` (https://github.com/ansible-collections/community.general/pull/2144).
|
||||||
|
- atomic_image - using ``get_bin_path()`` before calling ``run_command()`` (https://github.com/ansible-collections/community.general/pull/2144).
|
||||||
|
- beadm - minor refactor converting multiple statements to a single list literal (https://github.com/ansible-collections/community.general/pull/2160).
|
||||||
|
- bitbucket_pipeline_variable - removed unreachable code (https://github.com/ansible-collections/community.general/pull/2157).
|
||||||
|
- hiera lookup - minor refactor converting multiple statements to a single list literal (https://github.com/ansible-collections/community.general/pull/2160).
|
||||||
|
- ipa_config - add new options ``ipaconfigstring``, ``ipadefaultprimarygroup``, ``ipagroupsearchfields``, ``ipahomesrootdir``, ``ipabrkauthzdata``, ``ipamaxusernamelength``, ``ipapwdexpadvnotify``, ``ipasearchrecordslimit``, ``ipasearchtimelimit``, ``ipauserauthtype``, and ``ipausersearchfields`` (https://github.com/ansible-collections/community.general/pull/2116).
|
||||||
|
- ipa_user - fix ``userauthtype`` option to take in list of strings for the multi-select field instead of single string (https://github.com/ansible-collections/community.general/pull/2174).
|
||||||
|
- ipwcli_dns - minor refactor converting multiple statements to a single list literal (https://github.com/ansible-collections/community.general/pull/2160).
|
||||||
|
- java_cert - change ``state: present`` to check certificates by hash, not just alias name (https://github.com/ansible/ansible/issues/43249).
|
||||||
|
- jira - added ``attach`` operation, which allows a user to attach a file to an issue (https://github.com/ansible-collections/community.general/pull/2192).
|
||||||
|
- jira - added parameter ``account_id`` for compatibility with recent versions of JIRA (https://github.com/ansible-collections/community.general/issues/818, https://github.com/ansible-collections/community.general/pull/1978).
|
||||||
|
- known_hosts module utils - minor refactor converting multiple statements to a single list literal (https://github.com/ansible-collections/community.general/pull/2160).
|
||||||
|
- module_helper module utils - added management of facts and adhoc setting of the initial value for variables (https://github.com/ansible-collections/community.general/pull/2188).
|
||||||
|
- module_helper module utils - added mechanism to manage variables, providing automatic output of variables, change status and diff information (https://github.com/ansible-collections/community.general/pull/2162).
|
||||||
|
- nictagadm - minor refactor converting multiple statements to a single list literal (https://github.com/ansible-collections/community.general/pull/2160).
|
||||||
|
- npm - add ``no_bin_links`` option (https://github.com/ansible-collections/community.general/issues/2128).
|
||||||
|
- ovh_ip_failover - removed unreachable code (https://github.com/ansible-collections/community.general/pull/2157).
|
||||||
|
- proxmox inventory plugin - added ``Constructable`` class to the inventory to provide options ``strict``, ``keyed_groups``, ``groups``, and ``compose`` (https://github.com/ansible-collections/community.general/pull/2180).
|
||||||
|
- proxmox inventory plugin - added ``proxmox_agent_interfaces`` fact describing network interfaces returned from a QEMU guest agent (https://github.com/ansible-collections/community.general/pull/2148).
|
||||||
|
- rhevm - removed unreachable code (https://github.com/ansible-collections/community.general/pull/2157).
|
||||||
|
- smartos_image_info - minor refactor converting multiple statements to a single list literal (https://github.com/ansible-collections/community.general/pull/2160).
|
||||||
|
- svr4pkg - minor refactor converting multiple statements to a single list literal (https://github.com/ansible-collections/community.general/pull/2160).
|
||||||
|
- xattr - minor refactor converting multiple statements to a single list literal (https://github.com/ansible-collections/community.general/pull/2160).
|
||||||
|
- xfconf - changed implementation to use ``ModuleHelper`` new features (https://github.com/ansible-collections/community.general/pull/2188).
|
||||||
|
- zfs_facts - minor refactor converting multiple statements to a single list literal (https://github.com/ansible-collections/community.general/pull/2160).
|
||||||
|
- zpool_facts - minor refactor converting multiple statements to a single list literal (https://github.com/ansible-collections/community.general/pull/2160).
|
||||||
|
|
||||||
|
Security Fixes
|
||||||
|
--------------
|
||||||
|
|
||||||
|
- java_cert - remove password from ``run_command`` arguments (https://github.com/ansible-collections/community.general/pull/2008).
|
||||||
|
- java_keystore - pass secret to keytool through an environment variable to not expose it as a commandline argument (https://github.com/ansible-collections/community.general/issues/1668).
|
||||||
|
|
||||||
|
Bugfixes
|
||||||
|
--------
|
||||||
|
|
||||||
|
- dimensiondata_network - bug when formatting message, instead of % a simple comma was used (https://github.com/ansible-collections/community.general/pull/2139).
|
||||||
|
- github_repo - PyGithub bug does not allow explicit port in ``base_url``. Specifying port is not required (https://github.com/PyGithub/PyGithub/issues/1913).
|
||||||
|
- haproxy - fix a bug preventing haproxy from properly entering ``DRAIN`` mode (https://github.com/ansible-collections/community.general/issues/1913).
|
||||||
|
- ipa_user - allow ``sshpubkey`` to permit multiple word comments (https://github.com/ansible-collections/community.general/pull/2159).
|
||||||
|
- java_cert - allow setting ``state: absent`` by providing just the ``cert_alias`` (https://github.com/ansible/ansible/issues/27982).
|
||||||
|
- java_cert - properly handle proxy arguments when the scheme is provided (https://github.com/ansible/ansible/issues/54481).
|
||||||
|
- java_keystore - improve error handling and return ``cmd`` as documented. Force ``LANG``, ``LC_ALL`` and ``LC_MESSAGES`` environment variables to ``C`` to rely on ``keytool`` output parsing. Fix pylint's ``unused-variable`` and ``no-else-return`` hints (https://github.com/ansible-collections/community.general/pull/2183).
|
||||||
|
- java_keystore - use tempfile lib to create temporary files with randomized names, and remove the temporary PKCS#12 keystore as well as other materials (https://github.com/ansible-collections/community.general/issues/1667).
|
||||||
|
- jira - fixed fields' update in ticket transitions (https://github.com/ansible-collections/community.general/issues/818).
|
||||||
|
- kibana_plugin - added missing parameters to ``remove_plugin`` when using ``state=present force=true``, and fix potential quoting errors when invoking ``kibana`` (https://github.com/ansible-collections/community.general/pull/2143).
|
||||||
|
- module_helper module utils - fixed decorator ``cause_changes`` (https://github.com/ansible-collections/community.general/pull/2203).
|
||||||
|
- pkgutil - fixed calls to ``list.extend()`` (https://github.com/ansible-collections/community.general/pull/2161).
|
||||||
|
- vmadm - correct type of list elements in ``resolvers`` parameter (https://github.com/ansible-collections/community.general/issues/2135).
|
||||||
|
- xfconf - module was not honoring check mode when ``state`` was ``absent`` (https://github.com/ansible-collections/community.general/pull/2185).
|
||||||
|
|
||||||
|
New Plugins
|
||||||
|
-----------
|
||||||
|
|
||||||
|
Filter
|
||||||
|
~~~~~~
|
||||||
|
|
||||||
|
- dict - The ``dict`` function as a filter: converts a list of tuples to a dictionary
|
||||||
|
- path_join - Redirects to ansible.builtin.path_join for ansible-base 2.10 or newer, and provides a compatible implementation for Ansible 2.9
|
||||||
|
|
||||||
|
New Modules
|
||||||
|
-----------
|
||||||
|
|
||||||
|
Identity
|
||||||
|
~~~~~~~~
|
||||||
|
|
||||||
|
ipa
|
||||||
|
^^^
|
||||||
|
|
||||||
|
- ipa_otpconfig - Manage FreeIPA OTP Configuration Settings
|
||||||
|
- ipa_otptoken - Manage FreeIPA OTPs
|
||||||
|
|
||||||
|
Monitoring
|
||||||
|
~~~~~~~~~~
|
||||||
|
|
||||||
|
- spectrum_model_attrs - Enforce a model's attributes in CA Spectrum.
|
||||||
|
|
||||||
|
Net Tools
|
||||||
|
~~~~~~~~~
|
||||||
|
|
||||||
|
pritunl
|
||||||
|
^^^^^^^
|
||||||
|
|
||||||
|
- pritunl_org - Manages Pritunl Organizations using the Pritunl API
|
||||||
|
- pritunl_org_info - List Pritunl Organizations using the Pritunl API
|
||||||
|
|
||||||
|
v2.4.0
|
||||||
|
======
|
||||||
|
|
||||||
|
Release Summary
|
||||||
|
---------------
|
||||||
|
|
||||||
|
Regular feature and bugfix release.
|
||||||
|
|
||||||
|
Minor Changes
|
||||||
|
-------------
|
||||||
|
|
||||||
|
- vdo - add ``force`` option (https://github.com/ansible-collections/community.general/issues/2101).
|
||||||
|
|
||||||
|
Bugfixes
|
||||||
|
--------
|
||||||
|
|
||||||
|
- git_config - fixed scope ``file`` behaviour and added integration test for it (https://github.com/ansible-collections/community.general/issues/2117).
|
||||||
|
- zypper, zypper_repository - respect ``PATH`` environment variable when resolving zypper executable path (https://github.com/ansible-collections/community.general/pull/2094).
|
||||||
|
|
||||||
|
New Plugins
|
||||||
|
-----------
|
||||||
|
|
||||||
|
Become
|
||||||
|
~~~~~~
|
||||||
|
|
||||||
|
- sudosu - Run tasks using sudo su -
|
||||||
|
|
||||||
|
Callback
|
||||||
|
~~~~~~~~
|
||||||
|
|
||||||
|
- loganalytics - Posts task results to Azure Log Analytics
|
||||||
|
|
||||||
|
New Modules
|
||||||
|
-----------
|
||||||
|
|
||||||
|
Cloud
|
||||||
|
~~~~~
|
||||||
|
|
||||||
|
opennebula
|
||||||
|
^^^^^^^^^^
|
||||||
|
|
||||||
|
- one_template - Manages OpenNebula templates
|
||||||
|
|
||||||
|
Remote Management
|
||||||
|
~~~~~~~~~~~~~~~~~
|
||||||
|
|
||||||
|
lenovoxcc
|
||||||
|
^^^^^^^^^
|
||||||
|
|
||||||
|
- xcc_redfish_command - Manages Lenovo Out-Of-Band controllers using Redfish APIs
|
||||||
|
|
||||||
|
v2.3.0
|
||||||
|
======
|
||||||
|
|
||||||
|
Release Summary
|
||||||
|
---------------
|
||||||
|
|
||||||
|
Fixes compatibility issues with the latest ansible-core 2.11 beta, some more bugs, and contains several new features, modules and plugins.
|
||||||
|
|
||||||
|
Minor Changes
|
||||||
|
-------------
|
||||||
|
|
||||||
|
- archive - refactored some reused code out into a couple of functions (https://github.com/ansible-collections/community.general/pull/2061).
|
||||||
|
- csv module utils - new module_utils for shared functions between ``from_csv`` filter and ``read_csv`` module (https://github.com/ansible-collections/community.general/pull/2037).
|
||||||
|
- ipa_sudorule - add support for setting sudo runasuser (https://github.com/ansible-collections/community.general/pull/2031).
|
||||||
|
- jenkins_job - add a ``validate_certs`` parameter that allows disabling TLS/SSL certificate validation (https://github.com/ansible-collections/community.general/issues/255).
|
||||||
|
- kibana_plugin - add parameter for passing ``--allow-root`` flag to kibana and kibana-plugin commands (https://github.com/ansible-collections/community.general/pull/2014).
|
||||||
|
- proxmox - added ``purge`` module parameter for use when deleting lxc's with HA options (https://github.com/ansible-collections/community.general/pull/2013).
|
||||||
|
- proxmox inventory plugin - added ``tags_parsed`` fact containing tags parsed as a list (https://github.com/ansible-collections/community.general/pull/1949).
|
||||||
|
- proxmox_kvm - added new module parameter ``tags`` for use with PVE 6+ (https://github.com/ansible-collections/community.general/pull/2000).
|
||||||
|
- rax - elements of list parameters are now validated (https://github.com/ansible-collections/community.general/pull/2006).
|
||||||
|
- rax_cdb_user - elements of list parameters are now validated (https://github.com/ansible-collections/community.general/pull/2006).
|
||||||
|
- rax_scaling_group - elements of list parameters are now validated (https://github.com/ansible-collections/community.general/pull/2006).
|
||||||
|
- read_csv - refactored read_csv module to use shared csv functions from csv module_utils (https://github.com/ansible-collections/community.general/pull/2037).
|
||||||
|
- redfish_* modules, redfish_utils module utils - add support for Redfish session create, delete, and authenticate (https://github.com/ansible-collections/community.general/issues/1975).
|
||||||
|
- snmp_facts - added parameters ``timeout`` and ``retries`` to module (https://github.com/ansible-collections/community.general/issues/980).
|
||||||
|
|
||||||
|
Bugfixes
|
||||||
|
--------
|
||||||
|
|
||||||
|
- Mark various module options with ``no_log=False`` which have a name that potentially could leak secrets, but which do not (https://github.com/ansible-collections/community.general/pull/2001).
|
||||||
|
- module_helper module utils - actually ignoring formatting of parameters with value ``None`` (https://github.com/ansible-collections/community.general/pull/2024).
|
||||||
|
- module_helper module utils - handling ``ModuleHelperException`` now properly calls ``fail_json()`` (https://github.com/ansible-collections/community.general/pull/2024).
|
||||||
|
- module_helper module utils - use the command name as-is in ``CmdMixin`` if it fails ``get_bin_path()`` - allowing full path names to be passed (https://github.com/ansible-collections/community.general/pull/2024).
|
||||||
|
- nios* modules - fix modules to work with ansible-core 2.11 (https://github.com/ansible-collections/community.general/pull/2057).
|
||||||
|
- proxmox - removed requirement that root password is provided when container state is ``present`` (https://github.com/ansible-collections/community.general/pull/1999).
|
||||||
|
- proxmox inventory - exclude qemu templates from inclusion to the inventory via pools (https://github.com/ansible-collections/community.general/issues/1986, https://github.com/ansible-collections/community.general/pull/1991).
|
||||||
|
- proxmox inventory plugin - allowed proxmox tag string to contain commas when returned as fact (https://github.com/ansible-collections/community.general/pull/1949).
|
||||||
|
- redfish_config module, redfish_utils module utils - fix IndexError in ``SetManagerNic`` command (https://github.com/ansible-collections/community.general/issues/1692).
|
||||||
|
- scaleway inventory plugin - fix pagination on scaleway inventory plugin (https://github.com/ansible-collections/community.general/pull/2036).
|
||||||
|
- stacki_host - replaced ``default`` to environment variables with ``fallback`` to them (https://github.com/ansible-collections/community.general/pull/2072).
|
||||||
|
|
||||||
|
New Plugins
|
||||||
|
-----------
|
||||||
|
|
||||||
|
Filter
|
||||||
|
~~~~~~
|
||||||
|
|
||||||
|
- from_csv - Converts CSV text input into list of dicts
|
||||||
|
|
||||||
|
New Modules
|
||||||
|
-----------
|
||||||
|
|
||||||
|
Net Tools
|
||||||
|
~~~~~~~~~
|
||||||
|
|
||||||
|
- gandi_livedns - Manage Gandi LiveDNS records
|
||||||
|
|
||||||
|
pritunl
|
||||||
|
^^^^^^^
|
||||||
|
|
||||||
|
- pritunl_user - Manage Pritunl Users using the Pritunl API
|
||||||
|
- pritunl_user_info - List Pritunl Users using the Pritunl API
|
||||||
|
|
||||||
v2.2.0
|
v2.2.0
|
||||||
======
|
======
|
||||||
|
|
||||||
@@ -369,7 +664,7 @@ Minor Changes
|
|||||||
- The collection is now actively tested in CI with the latest Ansible 2.9 release.
|
- The collection is now actively tested in CI with the latest Ansible 2.9 release.
|
||||||
- airbrake_deployment - add ``version`` param; clarified docs on ``revision`` param (https://github.com/ansible-collections/community.general/pull/583).
|
- airbrake_deployment - add ``version`` param; clarified docs on ``revision`` param (https://github.com/ansible-collections/community.general/pull/583).
|
||||||
- apk - added ``no_cache`` option (https://github.com/ansible-collections/community.general/pull/548).
|
- apk - added ``no_cache`` option (https://github.com/ansible-collections/community.general/pull/548).
|
||||||
- archive - fix paramater types (https://github.com/ansible-collections/community.general/pull/1039).
|
- archive - fix parameter types (https://github.com/ansible-collections/community.general/pull/1039).
|
||||||
- cloudflare_dns - add support for environment variable ``CLOUDFLARE_TOKEN`` (https://github.com/ansible-collections/community.general/pull/1238).
|
- cloudflare_dns - add support for environment variable ``CLOUDFLARE_TOKEN`` (https://github.com/ansible-collections/community.general/pull/1238).
|
||||||
- consul - added support for tcp checks (https://github.com/ansible-collections/community.general/issues/1128).
|
- consul - added support for tcp checks (https://github.com/ansible-collections/community.general/issues/1128).
|
||||||
- datadog - mark ``notification_message`` as ``no_log`` (https://github.com/ansible-collections/community.general/pull/1338).
|
- datadog - mark ``notification_message`` as ``no_log`` (https://github.com/ansible-collections/community.general/pull/1338).
|
||||||
@@ -516,7 +811,7 @@ Breaking Changes / Porting Guide
|
|||||||
If you use ansible-base 2.10 or newer and did not install Ansible 3.0.0, but installed (and/or upgraded) community.general manually, you need to make sure to also install ``community.postgresql`` if you are using any of the ``postgresql`` modules.
|
If you use ansible-base 2.10 or newer and did not install Ansible 3.0.0, but installed (and/or upgraded) community.general manually, you need to make sure to also install ``community.postgresql`` if you are using any of the ``postgresql`` modules.
|
||||||
While ansible-base 2.10 or newer can use the redirects that community.general 2.0.0 adds, the collection they point to (community.postgresql) must be installed for them to work.
|
While ansible-base 2.10 or newer can use the redirects that community.general 2.0.0 adds, the collection they point to (community.postgresql) must be installed for them to work.
|
||||||
- The Google cloud inventory script ``gce.py`` has been migrated to the ``community.google`` collection. Install the ``community.google`` collection in order to continue using it.
|
- The Google cloud inventory script ``gce.py`` has been migrated to the ``community.google`` collection. Install the ``community.google`` collection in order to continue using it.
|
||||||
- archive - remove path folder itself when ``remove`` paramater is true (https://github.com/ansible-collections/community.general/issues/1041).
|
- archive - remove path folder itself when ``remove`` parameter is true (https://github.com/ansible-collections/community.general/issues/1041).
|
||||||
- log_plays callback - add missing information to the logs generated by the callback plugin. This changes the log message format (https://github.com/ansible-collections/community.general/pull/442).
|
- log_plays callback - add missing information to the logs generated by the callback plugin. This changes the log message format (https://github.com/ansible-collections/community.general/pull/442).
|
||||||
- passwordstore lookup plugin - now parsing a password store entry as YAML if possible, skipping the first line (which by convention only contains the password and nothing else). If it cannot be parsed as YAML, the old ``key: value`` parser will be used to process the entry. Can break backwards compatibility if YAML formatted code was parsed in a non-YAML interpreted way, e.g. ``foo: [bar, baz]`` will become a list with two elements in the new version, but a string ``'[bar, baz]'`` in the old (https://github.com/ansible-collections/community.general/issues/1673).
|
- passwordstore lookup plugin - now parsing a password store entry as YAML if possible, skipping the first line (which by convention only contains the password and nothing else). If it cannot be parsed as YAML, the old ``key: value`` parser will be used to process the entry. Can break backwards compatibility if YAML formatted code was parsed in a non-YAML interpreted way, e.g. ``foo: [bar, baz]`` will become a list with two elements in the new version, but a string ``'[bar, baz]'`` in the old (https://github.com/ansible-collections/community.general/issues/1673).
|
||||||
- pkgng - passing ``name: *`` with ``state: absent`` will no longer remove every installed package from the system. It is now a noop. (https://github.com/ansible-collections/community.general/pull/569).
|
- pkgng - passing ``name: *`` with ``state: absent`` will no longer remove every installed package from the system. It is now a noop. (https://github.com/ansible-collections/community.general/pull/569).
|
||||||
|
|||||||
32
CONTRIBUTING.md
Normal file
32
CONTRIBUTING.md
Normal file
@@ -0,0 +1,32 @@
|
|||||||
|
# Contributing
|
||||||
|
|
||||||
|
We follow [Ansible Code of Conduct](https://docs.ansible.com/ansible/latest/community/code_of_conduct.html) in all our contributions and interactions within this repository.
|
||||||
|
|
||||||
|
If you are a committer, also refer to the [collection's committer guidelines](https://github.com/ansible-collections/community.general/blob/main/commit-rights.md).
|
||||||
|
|
||||||
|
## Issue tracker
|
||||||
|
|
||||||
|
Whether you are looking for an opportunity to contribute or you found a bug and already know how to solve it, please go to the [issue tracker](https://github.com/ansible-collections/community.general/issues).
|
||||||
|
There you can find feature ideas to implement, reports about bugs to solve, or submit an issue to discuss your idea before implementing it which can help choose the right direction at the beginning of your work and potentially save a lot of time and effort.
|
||||||
|
Also somebody may already have started discussing or working on implementing the same or a similar idea,
|
||||||
|
so you can cooperate to create a better solution together.
|
||||||
|
|
||||||
|
* If you are interested in starting with an easy issue, look for [issues with an `easyfix` label](https://github.com/ansible-collections/community.general/labels/easyfix).
|
||||||
|
* Often issues that are waiting for contributors to pick up have [the `waiting_on_contributor` label](https://github.com/ansible-collections/community.general/labels/waiting_on_contributor).
|
||||||
|
|
||||||
|
## Open pull requests
|
||||||
|
|
||||||
|
Look through currently [open pull requests](https://github.com/ansible-collections/community.general/pulls).
|
||||||
|
You can help by reviewing them. Reviews help move pull requests to merge state. Some good pull requests cannot be merged only due to a lack of reviews. And it is always worth saying that good reviews are often more valuable than pull requests themselves.
|
||||||
|
Note that reviewing does not only mean code review, but also offering comments on new interfaces added to existing plugins/modules, interfaces of new plugins/modules, improving language (not everyone is a native English speaker), or testing bugfixes and new features!
|
||||||
|
|
||||||
|
Also, consider taking up a valuable, reviewed, but abandoned pull request which you could politely ask the original authors to complete yourself.
|
||||||
|
|
||||||
|
* Try committing your changes with an informative but short commit message.
|
||||||
|
* All commits of a pull request branch will be squashed into one commit at last. That does not mean you must have only one commit on your pull request, though!
|
||||||
|
* Please try not to force-push if it is not needed, so reviewers and other users looking at your pull request later can see the pull request commit history.
|
||||||
|
* Do not add merge commits to your PR. The bot will complain and you will have to rebase ([instructions for rebasing](https://docs.ansible.com/ansible/latest/dev_guide/developing_rebasing.html)) to remove them before your PR can be merged. To avoid that git automatically does merges during pulls, you can configure it to do rebases instead by running `git config pull.rebase true` inside the repository checkout.
|
||||||
|
|
||||||
|
You can also read [our Quick-start development guide](https://github.com/ansible/community-docs/blob/main/create_pr_quick_start_guide.rst).
|
||||||
|
|
||||||
|
If you find any inconsistencies or places in this document which can be improved, feel free to raise an issue or pull request to fix it.
|
||||||
12
README.md
12
README.md
@@ -7,9 +7,11 @@ This repo contains the `community.general` Ansible Collection. The collection in
|
|||||||
|
|
||||||
You can find [documentation for this collection on the Ansible docs site](https://docs.ansible.com/ansible/latest/collections/community/general/).
|
You can find [documentation for this collection on the Ansible docs site](https://docs.ansible.com/ansible/latest/collections/community/general/).
|
||||||
|
|
||||||
|
Please note that this collection does **not** support Windows targets. Only connection plugins included in this collection might support Windows targets, and will explicitly mention that in their documentation if they do so.
|
||||||
|
|
||||||
## Tested with Ansible
|
## Tested with Ansible
|
||||||
|
|
||||||
Tested with the current Ansible 2.9 and 2.10 releases and the current development version of Ansible. Ansible versions before 2.9.10 are not supported.
|
Tested with the current Ansible 2.9, ansible-base 2.10 and ansible-core 2.11 releases and the current development version of ansible-core. Ansible versions before 2.9.10 are not supported.
|
||||||
|
|
||||||
## External requirements
|
## External requirements
|
||||||
|
|
||||||
@@ -48,6 +50,8 @@ export COLLECTIONS_PATH=$(pwd)/collections:$COLLECTIONS_PATH
|
|||||||
|
|
||||||
You can find more information in the [developer guide for collections](https://docs.ansible.com/ansible/devel/dev_guide/developing_collections.html#contributing-to-collections), and in the [Ansible Community Guide](https://docs.ansible.com/ansible/latest/community/index.html).
|
You can find more information in the [developer guide for collections](https://docs.ansible.com/ansible/devel/dev_guide/developing_collections.html#contributing-to-collections), and in the [Ansible Community Guide](https://docs.ansible.com/ansible/latest/community/index.html).
|
||||||
|
|
||||||
|
Also for some notes specific to this collection see [our CONTRIBUTING documentation](https://github.com/ansible-collections/community.general/blob/main/CONTRIBUTING.md).
|
||||||
|
|
||||||
### Running tests
|
### Running tests
|
||||||
|
|
||||||
See [here](https://docs.ansible.com/ansible/devel/dev_guide/developing_collections.html#testing-collections).
|
See [here](https://docs.ansible.com/ansible/devel/dev_guide/developing_collections.html#testing-collections).
|
||||||
@@ -56,10 +60,10 @@ See [here](https://docs.ansible.com/ansible/devel/dev_guide/developing_collectio
|
|||||||
|
|
||||||
We have a dedicated Working Group for Ansible development.
|
We have a dedicated Working Group for Ansible development.
|
||||||
|
|
||||||
You can find other people interested on the following Freenode IRC channels -
|
You can find other people interested on the following [Libera.chat](https://libera.chat/) IRC channels -
|
||||||
- `#ansible` - For general use questions and support.
|
- `#ansible` - For general use questions and support.
|
||||||
- `#ansible-devel` - For discussions on developer topics and code related to features or bugs.
|
- `#ansible-devel` - For discussions on developer topics and code related to features or bugs in ansible-core.
|
||||||
- `#ansible-community` - For discussions on community topics and community meetings.
|
- `#ansible-community` - For discussions on community topics and community meetings, and for general development questions for community collections.
|
||||||
|
|
||||||
For more information about communities, meetings and agendas see [Community Wiki](https://github.com/ansible/community/wiki/Community).
|
For more information about communities, meetings and agendas see [Community Wiki](https://github.com/ansible/community/wiki/Community).
|
||||||
|
|
||||||
|
|||||||
@@ -145,7 +145,7 @@ releases:
|
|||||||
- The Google cloud inventory script ``gce.py`` has been migrated to the ``community.google``
|
- The Google cloud inventory script ``gce.py`` has been migrated to the ``community.google``
|
||||||
collection. Install the ``community.google`` collection in order to continue
|
collection. Install the ``community.google`` collection in order to continue
|
||||||
using it.
|
using it.
|
||||||
- archive - remove path folder itself when ``remove`` paramater is true (https://github.com/ansible-collections/community.general/issues/1041).
|
- archive - remove path folder itself when ``remove`` parameter is true (https://github.com/ansible-collections/community.general/issues/1041).
|
||||||
- log_plays callback - add missing information to the logs generated by the
|
- log_plays callback - add missing information to the logs generated by the
|
||||||
callback plugin. This changes the log message format (https://github.com/ansible-collections/community.general/pull/442).
|
callback plugin. This changes the log message format (https://github.com/ansible-collections/community.general/pull/442).
|
||||||
- 'passwordstore lookup plugin - now parsing a password store entry as YAML
|
- 'passwordstore lookup plugin - now parsing a password store entry as YAML
|
||||||
@@ -414,7 +414,7 @@ releases:
|
|||||||
- airbrake_deployment - add ``version`` param; clarified docs on ``revision``
|
- airbrake_deployment - add ``version`` param; clarified docs on ``revision``
|
||||||
param (https://github.com/ansible-collections/community.general/pull/583).
|
param (https://github.com/ansible-collections/community.general/pull/583).
|
||||||
- apk - added ``no_cache`` option (https://github.com/ansible-collections/community.general/pull/548).
|
- apk - added ``no_cache`` option (https://github.com/ansible-collections/community.general/pull/548).
|
||||||
- archive - fix paramater types (https://github.com/ansible-collections/community.general/pull/1039).
|
- archive - fix parameter types (https://github.com/ansible-collections/community.general/pull/1039).
|
||||||
- cloudflare_dns - add support for environment variable ``CLOUDFLARE_TOKEN``
|
- cloudflare_dns - add support for environment variable ``CLOUDFLARE_TOKEN``
|
||||||
(https://github.com/ansible-collections/community.general/pull/1238).
|
(https://github.com/ansible-collections/community.general/pull/1238).
|
||||||
- consul - added support for tcp checks (https://github.com/ansible-collections/community.general/issues/1128).
|
- consul - added support for tcp checks (https://github.com/ansible-collections/community.general/issues/1128).
|
||||||
@@ -1544,3 +1544,414 @@ releases:
|
|||||||
name: version_sort
|
name: version_sort
|
||||||
namespace: null
|
namespace: null
|
||||||
release_date: '2021-03-08'
|
release_date: '2021-03-08'
|
||||||
|
2.3.0:
|
||||||
|
changes:
|
||||||
|
bugfixes:
|
||||||
|
- Mark various module options with ``no_log=False`` which have a name that potentially
|
||||||
|
could leak secrets, but which do not (https://github.com/ansible-collections/community.general/pull/2001).
|
||||||
|
- module_helper module utils - actually ignoring formatting of parameters with
|
||||||
|
value ``None`` (https://github.com/ansible-collections/community.general/pull/2024).
|
||||||
|
- module_helper module utils - handling ``ModuleHelperException`` now properly
|
||||||
|
calls ``fail_json()`` (https://github.com/ansible-collections/community.general/pull/2024).
|
||||||
|
- module_helper module utils - use the command name as-is in ``CmdMixin`` if
|
||||||
|
it fails ``get_bin_path()`` - allowing full path names to be passed (https://github.com/ansible-collections/community.general/pull/2024).
|
||||||
|
- nios* modules - fix modules to work with ansible-core 2.11 (https://github.com/ansible-collections/community.general/pull/2057).
|
||||||
|
- proxmox - removed requirement that root password is provided when container
|
||||||
|
state is ``present`` (https://github.com/ansible-collections/community.general/pull/1999).
|
||||||
|
- proxmox inventory - exclude qemu templates from inclusion to the inventory
|
||||||
|
via pools (https://github.com/ansible-collections/community.general/issues/1986,
|
||||||
|
https://github.com/ansible-collections/community.general/pull/1991).
|
||||||
|
- proxmox inventory plugin - allowed proxmox tag string to contain commas when
|
||||||
|
returned as fact (https://github.com/ansible-collections/community.general/pull/1949).
|
||||||
|
- redfish_config module, redfish_utils module utils - fix IndexError in ``SetManagerNic``
|
||||||
|
command (https://github.com/ansible-collections/community.general/issues/1692).
|
||||||
|
- scaleway inventory plugin - fix pagination on scaleway inventory plugin (https://github.com/ansible-collections/community.general/pull/2036).
|
||||||
|
- stacki_host - replaced ``default`` to environment variables with ``fallback``
|
||||||
|
to them (https://github.com/ansible-collections/community.general/pull/2072).
|
||||||
|
minor_changes:
|
||||||
|
- archive - refactored some reused code out into a couple of functions (https://github.com/ansible-collections/community.general/pull/2061).
|
||||||
|
- csv module utils - new module_utils for shared functions between ``from_csv``
|
||||||
|
filter and ``read_csv`` module (https://github.com/ansible-collections/community.general/pull/2037).
|
||||||
|
- ipa_sudorule - add support for setting sudo runasuser (https://github.com/ansible-collections/community.general/pull/2031).
|
||||||
|
- jenkins_job - add a ``validate_certs`` parameter that allows disabling TLS/SSL
|
||||||
|
certificate validation (https://github.com/ansible-collections/community.general/issues/255).
|
||||||
|
- kibana_plugin - add parameter for passing ``--allow-root`` flag to kibana
|
||||||
|
and kibana-plugin commands (https://github.com/ansible-collections/community.general/pull/2014).
|
||||||
|
- proxmox - added ``purge`` module parameter for use when deleting lxc's with
|
||||||
|
HA options (https://github.com/ansible-collections/community.general/pull/2013).
|
||||||
|
- proxmox inventory plugin - added ``tags_parsed`` fact containing tags parsed
|
||||||
|
as a list (https://github.com/ansible-collections/community.general/pull/1949).
|
||||||
|
- proxmox_kvm - added new module parameter ``tags`` for use with PVE 6+ (https://github.com/ansible-collections/community.general/pull/2000).
|
||||||
|
- rax - elements of list parameters are now validated (https://github.com/ansible-collections/community.general/pull/2006).
|
||||||
|
- rax_cdb_user - elements of list parameters are now validated (https://github.com/ansible-collections/community.general/pull/2006).
|
||||||
|
- rax_scaling_group - elements of list parameters are now validated (https://github.com/ansible-collections/community.general/pull/2006).
|
||||||
|
- read_csv - refactored read_csv module to use shared csv functions from csv
|
||||||
|
module_utils (https://github.com/ansible-collections/community.general/pull/2037).
|
||||||
|
- redfish_* modules, redfish_utils module utils - add support for Redfish session
|
||||||
|
create, delete, and authenticate (https://github.com/ansible-collections/community.general/issues/1975).
|
||||||
|
- snmp_facts - added parameters ``timeout`` and ``retries`` to module (https://github.com/ansible-collections/community.general/issues/980).
|
||||||
|
release_summary: Fixes compatibility issues with the latest ansible-core 2.11
|
||||||
|
beta, some more bugs, and contains several new features, modules and plugins.
|
||||||
|
fragments:
|
||||||
|
- 1949-proxmox-inventory-tags.yml
|
||||||
|
- 1977-jenkinsjob-validate-certs.yml
|
||||||
|
- 1991-proxmox-inventory-fix-template-in-pool.yml
|
||||||
|
- 1999-proxmox-fix-issue-1955.yml
|
||||||
|
- 2.3.0.yml
|
||||||
|
- 2000-proxmox_kvm-tag-support.yml
|
||||||
|
- 2001-no_log-false.yml
|
||||||
|
- 2006-valmod-batch8.yml
|
||||||
|
- 2013-proxmox-purge-parameter.yml
|
||||||
|
- 2014-allow-root-for-kibana-plugin.yaml
|
||||||
|
- 2024-module-helper-fixes.yml
|
||||||
|
- 2027-add-redfish-session-create-delete-authenticate.yml
|
||||||
|
- 2031-ipa_sudorule_add_runasextusers.yml
|
||||||
|
- 2036-scaleway-inventory.yml
|
||||||
|
- 2037-add-from-csv-filter.yml
|
||||||
|
- 2040-fix-index-error-in-redfish-set-manager-nic.yml
|
||||||
|
- 2057-nios-devel.yml
|
||||||
|
- 2061-archive-refactor1.yml
|
||||||
|
- 2065-snmp-facts-timeout.yml
|
||||||
|
- 2072-stacki-host-params-fallback.yml
|
||||||
|
modules:
|
||||||
|
- description: Manage Gandi LiveDNS records
|
||||||
|
name: gandi_livedns
|
||||||
|
namespace: net_tools
|
||||||
|
- description: Manage Pritunl Users using the Pritunl API
|
||||||
|
name: pritunl_user
|
||||||
|
namespace: net_tools.pritunl
|
||||||
|
- description: List Pritunl Users using the Pritunl API
|
||||||
|
name: pritunl_user_info
|
||||||
|
namespace: net_tools.pritunl
|
||||||
|
plugins:
|
||||||
|
filter:
|
||||||
|
- description: Converts CSV text input into list of dicts
|
||||||
|
name: from_csv
|
||||||
|
namespace: null
|
||||||
|
release_date: '2021-03-23'
|
||||||
|
2.4.0:
|
||||||
|
changes:
|
||||||
|
bugfixes:
|
||||||
|
- git_config - fixed scope ``file`` behaviour and added integration test for
|
||||||
|
it (https://github.com/ansible-collections/community.general/issues/2117).
|
||||||
|
- zypper, zypper_repository - respect ``PATH`` environment variable when resolving
|
||||||
|
zypper executable path (https://github.com/ansible-collections/community.general/pull/2094).
|
||||||
|
minor_changes:
|
||||||
|
- vdo - add ``force`` option (https://github.com/ansible-collections/community.general/issues/2101).
|
||||||
|
release_summary: Regular feature and bugfix release.
|
||||||
|
fragments:
|
||||||
|
- 2.4.0.yml
|
||||||
|
- 2094-bugfix-respect-PATH-env-variable-in-zypper-modules.yaml
|
||||||
|
- 2110-vdo-add_force_option.yaml
|
||||||
|
- 2125-git-config-scope-file.yml
|
||||||
|
modules:
|
||||||
|
- description: Manages OpenNebula templates
|
||||||
|
name: one_template
|
||||||
|
namespace: cloud.opennebula
|
||||||
|
- description: Manages Lenovo Out-Of-Band controllers using Redfish APIs
|
||||||
|
name: xcc_redfish_command
|
||||||
|
namespace: remote_management.lenovoxcc
|
||||||
|
plugins:
|
||||||
|
become:
|
||||||
|
- description: Run tasks using sudo su -
|
||||||
|
name: sudosu
|
||||||
|
namespace: null
|
||||||
|
callback:
|
||||||
|
- description: Posts task results to Azure Log Analytics
|
||||||
|
name: loganalytics
|
||||||
|
namespace: null
|
||||||
|
release_date: '2021-03-30'
|
||||||
|
2.5.0:
|
||||||
|
changes:
|
||||||
|
bugfixes:
|
||||||
|
- dimensiondata_network - bug when formatting message, instead of % a simple
|
||||||
|
comma was used (https://github.com/ansible-collections/community.general/pull/2139).
|
||||||
|
- github_repo - PyGithub bug does not allow explicit port in ``base_url``. Specifying
|
||||||
|
port is not required (https://github.com/PyGithub/PyGithub/issues/1913).
|
||||||
|
- haproxy - fix a bug preventing haproxy from properly entering ``DRAIN`` mode
|
||||||
|
(https://github.com/ansible-collections/community.general/issues/1913).
|
||||||
|
- ipa_user - allow ``sshpubkey`` to permit multiple word comments (https://github.com/ansible-collections/community.general/pull/2159).
|
||||||
|
- 'java_cert - allow setting ``state: absent`` by providing just the ``cert_alias``
|
||||||
|
(https://github.com/ansible/ansible/issues/27982).'
|
||||||
|
- java_cert - properly handle proxy arguments when the scheme is provided (https://github.com/ansible/ansible/issues/54481).
|
||||||
|
- java_keystore - improve error handling and return ``cmd`` as documented. Force
|
||||||
|
``LANG``, ``LC_ALL`` and ``LC_MESSAGES`` environment variables to ``C`` to
|
||||||
|
rely on ``keytool`` output parsing. Fix pylint's ``unused-variable`` and ``no-else-return``
|
||||||
|
hints (https://github.com/ansible-collections/community.general/pull/2183).
|
||||||
|
- java_keystore - use tempfile lib to create temporary files with randomized
|
||||||
|
names, and remove the temporary PKCS#12 keystore as well as other materials
|
||||||
|
(https://github.com/ansible-collections/community.general/issues/1667).
|
||||||
|
- jira - fixed fields' update in ticket transitions (https://github.com/ansible-collections/community.general/issues/818).
|
||||||
|
- kibana_plugin - added missing parameters to ``remove_plugin`` when using ``state=present
|
||||||
|
force=true``, and fix potential quoting errors when invoking ``kibana`` (https://github.com/ansible-collections/community.general/pull/2143).
|
||||||
|
- module_helper module utils - fixed decorator ``cause_changes`` (https://github.com/ansible-collections/community.general/pull/2203).
|
||||||
|
- pkgutil - fixed calls to ``list.extend()`` (https://github.com/ansible-collections/community.general/pull/2161).
|
||||||
|
- vmadm - correct type of list elements in ``resolvers`` parameter (https://github.com/ansible-collections/community.general/issues/2135).
|
||||||
|
- xfconf - module was not honoring check mode when ``state`` was ``absent``
|
||||||
|
(https://github.com/ansible-collections/community.general/pull/2185).
|
||||||
|
minor_changes:
|
||||||
|
- apache2_mod_proxy - refactored/cleaned-up part of the code (https://github.com/ansible-collections/community.general/pull/2142).
|
||||||
|
- atomic_container - using ``get_bin_path()`` before calling ``run_command()``
|
||||||
|
(https://github.com/ansible-collections/community.general/pull/2144).
|
||||||
|
- atomic_host - using ``get_bin_path()`` before calling ``run_command()`` (https://github.com/ansible-collections/community.general/pull/2144).
|
||||||
|
- atomic_image - using ``get_bin_path()`` before calling ``run_command()`` (https://github.com/ansible-collections/community.general/pull/2144).
|
||||||
|
- beadm - minor refactor converting multiple statements to a single list literal
|
||||||
|
(https://github.com/ansible-collections/community.general/pull/2160).
|
||||||
|
- bitbucket_pipeline_variable - removed unreachable code (https://github.com/ansible-collections/community.general/pull/2157).
|
||||||
|
- hiera lookup - minor refactor converting multiple statements to a single list
|
||||||
|
literal (https://github.com/ansible-collections/community.general/pull/2160).
|
||||||
|
- ipa_config - add new options ``ipaconfigstring``, ``ipadefaultprimarygroup``,
|
||||||
|
``ipagroupsearchfields``, ``ipahomesrootdir``, ``ipabrkauthzdata``, ``ipamaxusernamelength``,
|
||||||
|
``ipapwdexpadvnotify``, ``ipasearchrecordslimit``, ``ipasearchtimelimit``,
|
||||||
|
``ipauserauthtype``, and ``ipausersearchfields`` (https://github.com/ansible-collections/community.general/pull/2116).
|
||||||
|
- ipa_user - fix ``userauthtype`` option to take in list of strings for the
|
||||||
|
multi-select field instead of single string (https://github.com/ansible-collections/community.general/pull/2174).
|
||||||
|
- ipwcli_dns - minor refactor converting multiple statements to a single list
|
||||||
|
literal (https://github.com/ansible-collections/community.general/pull/2160).
|
||||||
|
- 'java_cert - change ``state: present`` to check certificates by hash, not
|
||||||
|
just alias name (https://github.com/ansible/ansible/issues/43249).'
|
||||||
|
- jira - added ``attach`` operation, which allows a user to attach a file to
|
||||||
|
an issue (https://github.com/ansible-collections/community.general/pull/2192).
|
||||||
|
- jira - added parameter ``account_id`` for compatibility with recent versions
|
||||||
|
of JIRA (https://github.com/ansible-collections/community.general/issues/818,
|
||||||
|
https://github.com/ansible-collections/community.general/pull/1978).
|
||||||
|
- known_hosts module utils - minor refactor converting multiple statements to
|
||||||
|
a single list literal (https://github.com/ansible-collections/community.general/pull/2160).
|
||||||
|
- module_helper module utils - added management of facts and adhoc setting of
|
||||||
|
the initial value for variables (https://github.com/ansible-collections/community.general/pull/2188).
|
||||||
|
- module_helper module utils - added mechanism to manage variables, providing
|
||||||
|
automatic output of variables, change status and diff information (https://github.com/ansible-collections/community.general/pull/2162).
|
||||||
|
- nictagadm - minor refactor converting multiple statements to a single list
|
||||||
|
literal (https://github.com/ansible-collections/community.general/pull/2160).
|
||||||
|
- npm - add ``no_bin_links`` option (https://github.com/ansible-collections/community.general/issues/2128).
|
||||||
|
- ovh_ip_failover - removed unreachable code (https://github.com/ansible-collections/community.general/pull/2157).
|
||||||
|
- proxmox inventory plugin - added ``Constructable`` class to the inventory
|
||||||
|
to provide options ``strict``, ``keyed_groups``, ``groups``, and ``compose``
|
||||||
|
(https://github.com/ansible-collections/community.general/pull/2180).
|
||||||
|
- proxmox inventory plugin - added ``proxmox_agent_interfaces`` fact describing
|
||||||
|
network interfaces returned from a QEMU guest agent (https://github.com/ansible-collections/community.general/pull/2148).
|
||||||
|
- rhevm - removed unreachable code (https://github.com/ansible-collections/community.general/pull/2157).
|
||||||
|
- smartos_image_info - minor refactor converting multiple statements to a single
|
||||||
|
list literal (https://github.com/ansible-collections/community.general/pull/2160).
|
||||||
|
- svr4pkg - minor refactor converting multiple statements to a single list literal
|
||||||
|
(https://github.com/ansible-collections/community.general/pull/2160).
|
||||||
|
- xattr - minor refactor converting multiple statements to a single list literal
|
||||||
|
(https://github.com/ansible-collections/community.general/pull/2160).
|
||||||
|
- xfconf - changed implementation to use ``ModuleHelper`` new features (https://github.com/ansible-collections/community.general/pull/2188).
|
||||||
|
- zfs_facts - minor refactor converting multiple statements to a single list
|
||||||
|
literal (https://github.com/ansible-collections/community.general/pull/2160).
|
||||||
|
- zpool_facts - minor refactor converting multiple statements to a single list
|
||||||
|
literal (https://github.com/ansible-collections/community.general/pull/2160).
|
||||||
|
release_summary: Regular feature release. Will be the last 2.x.0 minor release.
|
||||||
|
security_fixes:
|
||||||
|
- java_cert - remove password from ``run_command`` arguments (https://github.com/ansible-collections/community.general/pull/2008).
|
||||||
|
- java_keystore - pass secret to keytool through an environment variable to
|
||||||
|
not expose it as a commandline argument (https://github.com/ansible-collections/community.general/issues/1668).
|
||||||
|
fragments:
|
||||||
|
- 1978-jira-transition-logic.yml
|
||||||
|
- 1993-haproxy-fix-draining.yml
|
||||||
|
- 2.5.0.yml
|
||||||
|
- 2008-update-java-cert-replace-cert-when-changed.yml
|
||||||
|
- 2116-add-fields-to-ipa-config-module.yml
|
||||||
|
- 2135-vmadm-resolvers-type-fix.yml
|
||||||
|
- 2139-dimensiondata_network-str-format.yml
|
||||||
|
- 2142-apache2_mod_proxy-cleanup.yml
|
||||||
|
- 2143-kibana_plugin-fixed-function-calls.yml
|
||||||
|
- 2144-atomic_get_bin_path.yml
|
||||||
|
- 2146-npm-add_no_bin_links_option.yaml
|
||||||
|
- 2148-proxmox-inventory-agent-interfaces.yml
|
||||||
|
- 2157-unreachable-code.yml
|
||||||
|
- 2159-ipa-user-sshpubkey-multi-word-comments.yaml
|
||||||
|
- 2160-list-literals.yml
|
||||||
|
- 2161-pkgutil-list-extend.yml
|
||||||
|
- 2162-modhelper-variables.yml
|
||||||
|
- 2162-proxmox-constructable.yml
|
||||||
|
- 2163-java_keystore_1667_improve_temp_files_storage.yml
|
||||||
|
- 2174-ipa-user-userauthtype-multiselect.yml
|
||||||
|
- 2177-java_keystore_1668_dont_expose_secrets_on_cmdline.yml
|
||||||
|
- 2183-java_keystore_improve_error_handling.yml
|
||||||
|
- 2185-xfconf-absent-check-mode.yml
|
||||||
|
- 2188-xfconf-modhelper-variables.yml
|
||||||
|
- 2192-add-jira-attach.yml
|
||||||
|
- 2203-modhelper-cause-changes-deco.yml
|
||||||
|
- 2204-github_repo-fix-baseurl_port.yml
|
||||||
|
- dict-filter.yml
|
||||||
|
- path_join-shim-filter.yml
|
||||||
|
modules:
|
||||||
|
- description: Manage FreeIPA OTP Configuration Settings
|
||||||
|
name: ipa_otpconfig
|
||||||
|
namespace: identity.ipa
|
||||||
|
- description: Manage FreeIPA OTPs
|
||||||
|
name: ipa_otptoken
|
||||||
|
namespace: identity.ipa
|
||||||
|
- description: Manages Pritunl Organizations using the Pritunl API
|
||||||
|
name: pritunl_org
|
||||||
|
namespace: net_tools.pritunl
|
||||||
|
- description: List Pritunl Organizations using the Pritunl API
|
||||||
|
name: pritunl_org_info
|
||||||
|
namespace: net_tools.pritunl
|
||||||
|
- description: Enforce a model's attributes in CA Spectrum.
|
||||||
|
name: spectrum_model_attrs
|
||||||
|
namespace: monitoring
|
||||||
|
plugins:
|
||||||
|
filter:
|
||||||
|
- description: 'The ``dict`` function as a filter: converts a list of tuples
|
||||||
|
to a dictionary'
|
||||||
|
name: dict
|
||||||
|
namespace: null
|
||||||
|
- description: Redirects to ansible.builtin.path_join for ansible-base 2.10
|
||||||
|
or newer, and provides a compatible implementation for Ansible 2.9
|
||||||
|
name: path_join
|
||||||
|
namespace: null
|
||||||
|
release_date: '2021-04-13'
|
||||||
|
2.5.1:
|
||||||
|
changes:
|
||||||
|
bugfixes:
|
||||||
|
- funcd connection plugin - can now load (https://github.com/ansible-collections/community.general/pull/2235).
|
||||||
|
- jira - fixed calling of ``isinstance`` (https://github.com/ansible-collections/community.general/issues/2234).
|
||||||
|
release_summary: Bugfix release for some bugs discovered right after the 2.5.0
|
||||||
|
release.
|
||||||
|
fragments:
|
||||||
|
- 2.5.1.yml
|
||||||
|
- 2236-jira-isinstance.yml
|
||||||
|
- allow_funcd_to_load.yml
|
||||||
|
release_date: '2021-04-14'
|
||||||
|
2.5.2:
|
||||||
|
changes:
|
||||||
|
bugfixes:
|
||||||
|
- composer - use ``no-interaction`` option when discovering available options
|
||||||
|
to avoid an issue where composer hangs (https://github.com/ansible-collections/community.general/pull/2348).
|
||||||
|
- hiera lookup plugin - converts the return type of plugin to unicode string
|
||||||
|
(https://github.com/ansible-collections/community.general/pull/2329).
|
||||||
|
- influxdb_retention_policy - ensure idempotent module execution with different
|
||||||
|
duration and shard duration parameter values (https://github.com/ansible-collections/community.general/issues/2281).
|
||||||
|
- influxdb_retention_policy - fix bug where ``INF`` duration values failed parsing
|
||||||
|
(https://github.com/ansible-collections/community.general/pull/2385).
|
||||||
|
- inventory and vault scripts - change file permissions to make vendored inventory
|
||||||
|
and vault scripts executable (https://github.com/ansible-collections/community.general/pull/2337).
|
||||||
|
- jenkins_plugin - fixes Python 2 compatibility issue (https://github.com/ansible-collections/community.general/pull/2340).
|
||||||
|
- jira - fixed error when loading base64-encoded content as attachment (https://github.com/ansible-collections/community.general/pull/2349).
|
||||||
|
- linode_v4 - changed the error message to point to the correct bugtracker URL
|
||||||
|
(https://github.com/ansible-collections/community.general/pull/2430).
|
||||||
|
- nmap inventory plugin - fix cache and constructed group support (https://github.com/ansible-collections/community.general/issues/2242).
|
||||||
|
- nmcli - compare MAC addresses case insensitively to fix idempotency issue
|
||||||
|
(https://github.com/ansible-collections/community.general/issues/2409).
|
||||||
|
- nmcli - if type is ``bridge-slave`` add ``slave-type bridge`` to ``nmcli``
|
||||||
|
command (https://github.com/ansible-collections/community.general/issues/2408).
|
||||||
|
- one_vm - Allow missing NIC keys (https://github.com/ansible-collections/community.general/pull/2435).
|
||||||
|
- ovirt* modules - remove bad unnecessary import for current ansible-core development
|
||||||
|
version (https://github.com/ansible-collections/community.general/pull/2381).
|
||||||
|
- proxmox inventory - added handling of commas in KVM agent configuration string
|
||||||
|
(https://github.com/ansible-collections/community.general/pull/2245).
|
||||||
|
- puppet - replace ``console` with ``stdout`` in ``logdest`` option when ``all``
|
||||||
|
has been chosen (https://github.com/ansible-collections/community.general/issues/1190).
|
||||||
|
- stackpath_compute inventory script - fix broken validation checks for client
|
||||||
|
ID and client secret (https://github.com/ansible-collections/community.general/pull/2448).
|
||||||
|
- svr4pkg - convert string to a bytes-like object to avoid ``TypeError`` with
|
||||||
|
Python 3 (https://github.com/ansible-collections/community.general/issues/2373).
|
||||||
|
- terraform - fix issue that cause the destroy to fail because from Terraform
|
||||||
|
0.15 on, the ``terraform destroy -force`` option is replaced with ``terraform
|
||||||
|
destroy -auto-approve`` (https://github.com/ansible-collections/community.general/issues/2247).
|
||||||
|
- terraform - fix issue that cause the execution fail because from Terraform
|
||||||
|
0.15 on, the ``-var`` and ``-var-file`` options are no longer available on
|
||||||
|
``terraform validate`` (https://github.com/ansible-collections/community.general/pull/2246).
|
||||||
|
- terraform - remove uses of ``use_unsafe_shell=True`` (https://github.com/ansible-collections/community.general/pull/2246).
|
||||||
|
- zfs - certain ZFS properties, especially sizes, would lead to a task being
|
||||||
|
falsely marked as "changed" even when no actual change was made (https://github.com/ansible-collections/community.general/issues/975,
|
||||||
|
https://github.com/ansible-collections/community.general/pull/2454).
|
||||||
|
release_summary: Regular bugfix release.
|
||||||
|
fragments:
|
||||||
|
- 2.5.2.yml
|
||||||
|
- 2245-proxmox_fix_agent_string_handling.yml
|
||||||
|
- 2246-terraform.yaml
|
||||||
|
- 2282-nmap-fix-cache-support.yml
|
||||||
|
- 2284-influxdb_retention_policy-fix_duration_parsing.yml
|
||||||
|
- 2284-influxdb_retention_policy-idempotence.yml
|
||||||
|
- 2329-hiera-lookup-plugin-return-type.yaml
|
||||||
|
- 2337-mark-inventory-scripts-executable.yml
|
||||||
|
- 2340-jenkins_plugin-py2.yml
|
||||||
|
- 2348-composer-no-interaction-option-discovery-to-avoid-hang.yaml
|
||||||
|
- 2349-jira-bugfix-b64decode.yml
|
||||||
|
- 2373-svr4pkg-fix-typeerror.yml
|
||||||
|
- 2407-puppet-change_stdout_to_console.yaml
|
||||||
|
- 2409-nmcli_add_slave-type_bridge_to_nmcli_command_if_type_is_bridge-slave.yml
|
||||||
|
- 2416-nmcli_compare_mac_addresses_case_insensitively.yml
|
||||||
|
- 2430-linodev4-error-message.yml
|
||||||
|
- 2435-one_vm-fix_missing_keys.yml
|
||||||
|
- 2448-stackpath_compute-fix.yml
|
||||||
|
- 2454-detect_zfs_changed.yml
|
||||||
|
- ovirt-fixup.yml
|
||||||
|
release_date: '2021-05-11'
|
||||||
|
2.5.3:
|
||||||
|
changes:
|
||||||
|
bugfixes:
|
||||||
|
- consul_acl - update the hcl allowlist to include all supported options (https://github.com/ansible-collections/community.general/pull/2495).
|
||||||
|
- consul_kv lookup plugin - allow to set ``recurse``, ``index``, ``datacenter``
|
||||||
|
and ``token`` as keyword arguments (https://github.com/ansible-collections/community.general/issues/2124).
|
||||||
|
- influxdb_user - allow creation of admin users when InfluxDB authentication
|
||||||
|
is enabled but no other user exists on the database. In this scenario, InfluxDB
|
||||||
|
1.x allows only ``CREATE USER`` queries and rejects any other query (https://github.com/ansible-collections/community.general/issues/2364).
|
||||||
|
- influxdb_user - fix bug where an influxdb user has no privileges for 2 or
|
||||||
|
more databases (https://github.com/ansible-collections/community.general/pull/2499).
|
||||||
|
- influxdb_user - fix bug which removed current privileges instead of appending
|
||||||
|
them to existing ones (https://github.com/ansible-collections/community.general/issues/2609,
|
||||||
|
https://github.com/ansible-collections/community.general/pull/2614).
|
||||||
|
- iptables_state - call ``async_status`` action plugin rather than its module
|
||||||
|
(https://github.com/ansible-collections/community.general/issues/2700).
|
||||||
|
- iptables_state - fix a 'FutureWarning' in a regex and do some basic code clean
|
||||||
|
up (https://github.com/ansible-collections/community.general/pull/2525).
|
||||||
|
- iptables_state - fix a broken query of ``async_status`` result with current
|
||||||
|
ansible-core development version (https://github.com/ansible-collections/community.general/issues/2627,
|
||||||
|
https://github.com/ansible-collections/community.general/pull/2671).
|
||||||
|
- iptables_state - fix initialization of iptables from null state when adressing
|
||||||
|
more than one table (https://github.com/ansible-collections/community.general/issues/2523).
|
||||||
|
- java_cert - fix issue with incorrect alias used on PKCS#12 certificate import
|
||||||
|
(https://github.com/ansible-collections/community.general/pull/2560).
|
||||||
|
- jenkins_plugin - use POST method for sending request to jenkins API when ``state``
|
||||||
|
option is one of ``enabled``, ``disabled``, ``pinned``, ``unpinned``, or ``absent``
|
||||||
|
(https://github.com/ansible-collections/community.general/issues/2510).
|
||||||
|
- json_query filter plugin - avoid 'unknown type' errors for more Ansible internal
|
||||||
|
types (https://github.com/ansible-collections/community.general/pull/2607).
|
||||||
|
- module_helper module utils - ``CmdMixin`` must also use ``LC_ALL`` to enforce
|
||||||
|
locale choice (https://github.com/ansible-collections/community.general/pull/2731).
|
||||||
|
- netcup_dns - use ``str(ex)`` instead of unreliable ``ex.message`` in exception
|
||||||
|
handling to fix ``AttributeError`` in error cases (https://github.com/ansible-collections/community.general/pull/2590).
|
||||||
|
- nmap inventory plugin - fix local variable error when cache is disabled (https://github.com/ansible-collections/community.general/issues/2512).
|
||||||
|
- ovir4 inventory script - improve configparser creation to avoid crashes for
|
||||||
|
options without values (https://github.com/ansible-collections/community.general/issues/674).
|
||||||
|
- proxmox_kvm - fixed ``vmid`` return value when VM with ``name`` already exists
|
||||||
|
(https://github.com/ansible-collections/community.general/issues/2648).
|
||||||
|
- redis cache - improved connection string parsing (https://github.com/ansible-collections/community.general/issues/497).
|
||||||
|
- rhsm_release - fix the issue that module considers 8, 7Client and 7Workstation
|
||||||
|
as invalid releases (https://github.com/ansible-collections/community.general/pull/2571).
|
||||||
|
- ssh_config - reduce stormssh searches based on host (https://github.com/ansible-collections/community.general/pull/2568/).
|
||||||
|
- terraform - ensure the workspace is set back to its previous value when the
|
||||||
|
apply fails (https://github.com/ansible-collections/community.general/pull/2634).
|
||||||
|
- xfconf - also use ``LC_ALL`` to enforce locale choice (https://github.com/ansible-collections/community.general/issues/2715).
|
||||||
|
- zypper_repository - fix idempotency on adding repository with ``$releasever``
|
||||||
|
and ``$basearch`` variables (https://github.com/ansible-collections/community.general/issues/1985).
|
||||||
|
release_summary: Regular bugfix release.
|
||||||
|
fragments:
|
||||||
|
- 1085-consul-acl-hcl-whitelist-update.yml
|
||||||
|
- 2.5.3.yml
|
||||||
|
- 2126-consul_kv-pass-token.yml
|
||||||
|
- 2364-influxdb_user-first_user.yml
|
||||||
|
- 2461-ovirt4-fix-configparser.yml
|
||||||
|
- 2499-influxdb_user-fix-multiple-no-privileges.yml
|
||||||
|
- 2510-jenkins_plugin_use_post_method.yml
|
||||||
|
- 2518-nmap-fix-cache-disabled.yml
|
||||||
|
- 2525-iptables_state-fix-initialization-command.yml
|
||||||
|
- 2560-java_cert-pkcs12-alias-bugfix.yml
|
||||||
|
- 2568-ssh_config-reduce-stormssh-searches-based-on-host.yml
|
||||||
|
- 2571-rhsm_release-fix-release_matcher.yaml
|
||||||
|
- 2579-redis-cache-ipv6.yml
|
||||||
|
- 2590-netcup_dns-exception-no-message-attr.yml
|
||||||
|
- 2614-influxdb_user-fix-issue-introduced-in-PR#2499.yml
|
||||||
|
- 2634-terraform-switch-workspace.yml
|
||||||
|
- 2648-proxmox_kvm-fix-vmid-return-value.yml
|
||||||
|
- 2671-fix-broken-query-of-async_status-result.yml
|
||||||
|
- 2711-fix-iptables_state-2700-async_status-call.yml
|
||||||
|
- 2722-zypper_repository-fix_idempotency_on_adding_repo_with_releasever.yml
|
||||||
|
- 2731-mh-cmd-locale.yml
|
||||||
|
- json_query_more_types.yml
|
||||||
|
release_date: '2021-06-08'
|
||||||
|
|||||||
74
commit-rights.md
Normal file
74
commit-rights.md
Normal file
@@ -0,0 +1,74 @@
|
|||||||
|
Committers Guidelines for community.general
|
||||||
|
===========================================
|
||||||
|
|
||||||
|
This document is based on the [Ansible committer guidelines](https://github.com/ansible/ansible/blob/b57444af14062ec96e0af75fdfc2098c74fe2d9a/docs/docsite/rst/community/committer_guidelines.rst) ([latest version](https://docs.ansible.com/ansible/devel/community/committer_guidelines.html)).
|
||||||
|
|
||||||
|
These are the guidelines for people with commit privileges on the Ansible Community General Collection GitHub repository. Please read the guidelines before you commit.
|
||||||
|
|
||||||
|
These guidelines apply to everyone. At the same time, this is NOT a process document. So just use good judgment. You have been given commit access because we trust your judgment.
|
||||||
|
|
||||||
|
That said, use the trust wisely.
|
||||||
|
|
||||||
|
If you abuse the trust and break components and builds, and so on, the trust level falls and you may be asked not to commit or you may lose your commit privileges.
|
||||||
|
|
||||||
|
Our workflow on GitHub
|
||||||
|
----------------------
|
||||||
|
|
||||||
|
As a committer, you may already know this, but our workflow forms a lot of our team policies. Please ensure you are aware of the following workflow steps:
|
||||||
|
|
||||||
|
* Fork the repository upon which you want to do some work to your own personal repository
|
||||||
|
* Work on the specific branch upon which you need to commit
|
||||||
|
* Create a Pull Request back to the collection repository and await reviews
|
||||||
|
* Adjust code as necessary based on the Comments provided
|
||||||
|
* Ask someone from the other committers to do a final review and merge
|
||||||
|
|
||||||
|
Sometimes, committers merge their own pull requests. This section is a set of guidelines. If you are changing a comma in a doc or making a very minor change, you can use your best judgement. This is another trust thing. The process is critical for any major change, but for little things or getting something done quickly, use your best judgement and make sure people on the team are aware of your work.
|
||||||
|
|
||||||
|
Roles
|
||||||
|
-----
|
||||||
|
* Release managers: Merge pull requests to `stable-X` branches, create tags to do releases.
|
||||||
|
* Committers: Fine to do PRs for most things, but we should have a timebox. Hanging PRs may merge on the judgement of these devs.
|
||||||
|
* Module maintainers: Module maintainers own specific modules and have indirect commit access through the current module PR mechanisms. This is primary [ansibullbot](https://github.com/ansibullbot)'s `shipit` mechanism.
|
||||||
|
|
||||||
|
General rules
|
||||||
|
-------------
|
||||||
|
Individuals with direct commit access to this collection repository are entrusted with powers that allow them to do a broad variety of things--probably more than we can write down. Rather than rules, treat these as general *guidelines*, individuals with this power are expected to use their best judgement.
|
||||||
|
|
||||||
|
* Do NOTs:
|
||||||
|
|
||||||
|
- Do not commit directly.
|
||||||
|
- Do not merge your own PRs. Someone else should have a chance to review and approve the PR merge. You have a small amount of leeway here for very minor changes.
|
||||||
|
- Do not forget about non-standard / alternate environments. Consider the alternatives. Yes, people have bad/unusual/strange environments (like binaries from multiple init systems installed), but they are the ones who need us the most.
|
||||||
|
- Do not drag your community team members down. Discuss the technical merits of any pull requests you review. Avoid negativity and personal comments. For more guidance on being a good community member, read the [Ansible Community Code of Conduct](https://docs.ansible.com/ansible/latest/community/code_of_conduct.html).
|
||||||
|
- Do not forget about the maintenance burden. High-maintenance features may not be worth adding.
|
||||||
|
- Do not break playbooks. Always keep backwards compatibility in mind.
|
||||||
|
- Do not forget to keep it simple. Complexity breeds all kinds of problems.
|
||||||
|
- Do not merge to branches other than `main`, especially not to `stable-X`, if you do not have explicit permission to do so.
|
||||||
|
- Do not create tags. Tags are used in the release process, and should only be created by the people responsible for managing the stable branches.
|
||||||
|
|
||||||
|
* Do:
|
||||||
|
|
||||||
|
- Squash, avoid merges whenever possible, use GitHub's squash commits or cherry pick if needed (bisect thanks you).
|
||||||
|
- Be active. Committers who have no activity on the project (through merges, triage, commits, and so on) will have their permissions suspended.
|
||||||
|
- Consider backwards compatibility (goes back to "do not break existing playbooks").
|
||||||
|
- Write tests. PRs with tests are looked at with more priority than PRs without tests that should have them included. While not all changes require tests, be sure to add them for bug fixes or functionality changes.
|
||||||
|
- Discuss with other committers, specially when you are unsure of something.
|
||||||
|
- Document! If your PR is a new feature or a change to behavior, make sure you've updated all associated documentation or have notified the right people to do so.
|
||||||
|
- Consider scope, sometimes a fix can be generalized.
|
||||||
|
- Keep it simple, then things are maintainable, debuggable and intelligible.
|
||||||
|
|
||||||
|
Committers are expected to continue to follow the same community and contribution guidelines followed by the rest of the Ansible community.
|
||||||
|
|
||||||
|
|
||||||
|
People
|
||||||
|
------
|
||||||
|
|
||||||
|
Individuals who have been asked to become a part of this group have generally been contributing in significant ways to the community.general collection for some time. Should they agree, they are requested to add their names and GitHub IDs to this file, in the section below, through a pull request. Doing so indicates that these individuals agree to act in the ways that their fellow committers trust that they will act.
|
||||||
|
|
||||||
|
| Name | GitHub ID | IRC Nick | Other |
|
||||||
|
| ------------------- | -------------------- | ------------------ | -------------------- |
|
||||||
|
| Alexei Znamensky | russoz | russoz | |
|
||||||
|
| Amin Vakil | aminvakil | aminvakil | |
|
||||||
|
| Andrew Klychkov | andersson007 | andersson007_ | |
|
||||||
|
| Felix Fontein | felixfontein | felixfontein | |
|
||||||
|
| John R Barker | gundalow | gundalow | |
|
||||||
@@ -1,6 +1,6 @@
|
|||||||
namespace: community
|
namespace: community
|
||||||
name: general
|
name: general
|
||||||
version: 2.2.0
|
version: 2.5.3
|
||||||
readme: README.md
|
readme: README.md
|
||||||
authors:
|
authors:
|
||||||
- Ansible (https://github.com/ansible)
|
- Ansible (https://github.com/ansible)
|
||||||
|
|||||||
@@ -601,3 +601,10 @@ plugin_routing:
|
|||||||
redirect: community.docker.docker_swarm
|
redirect: community.docker.docker_swarm
|
||||||
kubevirt:
|
kubevirt:
|
||||||
redirect: community.kubevirt.kubevirt
|
redirect: community.kubevirt.kubevirt
|
||||||
|
filter:
|
||||||
|
path_join:
|
||||||
|
# The ansible.builtin.path_join filter has been added in ansible-base 2.10.
|
||||||
|
# Since plugin routing is only available since ansible-base 2.10, this
|
||||||
|
# redirect will be used for ansible-base 2.10 or later, and the included
|
||||||
|
# path_join filter will be used for Ansible 2.9 or earlier.
|
||||||
|
redirect: ansible.builtin.path_join
|
||||||
|
|||||||
@@ -7,7 +7,7 @@ __metaclass__ = type
|
|||||||
import time
|
import time
|
||||||
|
|
||||||
from ansible.plugins.action import ActionBase
|
from ansible.plugins.action import ActionBase
|
||||||
from ansible.errors import AnsibleError, AnsibleActionFail, AnsibleConnectionFailure
|
from ansible.errors import AnsibleActionFail, AnsibleConnectionFailure
|
||||||
from ansible.utils.vars import merge_hash
|
from ansible.utils.vars import merge_hash
|
||||||
from ansible.utils.display import Display
|
from ansible.utils.display import Display
|
||||||
|
|
||||||
@@ -40,19 +40,27 @@ class ActionModule(ActionBase):
|
|||||||
"(=%s) to 0, and 'async' (=%s) to a value >2 and not greater than "
|
"(=%s) to 0, and 'async' (=%s) to a value >2 and not greater than "
|
||||||
"'ansible_timeout' (=%s) (recommended).")
|
"'ansible_timeout' (=%s) (recommended).")
|
||||||
|
|
||||||
def _async_result(self, module_args, task_vars, timeout):
|
def _async_result(self, async_status_args, task_vars, timeout):
|
||||||
'''
|
'''
|
||||||
Retrieve results of the asynchonous task, and display them in place of
|
Retrieve results of the asynchonous task, and display them in place of
|
||||||
the async wrapper results (those with the ansible_job_id key).
|
the async wrapper results (those with the ansible_job_id key).
|
||||||
'''
|
'''
|
||||||
|
async_status = self._task.copy()
|
||||||
|
async_status.args = async_status_args
|
||||||
|
async_status.action = 'ansible.builtin.async_status'
|
||||||
|
async_status.async_val = 0
|
||||||
|
async_action = self._shared_loader_obj.action_loader.get(
|
||||||
|
async_status.action, task=async_status, connection=self._connection,
|
||||||
|
play_context=self._play_context, loader=self._loader, templar=self._templar,
|
||||||
|
shared_loader_obj=self._shared_loader_obj)
|
||||||
|
|
||||||
|
if async_status.args['mode'] == 'cleanup':
|
||||||
|
return async_action.run(task_vars=task_vars)
|
||||||
|
|
||||||
# At least one iteration is required, even if timeout is 0.
|
# At least one iteration is required, even if timeout is 0.
|
||||||
for i in range(max(1, timeout)):
|
for dummy in range(max(1, timeout)):
|
||||||
async_result = self._execute_module(
|
async_result = async_action.run(task_vars=task_vars)
|
||||||
module_name='ansible.builtin.async_status',
|
if async_result.get('finished', 0) == 1:
|
||||||
module_args=module_args,
|
|
||||||
task_vars=task_vars,
|
|
||||||
wrap_async=False)
|
|
||||||
if async_result['finished'] == 1:
|
|
||||||
break
|
break
|
||||||
time.sleep(min(1, timeout))
|
time.sleep(min(1, timeout))
|
||||||
|
|
||||||
@@ -76,7 +84,6 @@ class ActionModule(ActionBase):
|
|||||||
task_async = self._task.async_val
|
task_async = self._task.async_val
|
||||||
check_mode = self._play_context.check_mode
|
check_mode = self._play_context.check_mode
|
||||||
max_timeout = self._connection._play_context.timeout
|
max_timeout = self._connection._play_context.timeout
|
||||||
module_name = self._task.action
|
|
||||||
module_args = self._task.args
|
module_args = self._task.args
|
||||||
|
|
||||||
if module_args.get('state', None) == 'restored':
|
if module_args.get('state', None) == 'restored':
|
||||||
@@ -107,7 +114,7 @@ class ActionModule(ActionBase):
|
|||||||
# longer on the controller); and set a backup file path.
|
# longer on the controller); and set a backup file path.
|
||||||
module_args['_timeout'] = task_async
|
module_args['_timeout'] = task_async
|
||||||
module_args['_back'] = '%s/iptables.state' % async_dir
|
module_args['_back'] = '%s/iptables.state' % async_dir
|
||||||
async_status_args = dict(_async_dir=async_dir)
|
async_status_args = dict(mode='status')
|
||||||
confirm_cmd = 'rm -f %s' % module_args['_back']
|
confirm_cmd = 'rm -f %s' % module_args['_back']
|
||||||
starter_cmd = 'touch %s.starter' % module_args['_back']
|
starter_cmd = 'touch %s.starter' % module_args['_back']
|
||||||
remaining_time = max(task_async, max_timeout)
|
remaining_time = max(task_async, max_timeout)
|
||||||
@@ -133,7 +140,7 @@ class ActionModule(ActionBase):
|
|||||||
# The module is aware to not process the main iptables-restore
|
# The module is aware to not process the main iptables-restore
|
||||||
# command before finding (and deleting) the 'starter' cookie on
|
# command before finding (and deleting) the 'starter' cookie on
|
||||||
# the host, so the previous query will not reach ssh timeout.
|
# the host, so the previous query will not reach ssh timeout.
|
||||||
garbage = self._low_level_execute_command(starter_cmd, sudoable=self.DEFAULT_SUDOABLE)
|
dummy = self._low_level_execute_command(starter_cmd, sudoable=self.DEFAULT_SUDOABLE)
|
||||||
|
|
||||||
# As the main command is not yet executed on the target, here
|
# As the main command is not yet executed on the target, here
|
||||||
# 'finished' means 'failed before main command be executed'.
|
# 'finished' means 'failed before main command be executed'.
|
||||||
@@ -143,7 +150,7 @@ class ActionModule(ActionBase):
|
|||||||
except AttributeError:
|
except AttributeError:
|
||||||
pass
|
pass
|
||||||
|
|
||||||
for x in range(max_timeout):
|
for dummy in range(max_timeout):
|
||||||
time.sleep(1)
|
time.sleep(1)
|
||||||
remaining_time -= 1
|
remaining_time -= 1
|
||||||
# - AnsibleConnectionFailure covers rejected requests (i.e.
|
# - AnsibleConnectionFailure covers rejected requests (i.e.
|
||||||
@@ -151,7 +158,7 @@ class ActionModule(ActionBase):
|
|||||||
# - ansible_timeout is able to cover dropped requests (due
|
# - ansible_timeout is able to cover dropped requests (due
|
||||||
# to a rule or policy DROP) if not lower than async_val.
|
# to a rule or policy DROP) if not lower than async_val.
|
||||||
try:
|
try:
|
||||||
garbage = self._low_level_execute_command(confirm_cmd, sudoable=self.DEFAULT_SUDOABLE)
|
dummy = self._low_level_execute_command(confirm_cmd, sudoable=self.DEFAULT_SUDOABLE)
|
||||||
break
|
break
|
||||||
except AnsibleConnectionFailure:
|
except AnsibleConnectionFailure:
|
||||||
continue
|
continue
|
||||||
@@ -164,16 +171,12 @@ class ActionModule(ActionBase):
|
|||||||
del result[key]
|
del result[key]
|
||||||
|
|
||||||
if result.get('invocation', {}).get('module_args'):
|
if result.get('invocation', {}).get('module_args'):
|
||||||
if '_timeout' in result['invocation']['module_args']:
|
for key in ('_back', '_timeout', '_async_dir', 'jid'):
|
||||||
del result['invocation']['module_args']['_back']
|
if result['invocation']['module_args'].get(key):
|
||||||
del result['invocation']['module_args']['_timeout']
|
del result['invocation']['module_args'][key]
|
||||||
|
|
||||||
async_status_args['mode'] = 'cleanup'
|
async_status_args['mode'] = 'cleanup'
|
||||||
garbage = self._execute_module(
|
dummy = self._async_result(async_status_args, task_vars, 0)
|
||||||
module_name='ansible.builtin.async_status',
|
|
||||||
module_args=async_status_args,
|
|
||||||
task_vars=task_vars,
|
|
||||||
wrap_async=False)
|
|
||||||
|
|
||||||
if not wrap_async:
|
if not wrap_async:
|
||||||
# remove a temporary path we created
|
# remove a temporary path we created
|
||||||
|
|||||||
91
plugins/become/sudosu.py
Normal file
91
plugins/become/sudosu.py
Normal file
@@ -0,0 +1,91 @@
|
|||||||
|
# -*- coding: utf-8 -*-
|
||||||
|
# Copyright: (c) 2021, Ansible Project
|
||||||
|
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||||
|
from __future__ import (absolute_import, division, print_function)
|
||||||
|
__metaclass__ = type
|
||||||
|
|
||||||
|
DOCUMENTATION = """
|
||||||
|
name: sudosu
|
||||||
|
short_description: Run tasks using sudo su -
|
||||||
|
description:
|
||||||
|
- This become plugins allows your remote/login user to execute commands as another user via the C(sudo) and C(su) utilities combined.
|
||||||
|
author:
|
||||||
|
- Dag Wieers (@dagwieers)
|
||||||
|
version_added: 2.4.0
|
||||||
|
options:
|
||||||
|
become_user:
|
||||||
|
description: User you 'become' to execute the task.
|
||||||
|
default: root
|
||||||
|
ini:
|
||||||
|
- section: privilege_escalation
|
||||||
|
key: become_user
|
||||||
|
- section: sudo_become_plugin
|
||||||
|
key: user
|
||||||
|
vars:
|
||||||
|
- name: ansible_become_user
|
||||||
|
- name: ansible_sudo_user
|
||||||
|
env:
|
||||||
|
- name: ANSIBLE_BECOME_USER
|
||||||
|
- name: ANSIBLE_SUDO_USER
|
||||||
|
become_flags:
|
||||||
|
description: Options to pass to C(sudo).
|
||||||
|
default: -H -S -n
|
||||||
|
ini:
|
||||||
|
- section: privilege_escalation
|
||||||
|
key: become_flags
|
||||||
|
- section: sudo_become_plugin
|
||||||
|
key: flags
|
||||||
|
vars:
|
||||||
|
- name: ansible_become_flags
|
||||||
|
- name: ansible_sudo_flags
|
||||||
|
env:
|
||||||
|
- name: ANSIBLE_BECOME_FLAGS
|
||||||
|
- name: ANSIBLE_SUDO_FLAGS
|
||||||
|
become_pass:
|
||||||
|
description: Password to pass to C(sudo).
|
||||||
|
required: false
|
||||||
|
vars:
|
||||||
|
- name: ansible_become_password
|
||||||
|
- name: ansible_become_pass
|
||||||
|
- name: ansible_sudo_pass
|
||||||
|
env:
|
||||||
|
- name: ANSIBLE_BECOME_PASS
|
||||||
|
- name: ANSIBLE_SUDO_PASS
|
||||||
|
ini:
|
||||||
|
- section: sudo_become_plugin
|
||||||
|
key: password
|
||||||
|
"""
|
||||||
|
|
||||||
|
|
||||||
|
from ansible.plugins.become import BecomeBase
|
||||||
|
|
||||||
|
|
||||||
|
class BecomeModule(BecomeBase):
|
||||||
|
|
||||||
|
name = 'community.general.sudosu'
|
||||||
|
|
||||||
|
# messages for detecting prompted password issues
|
||||||
|
fail = ('Sorry, try again.',)
|
||||||
|
missing = ('Sorry, a password is required to run sudo', 'sudo: a password is required')
|
||||||
|
|
||||||
|
def build_become_command(self, cmd, shell):
|
||||||
|
super(BecomeModule, self).build_become_command(cmd, shell)
|
||||||
|
|
||||||
|
if not cmd:
|
||||||
|
return cmd
|
||||||
|
|
||||||
|
becomecmd = 'sudo'
|
||||||
|
|
||||||
|
flags = self.get_option('become_flags') or ''
|
||||||
|
prompt = ''
|
||||||
|
if self.get_option('become_pass'):
|
||||||
|
self.prompt = '[sudo via ansible, key=%s] password:' % self._id
|
||||||
|
if flags: # this could be simplified, but kept as is for now for backwards string matching
|
||||||
|
flags = flags.replace('-n', '')
|
||||||
|
prompt = '-p "%s"' % (self.prompt)
|
||||||
|
|
||||||
|
user = self.get_option('become_user') or ''
|
||||||
|
if user:
|
||||||
|
user = '%s' % (user)
|
||||||
|
|
||||||
|
return ' '.join([becomecmd, flags, prompt, 'su -l', user, self._build_success_command(cmd, shell)])
|
||||||
14
plugins/cache/redis.py
vendored
14
plugins/cache/redis.py
vendored
@@ -61,6 +61,7 @@ DOCUMENTATION = '''
|
|||||||
type: integer
|
type: integer
|
||||||
'''
|
'''
|
||||||
|
|
||||||
|
import re
|
||||||
import time
|
import time
|
||||||
import json
|
import json
|
||||||
|
|
||||||
@@ -91,6 +92,8 @@ class CacheModule(BaseCacheModule):
|
|||||||
performance.
|
performance.
|
||||||
"""
|
"""
|
||||||
_sentinel_service_name = None
|
_sentinel_service_name = None
|
||||||
|
re_url_conn = re.compile(r'^([^:]+|\[[^]]+\]):(\d+):(\d+)(?::(.*))?$')
|
||||||
|
re_sent_conn = re.compile(r'^(.*):(\d+)$')
|
||||||
|
|
||||||
def __init__(self, *args, **kwargs):
|
def __init__(self, *args, **kwargs):
|
||||||
uri = ''
|
uri = ''
|
||||||
@@ -130,11 +133,18 @@ class CacheModule(BaseCacheModule):
|
|||||||
self._db = self._get_sentinel_connection(uri, kw)
|
self._db = self._get_sentinel_connection(uri, kw)
|
||||||
# normal connection
|
# normal connection
|
||||||
else:
|
else:
|
||||||
connection = uri.split(':')
|
connection = self._parse_connection(self.re_url_conn, uri)
|
||||||
self._db = StrictRedis(*connection, **kw)
|
self._db = StrictRedis(*connection, **kw)
|
||||||
|
|
||||||
display.vv('Redis connection: %s' % self._db)
|
display.vv('Redis connection: %s' % self._db)
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def _parse_connection(re_patt, uri):
|
||||||
|
match = re_patt.match(uri)
|
||||||
|
if not match:
|
||||||
|
raise AnsibleError("Unable to parse connection string")
|
||||||
|
return match.groups()
|
||||||
|
|
||||||
def _get_sentinel_connection(self, uri, kw):
|
def _get_sentinel_connection(self, uri, kw):
|
||||||
"""
|
"""
|
||||||
get sentinel connection details from _uri
|
get sentinel connection details from _uri
|
||||||
@@ -158,7 +168,7 @@ class CacheModule(BaseCacheModule):
|
|||||||
except IndexError:
|
except IndexError:
|
||||||
pass # password is optional
|
pass # password is optional
|
||||||
|
|
||||||
sentinels = [tuple(shost.split(':')) for shost in connections]
|
sentinels = [self._parse_connection(self.re_sent_conn, shost) for shost in connections]
|
||||||
display.vv('\nUsing redis sentinels: %s' % sentinels)
|
display.vv('\nUsing redis sentinels: %s' % sentinels)
|
||||||
scon = Sentinel(sentinels, **kw)
|
scon = Sentinel(sentinels, **kw)
|
||||||
try:
|
try:
|
||||||
|
|||||||
234
plugins/callback/loganalytics.py
Normal file
234
plugins/callback/loganalytics.py
Normal file
@@ -0,0 +1,234 @@
|
|||||||
|
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||||
|
|
||||||
|
from __future__ import (absolute_import, division, print_function)
|
||||||
|
__metaclass__ = type
|
||||||
|
|
||||||
|
DOCUMENTATION = '''
|
||||||
|
name: loganalytics
|
||||||
|
type: aggregate
|
||||||
|
short_description: Posts task results to Azure Log Analytics
|
||||||
|
author: "Cyrus Li (@zhcli) <cyrus1006@gmail.com>"
|
||||||
|
description:
|
||||||
|
- This callback plugin will post task results in JSON formatted to an Azure Log Analytics workspace.
|
||||||
|
- Credits to authors of splunk callback plugin.
|
||||||
|
version_added: "2.4.0"
|
||||||
|
requirements:
|
||||||
|
- Whitelisting this callback plugin.
|
||||||
|
- An Azure log analytics work space has been established.
|
||||||
|
options:
|
||||||
|
workspace_id:
|
||||||
|
description: Workspace ID of the Azure log analytics workspace.
|
||||||
|
required: true
|
||||||
|
env:
|
||||||
|
- name: WORKSPACE_ID
|
||||||
|
ini:
|
||||||
|
- section: callback_loganalytics
|
||||||
|
key: workspace_id
|
||||||
|
shared_key:
|
||||||
|
description: Shared key to connect to Azure log analytics workspace.
|
||||||
|
required: true
|
||||||
|
env:
|
||||||
|
- name: WORKSPACE_SHARED_KEY
|
||||||
|
ini:
|
||||||
|
- section: callback_loganalytics
|
||||||
|
key: shared_key
|
||||||
|
'''
|
||||||
|
|
||||||
|
EXAMPLES = '''
|
||||||
|
examples: |
|
||||||
|
Whitelist the plugin in ansible.cfg:
|
||||||
|
[defaults]
|
||||||
|
callback_whitelist = community.general.loganalytics
|
||||||
|
Set the environment variable:
|
||||||
|
export WORKSPACE_ID=01234567-0123-0123-0123-01234567890a
|
||||||
|
export WORKSPACE_SHARED_KEY=dZD0kCbKl3ehZG6LHFMuhtE0yHiFCmetzFMc2u+roXIUQuatqU924SsAAAAPemhjbGlAemhjbGktTUJQAQIDBA==
|
||||||
|
Or configure the plugin in ansible.cfg in the callback_loganalytics block:
|
||||||
|
[callback_loganalytics]
|
||||||
|
workspace_id = 01234567-0123-0123-0123-01234567890a
|
||||||
|
shared_key = dZD0kCbKl3ehZG6LHFMuhtE0yHiFCmetzFMc2u+roXIUQuatqU924SsAAAAPemhjbGlAemhjbGktTUJQAQIDBA==
|
||||||
|
'''
|
||||||
|
|
||||||
|
import hashlib
|
||||||
|
import hmac
|
||||||
|
import base64
|
||||||
|
import logging
|
||||||
|
import json
|
||||||
|
import uuid
|
||||||
|
import socket
|
||||||
|
import getpass
|
||||||
|
|
||||||
|
from datetime import datetime
|
||||||
|
from os.path import basename
|
||||||
|
|
||||||
|
from ansible.module_utils.urls import open_url
|
||||||
|
from ansible.parsing.ajson import AnsibleJSONEncoder
|
||||||
|
from ansible.plugins.callback import CallbackBase
|
||||||
|
|
||||||
|
|
||||||
|
class AzureLogAnalyticsSource(object):
|
||||||
|
def __init__(self):
|
||||||
|
self.ansible_check_mode = False
|
||||||
|
self.ansible_playbook = ""
|
||||||
|
self.ansible_version = ""
|
||||||
|
self.session = str(uuid.uuid4())
|
||||||
|
self.host = socket.gethostname()
|
||||||
|
self.user = getpass.getuser()
|
||||||
|
self.extra_vars = ""
|
||||||
|
|
||||||
|
def __build_signature(self, date, workspace_id, shared_key, content_length):
|
||||||
|
# Build authorisation signature for Azure log analytics API call
|
||||||
|
sigs = "POST\n{0}\napplication/json\nx-ms-date:{1}\n/api/logs".format(
|
||||||
|
str(content_length), date)
|
||||||
|
utf8_sigs = sigs.encode('utf-8')
|
||||||
|
decoded_shared_key = base64.b64decode(shared_key)
|
||||||
|
hmac_sha256_sigs = hmac.new(
|
||||||
|
decoded_shared_key, utf8_sigs, digestmod=hashlib.sha256).digest()
|
||||||
|
encoded_hash = base64.b64encode(hmac_sha256_sigs).decode('utf-8')
|
||||||
|
signature = "SharedKey {0}:{1}".format(workspace_id, encoded_hash)
|
||||||
|
return signature
|
||||||
|
|
||||||
|
def __build_workspace_url(self, workspace_id):
|
||||||
|
return "https://{0}.ods.opinsights.azure.com/api/logs?api-version=2016-04-01".format(workspace_id)
|
||||||
|
|
||||||
|
def __rfc1123date(self):
|
||||||
|
return datetime.utcnow().strftime('%a, %d %b %Y %H:%M:%S GMT')
|
||||||
|
|
||||||
|
def send_event(self, workspace_id, shared_key, state, result, runtime):
|
||||||
|
if result._task_fields['args'].get('_ansible_check_mode') is True:
|
||||||
|
self.ansible_check_mode = True
|
||||||
|
|
||||||
|
if result._task_fields['args'].get('_ansible_version'):
|
||||||
|
self.ansible_version = \
|
||||||
|
result._task_fields['args'].get('_ansible_version')
|
||||||
|
|
||||||
|
if result._task._role:
|
||||||
|
ansible_role = str(result._task._role)
|
||||||
|
else:
|
||||||
|
ansible_role = None
|
||||||
|
|
||||||
|
data = {}
|
||||||
|
data['uuid'] = result._task._uuid
|
||||||
|
data['session'] = self.session
|
||||||
|
data['status'] = state
|
||||||
|
data['timestamp'] = self.__rfc1123date()
|
||||||
|
data['host'] = self.host
|
||||||
|
data['user'] = self.user
|
||||||
|
data['runtime'] = runtime
|
||||||
|
data['ansible_version'] = self.ansible_version
|
||||||
|
data['ansible_check_mode'] = self.ansible_check_mode
|
||||||
|
data['ansible_host'] = result._host.name
|
||||||
|
data['ansible_playbook'] = self.ansible_playbook
|
||||||
|
data['ansible_role'] = ansible_role
|
||||||
|
data['ansible_task'] = result._task_fields
|
||||||
|
# Removing args since it can contain sensitive data
|
||||||
|
if 'args' in data['ansible_task']:
|
||||||
|
data['ansible_task'].pop('args')
|
||||||
|
data['ansible_result'] = result._result
|
||||||
|
if 'content' in data['ansible_result']:
|
||||||
|
data['ansible_result'].pop('content')
|
||||||
|
|
||||||
|
# Adding extra vars info
|
||||||
|
data['extra_vars'] = self.extra_vars
|
||||||
|
|
||||||
|
# Preparing the playbook logs as JSON format and send to Azure log analytics
|
||||||
|
jsondata = json.dumps({'event': data}, cls=AnsibleJSONEncoder, sort_keys=True)
|
||||||
|
content_length = len(jsondata)
|
||||||
|
rfc1123date = self.__rfc1123date()
|
||||||
|
signature = self.__build_signature(rfc1123date, workspace_id, shared_key, content_length)
|
||||||
|
workspace_url = self.__build_workspace_url(workspace_id)
|
||||||
|
|
||||||
|
open_url(
|
||||||
|
workspace_url,
|
||||||
|
jsondata,
|
||||||
|
headers={
|
||||||
|
'content-type': 'application/json',
|
||||||
|
'Authorization': signature,
|
||||||
|
'Log-Type': 'ansible_playbook',
|
||||||
|
'x-ms-date': rfc1123date
|
||||||
|
},
|
||||||
|
method='POST'
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
class CallbackModule(CallbackBase):
|
||||||
|
CALLBACK_VERSION = 2.0
|
||||||
|
CALLBACK_TYPE = 'aggregate'
|
||||||
|
CALLBACK_NAME = 'loganalytics'
|
||||||
|
CALLBACK_NEEDS_WHITELIST = True
|
||||||
|
|
||||||
|
def __init__(self, display=None):
|
||||||
|
super(CallbackModule, self).__init__(display=display)
|
||||||
|
self.start_datetimes = {} # Collect task start times
|
||||||
|
self.workspace_id = None
|
||||||
|
self.shared_key = None
|
||||||
|
self.loganalytics = AzureLogAnalyticsSource()
|
||||||
|
|
||||||
|
def _seconds_since_start(self, result):
|
||||||
|
return (
|
||||||
|
datetime.utcnow() -
|
||||||
|
self.start_datetimes[result._task._uuid]
|
||||||
|
).total_seconds()
|
||||||
|
|
||||||
|
def set_options(self, task_keys=None, var_options=None, direct=None):
|
||||||
|
super(CallbackModule, self).set_options(task_keys=task_keys, var_options=var_options, direct=direct)
|
||||||
|
self.workspace_id = self.get_option('workspace_id')
|
||||||
|
self.shared_key = self.get_option('shared_key')
|
||||||
|
|
||||||
|
def v2_playbook_on_play_start(self, play):
|
||||||
|
vm = play.get_variable_manager()
|
||||||
|
extra_vars = vm.extra_vars
|
||||||
|
self.loganalytics.extra_vars = extra_vars
|
||||||
|
|
||||||
|
def v2_playbook_on_start(self, playbook):
|
||||||
|
self.loganalytics.ansible_playbook = basename(playbook._file_name)
|
||||||
|
|
||||||
|
def v2_playbook_on_task_start(self, task, is_conditional):
|
||||||
|
self.start_datetimes[task._uuid] = datetime.utcnow()
|
||||||
|
|
||||||
|
def v2_playbook_on_handler_task_start(self, task):
|
||||||
|
self.start_datetimes[task._uuid] = datetime.utcnow()
|
||||||
|
|
||||||
|
def v2_runner_on_ok(self, result, **kwargs):
|
||||||
|
self.loganalytics.send_event(
|
||||||
|
self.workspace_id,
|
||||||
|
self.shared_key,
|
||||||
|
'OK',
|
||||||
|
result,
|
||||||
|
self._seconds_since_start(result)
|
||||||
|
)
|
||||||
|
|
||||||
|
def v2_runner_on_skipped(self, result, **kwargs):
|
||||||
|
self.loganalytics.send_event(
|
||||||
|
self.workspace_id,
|
||||||
|
self.shared_key,
|
||||||
|
'SKIPPED',
|
||||||
|
result,
|
||||||
|
self._seconds_since_start(result)
|
||||||
|
)
|
||||||
|
|
||||||
|
def v2_runner_on_failed(self, result, **kwargs):
|
||||||
|
self.loganalytics.send_event(
|
||||||
|
self.workspace_id,
|
||||||
|
self.shared_key,
|
||||||
|
'FAILED',
|
||||||
|
result,
|
||||||
|
self._seconds_since_start(result)
|
||||||
|
)
|
||||||
|
|
||||||
|
def runner_on_async_failed(self, result, **kwargs):
|
||||||
|
self.loganalytics.send_event(
|
||||||
|
self.workspace_id,
|
||||||
|
self.shared_key,
|
||||||
|
'FAILED',
|
||||||
|
result,
|
||||||
|
self._seconds_since_start(result)
|
||||||
|
)
|
||||||
|
|
||||||
|
def v2_runner_on_unreachable(self, result, **kwargs):
|
||||||
|
self.loganalytics.send_event(
|
||||||
|
self.workspace_id,
|
||||||
|
self.shared_key,
|
||||||
|
'UNREACHABLE',
|
||||||
|
result,
|
||||||
|
self._seconds_since_start(result)
|
||||||
|
)
|
||||||
@@ -37,12 +37,13 @@ import tempfile
|
|||||||
import shutil
|
import shutil
|
||||||
|
|
||||||
from ansible.errors import AnsibleError
|
from ansible.errors import AnsibleError
|
||||||
|
from ansible.plugins.connection import ConnectionBase
|
||||||
from ansible.utils.display import Display
|
from ansible.utils.display import Display
|
||||||
|
|
||||||
display = Display()
|
display = Display()
|
||||||
|
|
||||||
|
|
||||||
class Connection(object):
|
class Connection(ConnectionBase):
|
||||||
''' Func-based connections '''
|
''' Func-based connections '''
|
||||||
|
|
||||||
has_pipelining = False
|
has_pipelining = False
|
||||||
|
|||||||
@@ -13,12 +13,32 @@ class ModuleDocFragment(object):
|
|||||||
DOCUMENTATION = r'''
|
DOCUMENTATION = r'''
|
||||||
options:
|
options:
|
||||||
config:
|
config:
|
||||||
description:
|
description:
|
||||||
- Path to a .json configuration file containing the OneView client configuration.
|
- Path to a .json configuration file containing the OneView client configuration.
|
||||||
The configuration file is optional and when used should be present in the host running the ansible commands.
|
The configuration file is optional and when used should be present in the host running the ansible commands.
|
||||||
If the file path is not provided, the configuration will be loaded from environment variables.
|
If the file path is not provided, the configuration will be loaded from environment variables.
|
||||||
For links to example configuration files or how to use the environment variables verify the notes section.
|
For links to example configuration files or how to use the environment variables verify the notes section.
|
||||||
type: path
|
type: path
|
||||||
|
api_version:
|
||||||
|
description:
|
||||||
|
- OneView API Version.
|
||||||
|
type: int
|
||||||
|
image_streamer_hostname:
|
||||||
|
description:
|
||||||
|
- IP address or hostname for the HPE Image Streamer REST API.
|
||||||
|
type: str
|
||||||
|
hostname:
|
||||||
|
description:
|
||||||
|
- IP address or hostname for the appliance.
|
||||||
|
type: str
|
||||||
|
username:
|
||||||
|
description:
|
||||||
|
- Username for API authentication.
|
||||||
|
type: str
|
||||||
|
password:
|
||||||
|
description:
|
||||||
|
- Password for API authentication.
|
||||||
|
type: str
|
||||||
|
|
||||||
requirements:
|
requirements:
|
||||||
- python >= 2.7.9
|
- python >= 2.7.9
|
||||||
|
|||||||
43
plugins/doc_fragments/pritunl.py
Normal file
43
plugins/doc_fragments/pritunl.py
Normal file
@@ -0,0 +1,43 @@
|
|||||||
|
# -*- coding: utf-8 -*-
|
||||||
|
|
||||||
|
# Copyright: (c) 2021, Florian Dambrine <android.florian@gmail.com>
|
||||||
|
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||||
|
|
||||||
|
from __future__ import absolute_import, division, print_function
|
||||||
|
|
||||||
|
__metaclass__ = type
|
||||||
|
|
||||||
|
|
||||||
|
class ModuleDocFragment(object):
|
||||||
|
|
||||||
|
DOCUMENTATION = r"""
|
||||||
|
options:
|
||||||
|
pritunl_url:
|
||||||
|
type: str
|
||||||
|
required: true
|
||||||
|
description:
|
||||||
|
- URL and port of the Pritunl server on which the API is enabled.
|
||||||
|
|
||||||
|
pritunl_api_token:
|
||||||
|
type: str
|
||||||
|
required: true
|
||||||
|
description:
|
||||||
|
- API Token of a Pritunl admin user.
|
||||||
|
- It needs to be enabled in Administrators > USERNAME > Enable Token Authentication.
|
||||||
|
|
||||||
|
pritunl_api_secret:
|
||||||
|
type: str
|
||||||
|
required: true
|
||||||
|
description:
|
||||||
|
- API Secret found in Administrators > USERNAME > API Secret.
|
||||||
|
|
||||||
|
validate_certs:
|
||||||
|
type: bool
|
||||||
|
required: false
|
||||||
|
default: true
|
||||||
|
description:
|
||||||
|
- If certificates should be validated or not.
|
||||||
|
- This should never be set to C(false), except if you are very sure that
|
||||||
|
your connection to the server can not be subject to a Man In The Middle
|
||||||
|
attack.
|
||||||
|
"""
|
||||||
24
plugins/filter/dict.py
Normal file
24
plugins/filter/dict.py
Normal file
@@ -0,0 +1,24 @@
|
|||||||
|
# -*- coding: utf-8 -*-
|
||||||
|
|
||||||
|
# Copyright: (c) 2021, Felix Fontein <felix@fontein.de>
|
||||||
|
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||||
|
|
||||||
|
from __future__ import absolute_import, division, print_function
|
||||||
|
__metaclass__ = type
|
||||||
|
|
||||||
|
|
||||||
|
def dict_filter(sequence):
|
||||||
|
'''Convert a list of tuples to a dictionary.
|
||||||
|
|
||||||
|
Example: ``[[1, 2], ['a', 'b']] | community.general.dict`` results in ``{1: 2, 'a': 'b'}``
|
||||||
|
'''
|
||||||
|
return dict(sequence)
|
||||||
|
|
||||||
|
|
||||||
|
class FilterModule(object):
|
||||||
|
'''Ansible jinja2 filters'''
|
||||||
|
|
||||||
|
def filters(self):
|
||||||
|
return {
|
||||||
|
'dict': dict_filter,
|
||||||
|
}
|
||||||
49
plugins/filter/from_csv.py
Normal file
49
plugins/filter/from_csv.py
Normal file
@@ -0,0 +1,49 @@
|
|||||||
|
# -*- coding: utf-8 -*-
|
||||||
|
|
||||||
|
# Copyright: (c) 2021, Andrew Pantuso (@ajpantuso) <ajpantuso@gmail.com>
|
||||||
|
# Copyright: (c) 2018, Dag Wieers (@dagwieers) <dag@wieers.com>
|
||||||
|
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||||
|
|
||||||
|
from __future__ import absolute_import, division, print_function
|
||||||
|
__metaclass__ = type
|
||||||
|
|
||||||
|
from ansible.errors import AnsibleFilterError
|
||||||
|
from ansible.module_utils._text import to_native
|
||||||
|
|
||||||
|
from ansible_collections.community.general.plugins.module_utils.csv import (initialize_dialect, read_csv, CSVError,
|
||||||
|
DialectNotAvailableError,
|
||||||
|
CustomDialectFailureError)
|
||||||
|
|
||||||
|
|
||||||
|
def from_csv(data, dialect='excel', fieldnames=None, delimiter=None, skipinitialspace=None, strict=None):
|
||||||
|
|
||||||
|
dialect_params = {
|
||||||
|
"delimiter": delimiter,
|
||||||
|
"skipinitialspace": skipinitialspace,
|
||||||
|
"strict": strict,
|
||||||
|
}
|
||||||
|
|
||||||
|
try:
|
||||||
|
dialect = initialize_dialect(dialect, **dialect_params)
|
||||||
|
except (CustomDialectFailureError, DialectNotAvailableError) as e:
|
||||||
|
raise AnsibleFilterError(to_native(e))
|
||||||
|
|
||||||
|
reader = read_csv(data, dialect, fieldnames)
|
||||||
|
|
||||||
|
data_list = []
|
||||||
|
|
||||||
|
try:
|
||||||
|
for row in reader:
|
||||||
|
data_list.append(row)
|
||||||
|
except CSVError as e:
|
||||||
|
raise AnsibleFilterError("Unable to process file: %s" % to_native(e))
|
||||||
|
|
||||||
|
return data_list
|
||||||
|
|
||||||
|
|
||||||
|
class FilterModule(object):
|
||||||
|
|
||||||
|
def filters(self):
|
||||||
|
return {
|
||||||
|
'from_csv': from_csv
|
||||||
|
}
|
||||||
@@ -35,9 +35,11 @@ def json_query(data, expr):
|
|||||||
raise AnsibleError('You need to install "jmespath" prior to running '
|
raise AnsibleError('You need to install "jmespath" prior to running '
|
||||||
'json_query filter')
|
'json_query filter')
|
||||||
|
|
||||||
# Hack to handle Ansible String Types
|
# Hack to handle Ansible Unsafe text, AnsibleMapping and AnsibleSequence
|
||||||
# See issue: https://github.com/ansible-collections/community.general/issues/320
|
# See issue: https://github.com/ansible-collections/community.general/issues/320
|
||||||
jmespath.functions.REVERSE_TYPES_MAP['string'] = jmespath.functions.REVERSE_TYPES_MAP['string'] + ('AnsibleUnicode', 'AnsibleUnsafeText', )
|
jmespath.functions.REVERSE_TYPES_MAP['string'] = jmespath.functions.REVERSE_TYPES_MAP['string'] + ('AnsibleUnicode', 'AnsibleUnsafeText', )
|
||||||
|
jmespath.functions.REVERSE_TYPES_MAP['array'] = jmespath.functions.REVERSE_TYPES_MAP['array'] + ('AnsibleSequence', )
|
||||||
|
jmespath.functions.REVERSE_TYPES_MAP['object'] = jmespath.functions.REVERSE_TYPES_MAP['object'] + ('AnsibleMapping', )
|
||||||
try:
|
try:
|
||||||
return jmespath.search(expr, data)
|
return jmespath.search(expr, data)
|
||||||
except jmespath.exceptions.JMESPathError as e:
|
except jmespath.exceptions.JMESPathError as e:
|
||||||
|
|||||||
28
plugins/filter/path_join_shim.py
Normal file
28
plugins/filter/path_join_shim.py
Normal file
@@ -0,0 +1,28 @@
|
|||||||
|
# -*- coding: utf-8 -*-
|
||||||
|
|
||||||
|
# Copyright: (c) 2020-2021, Felix Fontein <felix@fontein.de>
|
||||||
|
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||||
|
|
||||||
|
from __future__ import absolute_import, division, print_function
|
||||||
|
__metaclass__ = type
|
||||||
|
|
||||||
|
|
||||||
|
import os.path
|
||||||
|
|
||||||
|
|
||||||
|
def path_join(list):
|
||||||
|
'''Join list of paths.
|
||||||
|
|
||||||
|
This is a minimal shim for ansible.builtin.path_join included in ansible-base 2.10.
|
||||||
|
This should only be called by Ansible 2.9 or earlier. See meta/runtime.yml for details.
|
||||||
|
'''
|
||||||
|
return os.path.join(*list)
|
||||||
|
|
||||||
|
|
||||||
|
class FilterModule(object):
|
||||||
|
'''Ansible jinja2 filters'''
|
||||||
|
|
||||||
|
def filters(self):
|
||||||
|
return {
|
||||||
|
'path_join': path_join,
|
||||||
|
}
|
||||||
@@ -71,6 +71,25 @@ class InventoryModule(BaseInventoryPlugin, Constructable, Cacheable):
|
|||||||
self._nmap = None
|
self._nmap = None
|
||||||
super(InventoryModule, self).__init__()
|
super(InventoryModule, self).__init__()
|
||||||
|
|
||||||
|
def _populate(self, hosts):
|
||||||
|
# Use constructed if applicable
|
||||||
|
strict = self.get_option('strict')
|
||||||
|
|
||||||
|
for host in hosts:
|
||||||
|
hostname = host['name']
|
||||||
|
self.inventory.add_host(hostname)
|
||||||
|
for var, value in host.items():
|
||||||
|
self.inventory.set_variable(hostname, var, value)
|
||||||
|
|
||||||
|
# Composed variables
|
||||||
|
self._set_composite_vars(self.get_option('compose'), host, hostname, strict=strict)
|
||||||
|
|
||||||
|
# Complex groups based on jinja2 conditionals, hosts that meet the conditional are added to group
|
||||||
|
self._add_host_to_composed_groups(self.get_option('groups'), host, hostname, strict=strict)
|
||||||
|
|
||||||
|
# Create groups based on variable values and add the corresponding hosts to it
|
||||||
|
self._add_host_to_keyed_groups(self.get_option('keyed_groups'), host, hostname, strict=strict)
|
||||||
|
|
||||||
def verify_file(self, path):
|
def verify_file(self, path):
|
||||||
|
|
||||||
valid = False
|
valid = False
|
||||||
@@ -82,7 +101,7 @@ class InventoryModule(BaseInventoryPlugin, Constructable, Cacheable):
|
|||||||
|
|
||||||
return valid
|
return valid
|
||||||
|
|
||||||
def parse(self, inventory, loader, path, cache=False):
|
def parse(self, inventory, loader, path, cache=True):
|
||||||
|
|
||||||
try:
|
try:
|
||||||
self._nmap = get_bin_path('nmap')
|
self._nmap = get_bin_path('nmap')
|
||||||
@@ -93,75 +112,102 @@ class InventoryModule(BaseInventoryPlugin, Constructable, Cacheable):
|
|||||||
|
|
||||||
self._read_config_data(path)
|
self._read_config_data(path)
|
||||||
|
|
||||||
# setup command
|
cache_key = self.get_cache_key(path)
|
||||||
cmd = [self._nmap]
|
|
||||||
if not self._options['ports']:
|
|
||||||
cmd.append('-sP')
|
|
||||||
|
|
||||||
if self._options['ipv4'] and not self._options['ipv6']:
|
# cache may be True or False at this point to indicate if the inventory is being refreshed
|
||||||
cmd.append('-4')
|
# get the user's cache option too to see if we should save the cache if it is changing
|
||||||
elif self._options['ipv6'] and not self._options['ipv4']:
|
user_cache_setting = self.get_option('cache')
|
||||||
cmd.append('-6')
|
|
||||||
elif not self._options['ipv6'] and not self._options['ipv4']:
|
|
||||||
raise AnsibleParserError('One of ipv4 or ipv6 must be enabled for this plugin')
|
|
||||||
|
|
||||||
if self._options['exclude']:
|
# read if the user has caching enabled and the cache isn't being refreshed
|
||||||
cmd.append('--exclude')
|
attempt_to_read_cache = user_cache_setting and cache
|
||||||
cmd.append(','.join(self._options['exclude']))
|
# update if the user has caching enabled and the cache is being refreshed; update this value to True if the cache has expired below
|
||||||
|
cache_needs_update = user_cache_setting and not cache
|
||||||
cmd.append(self._options['address'])
|
|
||||||
try:
|
|
||||||
# execute
|
|
||||||
p = Popen(cmd, stdout=PIPE, stderr=PIPE)
|
|
||||||
stdout, stderr = p.communicate()
|
|
||||||
if p.returncode != 0:
|
|
||||||
raise AnsibleParserError('Failed to run nmap, rc=%s: %s' % (p.returncode, to_native(stderr)))
|
|
||||||
|
|
||||||
# parse results
|
|
||||||
host = None
|
|
||||||
ip = None
|
|
||||||
ports = []
|
|
||||||
|
|
||||||
|
if attempt_to_read_cache:
|
||||||
try:
|
try:
|
||||||
t_stdout = to_text(stdout, errors='surrogate_or_strict')
|
results = self._cache[cache_key]
|
||||||
except UnicodeError as e:
|
except KeyError:
|
||||||
raise AnsibleParserError('Invalid (non unicode) input returned: %s' % to_native(e))
|
# This occurs if the cache_key is not in the cache or if the cache_key expired, so the cache needs to be updated
|
||||||
|
cache_needs_update = True
|
||||||
|
|
||||||
for line in t_stdout.splitlines():
|
if not user_cache_setting or cache_needs_update:
|
||||||
hits = self.find_host.match(line)
|
# setup command
|
||||||
if hits:
|
cmd = [self._nmap]
|
||||||
if host is not None:
|
if not self._options['ports']:
|
||||||
self.inventory.set_variable(host, 'ports', ports)
|
cmd.append('-sP')
|
||||||
|
|
||||||
# if dns only shows arpa, just use ip instead as hostname
|
if self._options['ipv4'] and not self._options['ipv6']:
|
||||||
if hits.group(1).endswith('.in-addr.arpa'):
|
cmd.append('-4')
|
||||||
host = hits.group(2)
|
elif self._options['ipv6'] and not self._options['ipv4']:
|
||||||
else:
|
cmd.append('-6')
|
||||||
host = hits.group(1)
|
elif not self._options['ipv6'] and not self._options['ipv4']:
|
||||||
|
raise AnsibleParserError('One of ipv4 or ipv6 must be enabled for this plugin')
|
||||||
|
|
||||||
# if no reverse dns exists, just use ip instead as hostname
|
if self._options['exclude']:
|
||||||
if hits.group(2) is not None:
|
cmd.append('--exclude')
|
||||||
ip = hits.group(2)
|
cmd.append(','.join(self._options['exclude']))
|
||||||
else:
|
|
||||||
ip = hits.group(1)
|
|
||||||
|
|
||||||
if host is not None:
|
cmd.append(self._options['address'])
|
||||||
# update inventory
|
try:
|
||||||
self.inventory.add_host(host)
|
# execute
|
||||||
self.inventory.set_variable(host, 'ip', ip)
|
p = Popen(cmd, stdout=PIPE, stderr=PIPE)
|
||||||
ports = []
|
stdout, stderr = p.communicate()
|
||||||
continue
|
if p.returncode != 0:
|
||||||
|
raise AnsibleParserError('Failed to run nmap, rc=%s: %s' % (p.returncode, to_native(stderr)))
|
||||||
|
|
||||||
host_ports = self.find_port.match(line)
|
# parse results
|
||||||
if host is not None and host_ports:
|
host = None
|
||||||
ports.append({'port': host_ports.group(1), 'protocol': host_ports.group(2), 'state': host_ports.group(3), 'service': host_ports.group(4)})
|
ip = None
|
||||||
continue
|
ports = []
|
||||||
|
results = []
|
||||||
|
|
||||||
# TODO: parse more data, OS?
|
try:
|
||||||
|
t_stdout = to_text(stdout, errors='surrogate_or_strict')
|
||||||
|
except UnicodeError as e:
|
||||||
|
raise AnsibleParserError('Invalid (non unicode) input returned: %s' % to_native(e))
|
||||||
|
|
||||||
# if any leftovers
|
for line in t_stdout.splitlines():
|
||||||
if host and ports:
|
hits = self.find_host.match(line)
|
||||||
self.inventory.set_variable(host, 'ports', ports)
|
if hits:
|
||||||
|
if host is not None and ports:
|
||||||
|
results[-1]['ports'] = ports
|
||||||
|
|
||||||
except Exception as e:
|
# if dns only shows arpa, just use ip instead as hostname
|
||||||
raise AnsibleParserError("failed to parse %s: %s " % (to_native(path), to_native(e)))
|
if hits.group(1).endswith('.in-addr.arpa'):
|
||||||
|
host = hits.group(2)
|
||||||
|
else:
|
||||||
|
host = hits.group(1)
|
||||||
|
|
||||||
|
# if no reverse dns exists, just use ip instead as hostname
|
||||||
|
if hits.group(2) is not None:
|
||||||
|
ip = hits.group(2)
|
||||||
|
else:
|
||||||
|
ip = hits.group(1)
|
||||||
|
|
||||||
|
if host is not None:
|
||||||
|
# update inventory
|
||||||
|
results.append(dict())
|
||||||
|
results[-1]['name'] = host
|
||||||
|
results[-1]['ip'] = ip
|
||||||
|
ports = []
|
||||||
|
continue
|
||||||
|
|
||||||
|
host_ports = self.find_port.match(line)
|
||||||
|
if host is not None and host_ports:
|
||||||
|
ports.append({'port': host_ports.group(1),
|
||||||
|
'protocol': host_ports.group(2),
|
||||||
|
'state': host_ports.group(3),
|
||||||
|
'service': host_ports.group(4)})
|
||||||
|
continue
|
||||||
|
|
||||||
|
# if any leftovers
|
||||||
|
if host and ports:
|
||||||
|
results[-1]['ports'] = ports
|
||||||
|
|
||||||
|
except Exception as e:
|
||||||
|
raise AnsibleParserError("failed to parse %s: %s " % (to_native(path), to_native(e)))
|
||||||
|
|
||||||
|
if cache_needs_update:
|
||||||
|
self._cache[cache_key] = results
|
||||||
|
|
||||||
|
self._populate(results)
|
||||||
|
|||||||
@@ -19,6 +19,7 @@ DOCUMENTATION = '''
|
|||||||
- Will retrieve the first network interface with an IP for Proxmox nodes.
|
- Will retrieve the first network interface with an IP for Proxmox nodes.
|
||||||
- Can retrieve LXC/QEMU configuration as facts.
|
- Can retrieve LXC/QEMU configuration as facts.
|
||||||
extends_documentation_fragment:
|
extends_documentation_fragment:
|
||||||
|
- constructed
|
||||||
- inventory_cache
|
- inventory_cache
|
||||||
options:
|
options:
|
||||||
plugin:
|
plugin:
|
||||||
@@ -69,6 +70,14 @@ DOCUMENTATION = '''
|
|||||||
description: Gather LXC/QEMU configuration facts.
|
description: Gather LXC/QEMU configuration facts.
|
||||||
default: no
|
default: no
|
||||||
type: bool
|
type: bool
|
||||||
|
strict:
|
||||||
|
version_added: 2.5.0
|
||||||
|
compose:
|
||||||
|
version_added: 2.5.0
|
||||||
|
groups:
|
||||||
|
version_added: 2.5.0
|
||||||
|
keyed_groups:
|
||||||
|
version_added: 2.5.0
|
||||||
'''
|
'''
|
||||||
|
|
||||||
EXAMPLES = '''
|
EXAMPLES = '''
|
||||||
@@ -78,6 +87,15 @@ url: http://localhost:8006
|
|||||||
user: ansible@pve
|
user: ansible@pve
|
||||||
password: secure
|
password: secure
|
||||||
validate_certs: no
|
validate_certs: no
|
||||||
|
keyed_groups:
|
||||||
|
- key: proxmox_tags_parsed
|
||||||
|
separator: ""
|
||||||
|
prefix: group
|
||||||
|
groups:
|
||||||
|
webservers: "'web' in (proxmox_tags_parsed|list)"
|
||||||
|
mailservers: "'mail' in (proxmox_tags_parsed|list)"
|
||||||
|
compose:
|
||||||
|
ansible_port: 2222
|
||||||
'''
|
'''
|
||||||
|
|
||||||
import re
|
import re
|
||||||
@@ -86,7 +104,7 @@ from ansible.module_utils.common._collections_compat import MutableMapping
|
|||||||
from distutils.version import LooseVersion
|
from distutils.version import LooseVersion
|
||||||
|
|
||||||
from ansible.errors import AnsibleError
|
from ansible.errors import AnsibleError
|
||||||
from ansible.plugins.inventory import BaseInventoryPlugin, Cacheable
|
from ansible.plugins.inventory import BaseInventoryPlugin, Constructable, Cacheable
|
||||||
from ansible.module_utils.six.moves.urllib.parse import urlencode
|
from ansible.module_utils.six.moves.urllib.parse import urlencode
|
||||||
|
|
||||||
# 3rd party imports
|
# 3rd party imports
|
||||||
@@ -99,7 +117,7 @@ except ImportError:
|
|||||||
HAS_REQUESTS = False
|
HAS_REQUESTS = False
|
||||||
|
|
||||||
|
|
||||||
class InventoryModule(BaseInventoryPlugin, Cacheable):
|
class InventoryModule(BaseInventoryPlugin, Constructable, Cacheable):
|
||||||
''' Host inventory parser for ansible using Proxmox as source. '''
|
''' Host inventory parser for ansible using Proxmox as source. '''
|
||||||
|
|
||||||
NAME = 'community.general.proxmox'
|
NAME = 'community.general.proxmox'
|
||||||
@@ -206,9 +224,36 @@ class InventoryModule(BaseInventoryPlugin, Cacheable):
|
|||||||
except Exception:
|
except Exception:
|
||||||
return None
|
return None
|
||||||
|
|
||||||
|
def _get_agent_network_interfaces(self, node, vmid, vmtype):
|
||||||
|
result = []
|
||||||
|
|
||||||
|
try:
|
||||||
|
ifaces = self._get_json(
|
||||||
|
"%s/api2/json/nodes/%s/%s/%s/agent/network-get-interfaces" % (
|
||||||
|
self.proxmox_url, node, vmtype, vmid
|
||||||
|
)
|
||||||
|
)['result']
|
||||||
|
|
||||||
|
for iface in ifaces:
|
||||||
|
result.append({
|
||||||
|
'name': iface['name'],
|
||||||
|
'mac-address': iface['hardware-address'],
|
||||||
|
'ip-addresses': [
|
||||||
|
"%s/%s" % (ip['ip-address'], ip['prefix']) for ip in iface['ip-addresses']
|
||||||
|
]
|
||||||
|
})
|
||||||
|
except requests.HTTPError:
|
||||||
|
pass
|
||||||
|
|
||||||
|
return result
|
||||||
|
|
||||||
def _get_vm_config(self, node, vmid, vmtype, name):
|
def _get_vm_config(self, node, vmid, vmtype, name):
|
||||||
ret = self._get_json("%s/api2/json/nodes/%s/%s/%s/config" % (self.proxmox_url, node, vmtype, vmid))
|
ret = self._get_json("%s/api2/json/nodes/%s/%s/%s/config" % (self.proxmox_url, node, vmtype, vmid))
|
||||||
|
|
||||||
|
node_key = 'node'
|
||||||
|
node_key = self.to_safe('%s%s' % (self.get_option('facts_prefix'), node_key.lower()))
|
||||||
|
self.inventory.set_variable(name, node_key, node)
|
||||||
|
|
||||||
vmid_key = 'vmid'
|
vmid_key = 'vmid'
|
||||||
vmid_key = self.to_safe('%s%s' % (self.get_option('facts_prefix'), vmid_key.lower()))
|
vmid_key = self.to_safe('%s%s' % (self.get_option('facts_prefix'), vmid_key.lower()))
|
||||||
self.inventory.set_variable(name, vmid_key, vmid)
|
self.inventory.set_variable(name, vmid_key, vmid)
|
||||||
@@ -217,6 +262,10 @@ class InventoryModule(BaseInventoryPlugin, Cacheable):
|
|||||||
vmtype_key = self.to_safe('%s%s' % (self.get_option('facts_prefix'), vmtype_key.lower()))
|
vmtype_key = self.to_safe('%s%s' % (self.get_option('facts_prefix'), vmtype_key.lower()))
|
||||||
self.inventory.set_variable(name, vmtype_key, vmtype)
|
self.inventory.set_variable(name, vmtype_key, vmtype)
|
||||||
|
|
||||||
|
plaintext_configs = [
|
||||||
|
'tags',
|
||||||
|
]
|
||||||
|
|
||||||
for config in ret:
|
for config in ret:
|
||||||
key = config
|
key = config
|
||||||
key = self.to_safe('%s%s' % (self.get_option('facts_prefix'), key.lower()))
|
key = self.to_safe('%s%s' % (self.get_option('facts_prefix'), key.lower()))
|
||||||
@@ -226,6 +275,20 @@ class InventoryModule(BaseInventoryPlugin, Cacheable):
|
|||||||
if config == 'rootfs' or config.startswith(('virtio', 'sata', 'ide', 'scsi')):
|
if config == 'rootfs' or config.startswith(('virtio', 'sata', 'ide', 'scsi')):
|
||||||
value = ('disk_image=' + value)
|
value = ('disk_image=' + value)
|
||||||
|
|
||||||
|
# Additional field containing parsed tags as list
|
||||||
|
if config == 'tags':
|
||||||
|
parsed_key = self.to_safe('%s%s' % (key, "_parsed"))
|
||||||
|
parsed_value = [tag.strip() for tag in value.split(",")]
|
||||||
|
self.inventory.set_variable(name, parsed_key, parsed_value)
|
||||||
|
|
||||||
|
# The first field in the agent string tells you whether the agent is enabled
|
||||||
|
# the rest of the comma separated string is extra config for the agent
|
||||||
|
if config == 'agent' and int(value.split(',')[0]):
|
||||||
|
agent_iface_key = self.to_safe('%s%s' % (key, "_interfaces"))
|
||||||
|
agent_iface_value = self._get_agent_network_interfaces(node, vmid, vmtype)
|
||||||
|
if agent_iface_value:
|
||||||
|
self.inventory.set_variable(name, agent_iface_key, agent_iface_value)
|
||||||
|
|
||||||
if not (isinstance(value, int) or ',' not in value):
|
if not (isinstance(value, int) or ',' not in value):
|
||||||
# split off strings with commas to a dict
|
# split off strings with commas to a dict
|
||||||
# skip over any keys that cannot be processed
|
# skip over any keys that cannot be processed
|
||||||
@@ -254,6 +317,12 @@ class InventoryModule(BaseInventoryPlugin, Cacheable):
|
|||||||
regex = r"[^A-Za-z0-9\_]"
|
regex = r"[^A-Za-z0-9\_]"
|
||||||
return re.sub(regex, "_", word.replace(" ", ""))
|
return re.sub(regex, "_", word.replace(" ", ""))
|
||||||
|
|
||||||
|
def _apply_constructable(self, name, variables):
|
||||||
|
strict = self.get_option('strict')
|
||||||
|
self._add_host_to_composed_groups(self.get_option('groups'), variables, name, strict=strict)
|
||||||
|
self._add_host_to_keyed_groups(self.get_option('keyed_groups'), variables, name, strict=strict)
|
||||||
|
self._set_composite_vars(self.get_option('compose'), variables, name, strict=strict)
|
||||||
|
|
||||||
def _populate(self):
|
def _populate(self):
|
||||||
|
|
||||||
self._get_auth()
|
self._get_auth()
|
||||||
@@ -308,6 +377,8 @@ class InventoryModule(BaseInventoryPlugin, Cacheable):
|
|||||||
if self.get_option('want_facts'):
|
if self.get_option('want_facts'):
|
||||||
self._get_vm_config(node['node'], lxc['vmid'], 'lxc', lxc['name'])
|
self._get_vm_config(node['node'], lxc['vmid'], 'lxc', lxc['name'])
|
||||||
|
|
||||||
|
self._apply_constructable(lxc["name"], self.inventory.get_host(lxc['name']).get_vars())
|
||||||
|
|
||||||
# get QEMU vm's for this node
|
# get QEMU vm's for this node
|
||||||
node_qemu_group = self.to_safe('%s%s' % (self.get_option('group_prefix'), ('%s_qemu' % node['node']).lower()))
|
node_qemu_group = self.to_safe('%s%s' % (self.get_option('group_prefix'), ('%s_qemu' % node['node']).lower()))
|
||||||
self.inventory.add_group(node_qemu_group)
|
self.inventory.add_group(node_qemu_group)
|
||||||
@@ -330,6 +401,8 @@ class InventoryModule(BaseInventoryPlugin, Cacheable):
|
|||||||
if self.get_option('want_facts'):
|
if self.get_option('want_facts'):
|
||||||
self._get_vm_config(node['node'], qemu['vmid'], 'qemu', qemu['name'])
|
self._get_vm_config(node['node'], qemu['vmid'], 'qemu', qemu['name'])
|
||||||
|
|
||||||
|
self._apply_constructable(qemu["name"], self.inventory.get_host(qemu['name']).get_vars())
|
||||||
|
|
||||||
# gather vm's in pools
|
# gather vm's in pools
|
||||||
for pool in self._get_pools():
|
for pool in self._get_pools():
|
||||||
if pool.get('poolid'):
|
if pool.get('poolid'):
|
||||||
@@ -339,7 +412,8 @@ class InventoryModule(BaseInventoryPlugin, Cacheable):
|
|||||||
|
|
||||||
for member in self._get_members_per_pool(pool['poolid']):
|
for member in self._get_members_per_pool(pool['poolid']):
|
||||||
if member.get('name'):
|
if member.get('name'):
|
||||||
self.inventory.add_child(pool_group, member['name'])
|
if not member.get('template'):
|
||||||
|
self.inventory.add_child(pool_group, member['name'])
|
||||||
|
|
||||||
def parse(self, inventory, loader, path, cache=True):
|
def parse(self, inventory, loader, path, cache=True):
|
||||||
if not HAS_REQUESTS:
|
if not HAS_REQUESTS:
|
||||||
|
|||||||
@@ -10,6 +10,8 @@ DOCUMENTATION = '''
|
|||||||
name: stackpath_compute
|
name: stackpath_compute
|
||||||
short_description: StackPath Edge Computing inventory source
|
short_description: StackPath Edge Computing inventory source
|
||||||
version_added: 1.2.0
|
version_added: 1.2.0
|
||||||
|
author:
|
||||||
|
- UNKNOWN (@shayrybak)
|
||||||
extends_documentation_fragment:
|
extends_documentation_fragment:
|
||||||
- inventory_cache
|
- inventory_cache
|
||||||
- constructed
|
- constructed
|
||||||
@@ -102,13 +104,13 @@ class InventoryModule(BaseInventoryPlugin, Constructable, Cacheable):
|
|||||||
raise AnsibleError("plugin doesn't match this plugin")
|
raise AnsibleError("plugin doesn't match this plugin")
|
||||||
try:
|
try:
|
||||||
client_id = config['client_id']
|
client_id = config['client_id']
|
||||||
if client_id != 32:
|
if len(client_id) != 32:
|
||||||
raise AnsibleError("client_id must be 32 characters long")
|
raise AnsibleError("client_id must be 32 characters long")
|
||||||
except KeyError:
|
except KeyError:
|
||||||
raise AnsibleError("config missing client_id, a required option")
|
raise AnsibleError("config missing client_id, a required option")
|
||||||
try:
|
try:
|
||||||
client_secret = config['client_secret']
|
client_secret = config['client_secret']
|
||||||
if client_secret != 64:
|
if len(client_secret) != 64:
|
||||||
raise AnsibleError("client_secret must be 64 characters long")
|
raise AnsibleError("client_secret must be 64 characters long")
|
||||||
except KeyError:
|
except KeyError:
|
||||||
raise AnsibleError("config missing client_id, a required option")
|
raise AnsibleError("config missing client_id, a required option")
|
||||||
|
|||||||
@@ -171,10 +171,10 @@ class LookupModule(LookupBase):
|
|||||||
|
|
||||||
paramvals = {
|
paramvals = {
|
||||||
'key': params[0],
|
'key': params[0],
|
||||||
'token': None,
|
'token': self.get_option('token'),
|
||||||
'recurse': False,
|
'recurse': self.get_option('recurse'),
|
||||||
'index': None,
|
'index': self.get_option('index'),
|
||||||
'datacenter': None
|
'datacenter': self.get_option('datacenter')
|
||||||
}
|
}
|
||||||
|
|
||||||
# parameters specified?
|
# parameters specified?
|
||||||
|
|||||||
@@ -31,7 +31,9 @@ EXAMPLES = r"""
|
|||||||
- name: Template files (explicitly skip directories in order to use the 'src' attribute)
|
- name: Template files (explicitly skip directories in order to use the 'src' attribute)
|
||||||
ansible.builtin.template:
|
ansible.builtin.template:
|
||||||
src: '{{ item.src }}'
|
src: '{{ item.src }}'
|
||||||
dest: /web/{{ item.path }}
|
# Your template files should be stored with a .j2 file extension,
|
||||||
|
# but should not be deployed with it. splitext|first removes it.
|
||||||
|
dest: /web/{{ item.path | splitext | first }}
|
||||||
mode: '{{ item.mode }}'
|
mode: '{{ item.mode }}'
|
||||||
with_community.general.filetree: web/
|
with_community.general.filetree: web/
|
||||||
when: item.state == 'file'
|
when: item.state == 'file'
|
||||||
@@ -41,6 +43,7 @@ EXAMPLES = r"""
|
|||||||
src: '{{ item.src }}'
|
src: '{{ item.src }}'
|
||||||
dest: /web/{{ item.path }}
|
dest: /web/{{ item.path }}
|
||||||
state: link
|
state: link
|
||||||
|
follow: false # avoid corrupting target files if the link already exists
|
||||||
force: yes
|
force: yes
|
||||||
mode: '{{ item.mode }}'
|
mode: '{{ item.mode }}'
|
||||||
with_community.general.filetree: web/
|
with_community.general.filetree: web/
|
||||||
|
|||||||
@@ -63,6 +63,7 @@ import os
|
|||||||
|
|
||||||
from ansible.plugins.lookup import LookupBase
|
from ansible.plugins.lookup import LookupBase
|
||||||
from ansible.utils.cmd_functions import run_cmd
|
from ansible.utils.cmd_functions import run_cmd
|
||||||
|
from ansible.module_utils._text import to_text
|
||||||
|
|
||||||
ANSIBLE_HIERA_CFG = os.getenv('ANSIBLE_HIERA_CFG', '/etc/hiera.yaml')
|
ANSIBLE_HIERA_CFG = os.getenv('ANSIBLE_HIERA_CFG', '/etc/hiera.yaml')
|
||||||
ANSIBLE_HIERA_BIN = os.getenv('ANSIBLE_HIERA_BIN', '/usr/bin/hiera')
|
ANSIBLE_HIERA_BIN = os.getenv('ANSIBLE_HIERA_BIN', '/usr/bin/hiera')
|
||||||
@@ -78,13 +79,11 @@ class Hiera(object):
|
|||||||
rc, output, err = run_cmd("{0} -c {1} {2}".format(
|
rc, output, err = run_cmd("{0} -c {1} {2}".format(
|
||||||
ANSIBLE_HIERA_BIN, ANSIBLE_HIERA_CFG, hiera_key[0]))
|
ANSIBLE_HIERA_BIN, ANSIBLE_HIERA_CFG, hiera_key[0]))
|
||||||
|
|
||||||
return output.strip()
|
return to_text(output.strip())
|
||||||
|
|
||||||
|
|
||||||
class LookupModule(LookupBase):
|
class LookupModule(LookupBase):
|
||||||
def run(self, terms, variables=''):
|
def run(self, terms, variables=''):
|
||||||
hiera = Hiera()
|
hiera = Hiera()
|
||||||
ret = []
|
ret = [hiera.get(terms)]
|
||||||
|
|
||||||
ret.append(hiera.get(terms))
|
|
||||||
return ret
|
return ret
|
||||||
|
|||||||
@@ -103,6 +103,14 @@ EXAMPLES = r"""
|
|||||||
| items2dict(key_name='slug',
|
| items2dict(key_name='slug',
|
||||||
value_name='itemValue'))['password']
|
value_name='itemValue'))['password']
|
||||||
}}
|
}}
|
||||||
|
|
||||||
|
- hosts: localhost
|
||||||
|
vars:
|
||||||
|
secret_password: >-
|
||||||
|
{{ ((lookup('community.general.tss', 1) | from_json).get('items') | items2dict(key_name='slug', value_name='itemValue'))['password'] }}"
|
||||||
|
tasks:
|
||||||
|
- ansible.builtin.debug:
|
||||||
|
msg: the password is {{ secret_password }}
|
||||||
"""
|
"""
|
||||||
|
|
||||||
from ansible.errors import AnsibleError, AnsibleOptionsError
|
from ansible.errors import AnsibleError, AnsibleOptionsError
|
||||||
|
|||||||
67
plugins/module_utils/csv.py
Normal file
67
plugins/module_utils/csv.py
Normal file
@@ -0,0 +1,67 @@
|
|||||||
|
# -*- coding: utf-8 -*-
|
||||||
|
|
||||||
|
# Copyright: (c) 2021, Andrew Pantuso (@ajpantuso) <ajpantuso@gmail.com>
|
||||||
|
# Copyright: (c) 2018, Dag Wieers (@dagwieers) <dag@wieers.com>
|
||||||
|
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||||
|
|
||||||
|
from __future__ import absolute_import, division, print_function
|
||||||
|
__metaclass__ = type
|
||||||
|
|
||||||
|
import csv
|
||||||
|
from io import BytesIO, StringIO
|
||||||
|
|
||||||
|
from ansible.module_utils._text import to_native
|
||||||
|
from ansible.module_utils.six import PY3
|
||||||
|
|
||||||
|
|
||||||
|
class CustomDialectFailureError(Exception):
|
||||||
|
pass
|
||||||
|
|
||||||
|
|
||||||
|
class DialectNotAvailableError(Exception):
|
||||||
|
pass
|
||||||
|
|
||||||
|
|
||||||
|
CSVError = csv.Error
|
||||||
|
|
||||||
|
|
||||||
|
def initialize_dialect(dialect, **kwargs):
|
||||||
|
# Add Unix dialect from Python 3
|
||||||
|
class unix_dialect(csv.Dialect):
|
||||||
|
"""Describe the usual properties of Unix-generated CSV files."""
|
||||||
|
delimiter = ','
|
||||||
|
quotechar = '"'
|
||||||
|
doublequote = True
|
||||||
|
skipinitialspace = False
|
||||||
|
lineterminator = '\n'
|
||||||
|
quoting = csv.QUOTE_ALL
|
||||||
|
|
||||||
|
csv.register_dialect("unix", unix_dialect)
|
||||||
|
|
||||||
|
if dialect not in csv.list_dialects():
|
||||||
|
raise DialectNotAvailableError("Dialect '%s' is not supported by your version of python." % dialect)
|
||||||
|
|
||||||
|
# Create a dictionary from only set options
|
||||||
|
dialect_params = dict((k, v) for k, v in kwargs.items() if v is not None)
|
||||||
|
if dialect_params:
|
||||||
|
try:
|
||||||
|
csv.register_dialect('custom', dialect, **dialect_params)
|
||||||
|
except TypeError as e:
|
||||||
|
raise CustomDialectFailureError("Unable to create custom dialect: %s" % to_native(e))
|
||||||
|
dialect = 'custom'
|
||||||
|
|
||||||
|
return dialect
|
||||||
|
|
||||||
|
|
||||||
|
def read_csv(data, dialect, fieldnames=None):
|
||||||
|
|
||||||
|
data = to_native(data, errors='surrogate_or_strict')
|
||||||
|
|
||||||
|
if PY3:
|
||||||
|
fake_fh = StringIO(data)
|
||||||
|
else:
|
||||||
|
fake_fh = BytesIO(data)
|
||||||
|
|
||||||
|
reader = csv.DictReader(fake_fh, fieldnames=fieldnames, dialect=dialect)
|
||||||
|
|
||||||
|
return reader
|
||||||
234
plugins/module_utils/gandi_livedns_api.py
Normal file
234
plugins/module_utils/gandi_livedns_api.py
Normal file
@@ -0,0 +1,234 @@
|
|||||||
|
# -*- coding: utf-8 -*-
|
||||||
|
# Copyright: (c) 2019 Gregory Thiemonge <gregory.thiemonge@gmail.com>
|
||||||
|
# Simplified BSD License (see licenses/simplified_bsd.txt or https://opensource.org/licenses/BSD-2-Clause)
|
||||||
|
|
||||||
|
from __future__ import absolute_import, division, print_function
|
||||||
|
__metaclass__ = type
|
||||||
|
|
||||||
|
import json
|
||||||
|
|
||||||
|
from ansible.module_utils._text import to_native, to_text
|
||||||
|
from ansible.module_utils.urls import fetch_url
|
||||||
|
|
||||||
|
|
||||||
|
class GandiLiveDNSAPI(object):
|
||||||
|
|
||||||
|
api_endpoint = 'https://api.gandi.net/v5/livedns'
|
||||||
|
changed = False
|
||||||
|
|
||||||
|
error_strings = {
|
||||||
|
400: 'Bad request',
|
||||||
|
401: 'Permission denied',
|
||||||
|
404: 'Resource not found',
|
||||||
|
}
|
||||||
|
|
||||||
|
attribute_map = {
|
||||||
|
'record': 'rrset_name',
|
||||||
|
'type': 'rrset_type',
|
||||||
|
'ttl': 'rrset_ttl',
|
||||||
|
'values': 'rrset_values'
|
||||||
|
}
|
||||||
|
|
||||||
|
def __init__(self, module):
|
||||||
|
self.module = module
|
||||||
|
self.api_key = module.params['api_key']
|
||||||
|
|
||||||
|
def _build_error_message(self, module, info):
|
||||||
|
s = ''
|
||||||
|
body = info.get('body')
|
||||||
|
if body:
|
||||||
|
errors = module.from_json(body).get('errors')
|
||||||
|
if errors:
|
||||||
|
error = errors[0]
|
||||||
|
name = error.get('name')
|
||||||
|
if name:
|
||||||
|
s += '{0} :'.format(name)
|
||||||
|
description = error.get('description')
|
||||||
|
if description:
|
||||||
|
s += description
|
||||||
|
return s
|
||||||
|
|
||||||
|
def _gandi_api_call(self, api_call, method='GET', payload=None, error_on_404=True):
|
||||||
|
headers = {'Authorization': 'Apikey {0}'.format(self.api_key),
|
||||||
|
'Content-Type': 'application/json'}
|
||||||
|
data = None
|
||||||
|
if payload:
|
||||||
|
try:
|
||||||
|
data = json.dumps(payload)
|
||||||
|
except Exception as e:
|
||||||
|
self.module.fail_json(msg="Failed to encode payload as JSON: %s " % to_native(e))
|
||||||
|
|
||||||
|
resp, info = fetch_url(self.module,
|
||||||
|
self.api_endpoint + api_call,
|
||||||
|
headers=headers,
|
||||||
|
data=data,
|
||||||
|
method=method)
|
||||||
|
|
||||||
|
error_msg = ''
|
||||||
|
if info['status'] >= 400 and (info['status'] != 404 or error_on_404):
|
||||||
|
err_s = self.error_strings.get(info['status'], '')
|
||||||
|
|
||||||
|
error_msg = "API Error {0}: {1}".format(err_s, self._build_error_message(self.module, info))
|
||||||
|
|
||||||
|
result = None
|
||||||
|
try:
|
||||||
|
content = resp.read()
|
||||||
|
except AttributeError:
|
||||||
|
content = None
|
||||||
|
|
||||||
|
if content:
|
||||||
|
try:
|
||||||
|
result = json.loads(to_text(content, errors='surrogate_or_strict'))
|
||||||
|
except (getattr(json, 'JSONDecodeError', ValueError)) as e:
|
||||||
|
error_msg += "; Failed to parse API response with error {0}: {1}".format(to_native(e), content)
|
||||||
|
|
||||||
|
if error_msg:
|
||||||
|
self.module.fail_json(msg=error_msg)
|
||||||
|
|
||||||
|
return result, info['status']
|
||||||
|
|
||||||
|
def build_result(self, result, domain):
|
||||||
|
if result is None:
|
||||||
|
return None
|
||||||
|
|
||||||
|
res = {}
|
||||||
|
for k in self.attribute_map:
|
||||||
|
v = result.get(self.attribute_map[k], None)
|
||||||
|
if v is not None:
|
||||||
|
if k == 'record' and v == '@':
|
||||||
|
v = ''
|
||||||
|
res[k] = v
|
||||||
|
|
||||||
|
res['domain'] = domain
|
||||||
|
|
||||||
|
return res
|
||||||
|
|
||||||
|
def build_results(self, results, domain):
|
||||||
|
if results is None:
|
||||||
|
return []
|
||||||
|
return [self.build_result(r, domain) for r in results]
|
||||||
|
|
||||||
|
def get_records(self, record, type, domain):
|
||||||
|
url = '/domains/%s/records' % (domain)
|
||||||
|
if record:
|
||||||
|
url += '/%s' % (record)
|
||||||
|
if type:
|
||||||
|
url += '/%s' % (type)
|
||||||
|
|
||||||
|
records, status = self._gandi_api_call(url, error_on_404=False)
|
||||||
|
|
||||||
|
if status == 404:
|
||||||
|
return []
|
||||||
|
|
||||||
|
if not isinstance(records, list):
|
||||||
|
records = [records]
|
||||||
|
|
||||||
|
# filter by type if record is not set
|
||||||
|
if not record and type:
|
||||||
|
records = [r
|
||||||
|
for r in records
|
||||||
|
if r['rrset_type'] == type]
|
||||||
|
|
||||||
|
return records
|
||||||
|
|
||||||
|
def create_record(self, record, type, values, ttl, domain):
|
||||||
|
url = '/domains/%s/records' % (domain)
|
||||||
|
new_record = {
|
||||||
|
'rrset_name': record,
|
||||||
|
'rrset_type': type,
|
||||||
|
'rrset_values': values,
|
||||||
|
'rrset_ttl': ttl,
|
||||||
|
}
|
||||||
|
record, status = self._gandi_api_call(url, method='POST', payload=new_record)
|
||||||
|
|
||||||
|
if status in (200, 201,):
|
||||||
|
return new_record
|
||||||
|
|
||||||
|
return None
|
||||||
|
|
||||||
|
def update_record(self, record, type, values, ttl, domain):
|
||||||
|
url = '/domains/%s/records/%s/%s' % (domain, record, type)
|
||||||
|
new_record = {
|
||||||
|
'rrset_values': values,
|
||||||
|
'rrset_ttl': ttl,
|
||||||
|
}
|
||||||
|
record = self._gandi_api_call(url, method='PUT', payload=new_record)[0]
|
||||||
|
return record
|
||||||
|
|
||||||
|
def delete_record(self, record, type, domain):
|
||||||
|
url = '/domains/%s/records/%s/%s' % (domain, record, type)
|
||||||
|
|
||||||
|
self._gandi_api_call(url, method='DELETE')
|
||||||
|
|
||||||
|
def delete_dns_record(self, record, type, values, domain):
|
||||||
|
if record == '':
|
||||||
|
record = '@'
|
||||||
|
|
||||||
|
records = self.get_records(record, type, domain)
|
||||||
|
|
||||||
|
if records:
|
||||||
|
cur_record = records[0]
|
||||||
|
|
||||||
|
self.changed = True
|
||||||
|
|
||||||
|
if values is not None and set(cur_record['rrset_values']) != set(values):
|
||||||
|
new_values = set(cur_record['rrset_values']) - set(values)
|
||||||
|
if new_values:
|
||||||
|
# Removing one or more values from a record, we update the record with the remaining values
|
||||||
|
self.update_record(record, type, list(new_values), cur_record['rrset_ttl'], domain)
|
||||||
|
records = self.get_records(record, type, domain)
|
||||||
|
return records[0], self.changed
|
||||||
|
|
||||||
|
if not self.module.check_mode:
|
||||||
|
self.delete_record(record, type, domain)
|
||||||
|
else:
|
||||||
|
cur_record = None
|
||||||
|
|
||||||
|
return None, self.changed
|
||||||
|
|
||||||
|
def ensure_dns_record(self, record, type, ttl, values, domain):
|
||||||
|
if record == '':
|
||||||
|
record = '@'
|
||||||
|
|
||||||
|
records = self.get_records(record, type, domain)
|
||||||
|
|
||||||
|
if records:
|
||||||
|
cur_record = records[0]
|
||||||
|
|
||||||
|
do_update = False
|
||||||
|
if ttl is not None and cur_record['rrset_ttl'] != ttl:
|
||||||
|
do_update = True
|
||||||
|
if values is not None and set(cur_record['rrset_values']) != set(values):
|
||||||
|
do_update = True
|
||||||
|
|
||||||
|
if do_update:
|
||||||
|
if self.module.check_mode:
|
||||||
|
result = dict(
|
||||||
|
rrset_type=type,
|
||||||
|
rrset_name=record,
|
||||||
|
rrset_values=values,
|
||||||
|
rrset_ttl=ttl
|
||||||
|
)
|
||||||
|
else:
|
||||||
|
self.update_record(record, type, values, ttl, domain)
|
||||||
|
|
||||||
|
records = self.get_records(record, type, domain)
|
||||||
|
result = records[0]
|
||||||
|
self.changed = True
|
||||||
|
return result, self.changed
|
||||||
|
else:
|
||||||
|
return cur_record, self.changed
|
||||||
|
|
||||||
|
if self.module.check_mode:
|
||||||
|
new_record = dict(
|
||||||
|
rrset_type=type,
|
||||||
|
rrset_name=record,
|
||||||
|
rrset_values=values,
|
||||||
|
rrset_ttl=ttl
|
||||||
|
)
|
||||||
|
result = new_record
|
||||||
|
else:
|
||||||
|
result = self.create_record(record, type, values, ttl, domain)
|
||||||
|
|
||||||
|
self.changed = True
|
||||||
|
return result, self.changed
|
||||||
@@ -55,7 +55,7 @@ def keycloak_argument_spec():
|
|||||||
:return: argument_spec dict
|
:return: argument_spec dict
|
||||||
"""
|
"""
|
||||||
return dict(
|
return dict(
|
||||||
auth_keycloak_url=dict(type='str', aliases=['url'], required=True),
|
auth_keycloak_url=dict(type='str', aliases=['url'], required=True, no_log=False),
|
||||||
auth_client_id=dict(type='str', default='admin-cli'),
|
auth_client_id=dict(type='str', default='admin-cli'),
|
||||||
auth_realm=dict(type='str', required=True),
|
auth_realm=dict(type='str', required=True),
|
||||||
auth_client_secret=dict(type='str', default=None, no_log=True),
|
auth_client_secret=dict(type='str', default=None, no_log=True),
|
||||||
|
|||||||
@@ -119,9 +119,9 @@ class IPAClient(object):
|
|||||||
data = dict(method=method)
|
data = dict(method=method)
|
||||||
|
|
||||||
# TODO: We should probably handle this a little better.
|
# TODO: We should probably handle this a little better.
|
||||||
if method in ('ping', 'config_show'):
|
if method in ('ping', 'config_show', 'otpconfig_show'):
|
||||||
data['params'] = [[], {}]
|
data['params'] = [[], {}]
|
||||||
elif method == 'config_mod':
|
elif method in ('config_mod', 'otpconfig_mod'):
|
||||||
data['params'] = [[], item]
|
data['params'] = [[], item]
|
||||||
else:
|
else:
|
||||||
data['params'] = [[name], item]
|
data['params'] = [[name], item]
|
||||||
|
|||||||
@@ -87,11 +87,12 @@ def not_in_host_file(self, host):
|
|||||||
user_host_file = "~/.ssh/known_hosts"
|
user_host_file = "~/.ssh/known_hosts"
|
||||||
user_host_file = os.path.expanduser(user_host_file)
|
user_host_file = os.path.expanduser(user_host_file)
|
||||||
|
|
||||||
host_file_list = []
|
host_file_list = [
|
||||||
host_file_list.append(user_host_file)
|
user_host_file,
|
||||||
host_file_list.append("/etc/ssh/ssh_known_hosts")
|
"/etc/ssh/ssh_known_hosts",
|
||||||
host_file_list.append("/etc/ssh/ssh_known_hosts2")
|
"/etc/ssh/ssh_known_hosts2",
|
||||||
host_file_list.append("/etc/openssh/ssh_known_hosts")
|
"/etc/openssh/ssh_known_hosts",
|
||||||
|
]
|
||||||
|
|
||||||
hfiles_not_found = 0
|
hfiles_not_found = 0
|
||||||
for hf in host_file_list:
|
for hf in host_file_list:
|
||||||
|
|||||||
@@ -10,6 +10,7 @@ from functools import partial, wraps
|
|||||||
import traceback
|
import traceback
|
||||||
|
|
||||||
from ansible.module_utils.basic import AnsibleModule
|
from ansible.module_utils.basic import AnsibleModule
|
||||||
|
from ansible.module_utils.common.dict_transformations import dict_merge
|
||||||
|
|
||||||
|
|
||||||
class ModuleHelperException(Exception):
|
class ModuleHelperException(Exception):
|
||||||
@@ -24,12 +25,12 @@ class ModuleHelperException(Exception):
|
|||||||
def __init__(self, *args, **kwargs):
|
def __init__(self, *args, **kwargs):
|
||||||
self.msg = self._get_remove('msg', kwargs) or "Module failed with exception: {0}".format(self)
|
self.msg = self._get_remove('msg', kwargs) or "Module failed with exception: {0}".format(self)
|
||||||
self.update_output = self._get_remove('update_output', kwargs) or {}
|
self.update_output = self._get_remove('update_output', kwargs) or {}
|
||||||
super(ModuleHelperException, self).__init__(*args, **kwargs)
|
super(ModuleHelperException, self).__init__(*args)
|
||||||
|
|
||||||
|
|
||||||
class ArgFormat(object):
|
class ArgFormat(object):
|
||||||
"""
|
"""
|
||||||
Argument formatter
|
Argument formatter for use as a command line parameter. Used in CmdMixin.
|
||||||
"""
|
"""
|
||||||
BOOLEAN = 0
|
BOOLEAN = 0
|
||||||
PRINTF = 1
|
PRINTF = 1
|
||||||
@@ -50,7 +51,8 @@ class ArgFormat(object):
|
|||||||
|
|
||||||
def __init__(self, name, fmt=None, style=FORMAT, stars=0):
|
def __init__(self, name, fmt=None, style=FORMAT, stars=0):
|
||||||
"""
|
"""
|
||||||
Creates a new formatter
|
Creates a CLI-formatter for one specific argument. The argument may be a module parameter or just a named parameter for
|
||||||
|
the CLI command execution.
|
||||||
:param name: Name of the argument to be formatted
|
:param name: Name of the argument to be formatted
|
||||||
:param fmt: Either a str to be formatted (using or not printf-style) or a callable that does that
|
:param fmt: Either a str to be formatted (using or not printf-style) or a callable that does that
|
||||||
:param style: Whether arg_format (as str) should use printf-style formatting.
|
:param style: Whether arg_format (as str) should use printf-style formatting.
|
||||||
@@ -93,22 +95,33 @@ class ArgFormat(object):
|
|||||||
self.arg_format = (self.stars_deco(stars))(self.arg_format)
|
self.arg_format = (self.stars_deco(stars))(self.arg_format)
|
||||||
|
|
||||||
def to_text(self, value):
|
def to_text(self, value):
|
||||||
|
if value is None:
|
||||||
|
return []
|
||||||
func = self.arg_format
|
func = self.arg_format
|
||||||
return [str(p) for p in func(value)]
|
return [str(p) for p in func(value)]
|
||||||
|
|
||||||
|
|
||||||
def cause_changes(func, on_success=True, on_failure=False):
|
def cause_changes(on_success=None, on_failure=None):
|
||||||
@wraps(func)
|
|
||||||
def wrapper(self, *args, **kwargs):
|
def deco(func):
|
||||||
try:
|
if on_success is None and on_failure is None:
|
||||||
func(*args, **kwargs)
|
return func
|
||||||
if on_success:
|
|
||||||
self.changed = True
|
@wraps(func)
|
||||||
except Exception as e:
|
def wrapper(*args, **kwargs):
|
||||||
if on_failure:
|
try:
|
||||||
self.changed = True
|
self = args[0]
|
||||||
raise
|
func(*args, **kwargs)
|
||||||
return wrapper
|
if on_success is not None:
|
||||||
|
self.changed = on_success
|
||||||
|
except Exception:
|
||||||
|
if on_failure is not None:
|
||||||
|
self.changed = on_failure
|
||||||
|
raise
|
||||||
|
|
||||||
|
return wrapper
|
||||||
|
|
||||||
|
return deco
|
||||||
|
|
||||||
|
|
||||||
def module_fails_on_exception(func):
|
def module_fails_on_exception(func):
|
||||||
@@ -121,10 +134,12 @@ def module_fails_on_exception(func):
|
|||||||
except ModuleHelperException as e:
|
except ModuleHelperException as e:
|
||||||
if e.update_output:
|
if e.update_output:
|
||||||
self.update_output(e.update_output)
|
self.update_output(e.update_output)
|
||||||
|
self.module.fail_json(msg=e.msg, exception=traceback.format_exc(),
|
||||||
|
output=self.output, vars=self.vars.output(), **self.output)
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
self.vars.msg = "Module failed with exception: {0}".format(str(e).strip())
|
msg = "Module failed with exception: {0}".format(str(e).strip())
|
||||||
self.vars.exception = traceback.format_exc()
|
self.module.fail_json(msg=msg, exception=traceback.format_exc(),
|
||||||
self.module.fail_json(changed=False, msg=self.vars.msg, exception=self.vars.exception, output=self.output, vars=self.vars)
|
output=self.output, vars=self.vars.output(), **self.output)
|
||||||
return wrapper
|
return wrapper
|
||||||
|
|
||||||
|
|
||||||
@@ -138,7 +153,7 @@ class DependencyCtxMgr(object):
|
|||||||
self.exc_tb = None
|
self.exc_tb = None
|
||||||
|
|
||||||
def __enter__(self):
|
def __enter__(self):
|
||||||
pass
|
return self
|
||||||
|
|
||||||
def __exit__(self, exc_type, exc_val, exc_tb):
|
def __exit__(self, exc_type, exc_val, exc_tb):
|
||||||
self.has_it = exc_type is None
|
self.has_it = exc_type is None
|
||||||
@@ -152,32 +167,157 @@ class DependencyCtxMgr(object):
|
|||||||
return self.msg or str(self.exc_val)
|
return self.msg or str(self.exc_val)
|
||||||
|
|
||||||
|
|
||||||
class ModuleHelper(object):
|
class VarMeta(object):
|
||||||
_dependencies = []
|
NOTHING = object()
|
||||||
module = {}
|
|
||||||
facts_name = None
|
def __init__(self, diff=False, output=True, change=None, fact=False):
|
||||||
|
self.init = False
|
||||||
|
self.initial_value = None
|
||||||
|
self.value = None
|
||||||
|
|
||||||
|
self.diff = diff
|
||||||
|
self.change = diff if change is None else change
|
||||||
|
self.output = output
|
||||||
|
self.fact = fact
|
||||||
|
|
||||||
|
def set(self, diff=None, output=None, change=None, fact=None, initial_value=NOTHING):
|
||||||
|
if diff is not None:
|
||||||
|
self.diff = diff
|
||||||
|
if output is not None:
|
||||||
|
self.output = output
|
||||||
|
if change is not None:
|
||||||
|
self.change = change
|
||||||
|
if fact is not None:
|
||||||
|
self.fact = fact
|
||||||
|
if initial_value is not self.NOTHING:
|
||||||
|
self.initial_value = initial_value
|
||||||
|
|
||||||
|
def set_value(self, value):
|
||||||
|
if not self.init:
|
||||||
|
self.initial_value = value
|
||||||
|
self.init = True
|
||||||
|
self.value = value
|
||||||
|
return self
|
||||||
|
|
||||||
|
@property
|
||||||
|
def has_changed(self):
|
||||||
|
return self.change and (self.initial_value != self.value)
|
||||||
|
|
||||||
|
@property
|
||||||
|
def diff_result(self):
|
||||||
|
return None if not (self.diff and self.has_changed) else {
|
||||||
|
'before': self.initial_value,
|
||||||
|
'after': self.value,
|
||||||
|
}
|
||||||
|
|
||||||
|
def __str__(self):
|
||||||
|
return "<VarMeta: value={0}, initial={1}, diff={2}, output={3}, change={4}>".format(
|
||||||
|
self.value, self.initial_value, self.diff, self.output, self.change
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
class ModuleHelper(object):
|
||||||
|
_output_conflict_list = ('msg', 'exception', 'output', 'vars', 'changed')
|
||||||
|
_dependencies = []
|
||||||
|
module = None
|
||||||
|
facts_name = None
|
||||||
|
output_params = ()
|
||||||
|
diff_params = ()
|
||||||
|
change_params = ()
|
||||||
|
facts_params = ()
|
||||||
|
|
||||||
|
class VarDict(object):
|
||||||
|
def __init__(self):
|
||||||
|
self._data = dict()
|
||||||
|
self._meta = dict()
|
||||||
|
|
||||||
|
def __getitem__(self, item):
|
||||||
|
return self._data[item]
|
||||||
|
|
||||||
|
def __setitem__(self, key, value):
|
||||||
|
self.set(key, value)
|
||||||
|
|
||||||
class AttrDict(dict):
|
|
||||||
def __getattr__(self, item):
|
def __getattr__(self, item):
|
||||||
return self[item]
|
try:
|
||||||
|
return self._data[item]
|
||||||
|
except KeyError:
|
||||||
|
return getattr(self._data, item)
|
||||||
|
|
||||||
|
def __setattr__(self, key, value):
|
||||||
|
if key in ('_data', '_meta'):
|
||||||
|
super(ModuleHelper.VarDict, self).__setattr__(key, value)
|
||||||
|
else:
|
||||||
|
self.set(key, value)
|
||||||
|
|
||||||
|
def meta(self, name):
|
||||||
|
return self._meta[name]
|
||||||
|
|
||||||
|
def set_meta(self, name, **kwargs):
|
||||||
|
self.meta(name).set(**kwargs)
|
||||||
|
|
||||||
|
def set(self, name, value, **kwargs):
|
||||||
|
if name in ('_data', '_meta'):
|
||||||
|
raise ValueError("Names _data and _meta are reserved for use by ModuleHelper")
|
||||||
|
self._data[name] = value
|
||||||
|
if name in self._meta:
|
||||||
|
meta = self.meta(name)
|
||||||
|
else:
|
||||||
|
meta = VarMeta(**kwargs)
|
||||||
|
meta.set_value(value)
|
||||||
|
self._meta[name] = meta
|
||||||
|
|
||||||
|
def output(self):
|
||||||
|
return dict((k, v) for k, v in self._data.items() if self.meta(k).output)
|
||||||
|
|
||||||
|
def diff(self):
|
||||||
|
diff_results = [(k, self.meta(k).diff_result) for k in self._data]
|
||||||
|
diff_results = [dr for dr in diff_results if dr[1] is not None]
|
||||||
|
if diff_results:
|
||||||
|
before = dict((dr[0], dr[1]['before']) for dr in diff_results)
|
||||||
|
after = dict((dr[0], dr[1]['after']) for dr in diff_results)
|
||||||
|
return {'before': before, 'after': after}
|
||||||
|
return None
|
||||||
|
|
||||||
|
def facts(self):
|
||||||
|
facts_result = dict((k, v) for k, v in self._data.items() if self._meta[k].fact)
|
||||||
|
return facts_result if facts_result else None
|
||||||
|
|
||||||
|
def change_vars(self):
|
||||||
|
return [v for v in self._data if self.meta(v).change]
|
||||||
|
|
||||||
|
def has_changed(self, v):
|
||||||
|
return self._meta[v].has_changed
|
||||||
|
|
||||||
def __init__(self, module=None):
|
def __init__(self, module=None):
|
||||||
self.vars = ModuleHelper.AttrDict()
|
self.vars = ModuleHelper.VarDict()
|
||||||
self.output_dict = dict()
|
|
||||||
self.facts_dict = dict()
|
|
||||||
self._changed = False
|
self._changed = False
|
||||||
|
|
||||||
if module:
|
if module:
|
||||||
self.module = module
|
self.module = module
|
||||||
|
|
||||||
if isinstance(self.module, dict):
|
if not isinstance(self.module, AnsibleModule):
|
||||||
self.module = AnsibleModule(**self.module)
|
self.module = AnsibleModule(**self.module)
|
||||||
|
|
||||||
|
for name, value in self.module.params.items():
|
||||||
|
self.vars.set(
|
||||||
|
name, value,
|
||||||
|
diff=name in self.diff_params,
|
||||||
|
output=name in self.output_params,
|
||||||
|
change=None if not self.change_params else name in self.change_params,
|
||||||
|
fact=name in self.facts_params,
|
||||||
|
)
|
||||||
|
|
||||||
|
def update_vars(self, meta=None, **kwargs):
|
||||||
|
if meta is None:
|
||||||
|
meta = {}
|
||||||
|
for k, v in kwargs.items():
|
||||||
|
self.vars.set(k, v, **meta)
|
||||||
|
|
||||||
def update_output(self, **kwargs):
|
def update_output(self, **kwargs):
|
||||||
self.output_dict.update(kwargs)
|
self.update_vars(meta={"output": True}, **kwargs)
|
||||||
|
|
||||||
def update_facts(self, **kwargs):
|
def update_facts(self, **kwargs):
|
||||||
self.facts_dict.update(kwargs)
|
self.update_vars(meta={"fact": True}, **kwargs)
|
||||||
|
|
||||||
def __init_module__(self):
|
def __init_module__(self):
|
||||||
pass
|
pass
|
||||||
@@ -188,6 +328,9 @@ class ModuleHelper(object):
|
|||||||
def __quit_module__(self):
|
def __quit_module__(self):
|
||||||
pass
|
pass
|
||||||
|
|
||||||
|
def _vars_changed(self):
|
||||||
|
return any(self.vars.has_changed(v) for v in self.vars.change_vars())
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def changed(self):
|
def changed(self):
|
||||||
return self._changed
|
return self._changed
|
||||||
@@ -196,12 +339,25 @@ class ModuleHelper(object):
|
|||||||
def changed(self, value):
|
def changed(self, value):
|
||||||
self._changed = value
|
self._changed = value
|
||||||
|
|
||||||
|
def has_changed(self):
|
||||||
|
return self.changed or self._vars_changed()
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def output(self):
|
def output(self):
|
||||||
result = dict(self.vars)
|
result = dict(self.vars.output())
|
||||||
result.update(self.output_dict)
|
|
||||||
if self.facts_name:
|
if self.facts_name:
|
||||||
result['ansible_facts'] = {self.facts_name: self.facts_dict}
|
facts = self.vars.facts()
|
||||||
|
if facts is not None:
|
||||||
|
result['ansible_facts'] = {self.facts_name: facts}
|
||||||
|
if self.module._diff:
|
||||||
|
diff = result.get('diff', {})
|
||||||
|
vars_diff = self.vars.diff() or {}
|
||||||
|
result['diff'] = dict_merge(dict(diff), vars_diff)
|
||||||
|
|
||||||
|
for varname in result:
|
||||||
|
if varname in self._output_conflict_list:
|
||||||
|
result["_" + varname] = result[varname]
|
||||||
|
del result[varname]
|
||||||
return result
|
return result
|
||||||
|
|
||||||
@module_fails_on_exception
|
@module_fails_on_exception
|
||||||
@@ -210,7 +366,7 @@ class ModuleHelper(object):
|
|||||||
self.__init_module__()
|
self.__init_module__()
|
||||||
self.__run__()
|
self.__run__()
|
||||||
self.__quit_module__()
|
self.__quit_module__()
|
||||||
self.module.exit_json(changed=self.changed, **self.output_dict)
|
self.module.exit_json(changed=self.has_changed(), **self.output)
|
||||||
|
|
||||||
@classmethod
|
@classmethod
|
||||||
def dependency(cls, name, msg):
|
def dependency(cls, name, msg):
|
||||||
@@ -221,9 +377,9 @@ class ModuleHelper(object):
|
|||||||
for d in self._dependencies:
|
for d in self._dependencies:
|
||||||
if not d.has_it:
|
if not d.has_it:
|
||||||
self.module.fail_json(changed=False,
|
self.module.fail_json(changed=False,
|
||||||
exception=d.exc_val.__traceback__.format_exc(),
|
exception="\n".join(traceback.format_exception(d.exc_type, d.exc_val, d.exc_tb)),
|
||||||
msg=d.text,
|
msg=d.text,
|
||||||
**self.output_dict)
|
**self.output)
|
||||||
|
|
||||||
|
|
||||||
class StateMixin(object):
|
class StateMixin(object):
|
||||||
@@ -292,7 +448,10 @@ class CmdMixin(object):
|
|||||||
|
|
||||||
extra_params = extra_params or dict()
|
extra_params = extra_params or dict()
|
||||||
cmd_args = list([self.command]) if isinstance(self.command, str) else list(self.command)
|
cmd_args = list([self.command]) if isinstance(self.command, str) else list(self.command)
|
||||||
cmd_args[0] = self.module.get_bin_path(cmd_args[0])
|
try:
|
||||||
|
cmd_args[0] = self.module.get_bin_path(cmd_args[0], required=True)
|
||||||
|
except ValueError:
|
||||||
|
pass
|
||||||
param_list = params if params else self.module.params.keys()
|
param_list = params if params else self.module.params.keys()
|
||||||
|
|
||||||
for param in param_list:
|
for param in param_list:
|
||||||
@@ -326,16 +485,19 @@ class CmdMixin(object):
|
|||||||
return rc, out, err
|
return rc, out, err
|
||||||
|
|
||||||
def run_command(self, extra_params=None, params=None, *args, **kwargs):
|
def run_command(self, extra_params=None, params=None, *args, **kwargs):
|
||||||
self.vars['cmd_args'] = self._calculate_args(extra_params, params)
|
self.vars.cmd_args = self._calculate_args(extra_params, params)
|
||||||
options = dict(self.run_command_fixed_options)
|
options = dict(self.run_command_fixed_options)
|
||||||
env_update = dict(options.get('environ_update', {}))
|
|
||||||
options['check_rc'] = options.get('check_rc', self.check_rc)
|
options['check_rc'] = options.get('check_rc', self.check_rc)
|
||||||
|
options.update(kwargs)
|
||||||
|
env_update = dict(options.get('environ_update', {}))
|
||||||
if self.force_lang:
|
if self.force_lang:
|
||||||
env_update.update({'LANGUAGE': self.force_lang})
|
env_update.update({
|
||||||
|
'LANGUAGE': self.force_lang,
|
||||||
|
'LC_ALL': self.force_lang,
|
||||||
|
})
|
||||||
self.update_output(force_lang=self.force_lang)
|
self.update_output(force_lang=self.force_lang)
|
||||||
options['environ_update'] = env_update
|
options['environ_update'] = env_update
|
||||||
options.update(kwargs)
|
rc, out, err = self.module.run_command(self.vars.cmd_args, *args, **options)
|
||||||
rc, out, err = self.module.run_command(self.vars['cmd_args'], *args, **options)
|
|
||||||
self.update_output(rc=rc, stdout=out, stderr=err)
|
self.update_output(rc=rc, stdout=out, stderr=err)
|
||||||
return self.process_command_output(rc, out, err)
|
return self.process_command_output(rc, out, err)
|
||||||
|
|
||||||
|
|||||||
@@ -18,6 +18,7 @@ from ansible.module_utils._text import to_native
|
|||||||
from ansible.module_utils.six import iteritems
|
from ansible.module_utils.six import iteritems
|
||||||
from ansible.module_utils._text import to_text
|
from ansible.module_utils._text import to_text
|
||||||
from ansible.module_utils.basic import env_fallback
|
from ansible.module_utils.basic import env_fallback
|
||||||
|
from ansible.module_utils.common.validation import check_type_dict
|
||||||
|
|
||||||
try:
|
try:
|
||||||
from infoblox_client.connector import Connector
|
from infoblox_client.connector import Connector
|
||||||
@@ -399,11 +400,11 @@ class WapiModule(WapiBase):
|
|||||||
|
|
||||||
if 'ipv4addrs' in proposed_object:
|
if 'ipv4addrs' in proposed_object:
|
||||||
if 'nios_next_ip' in proposed_object['ipv4addrs'][0]['ipv4addr']:
|
if 'nios_next_ip' in proposed_object['ipv4addrs'][0]['ipv4addr']:
|
||||||
ip_range = self.module._check_type_dict(proposed_object['ipv4addrs'][0]['ipv4addr'])['nios_next_ip']
|
ip_range = check_type_dict(proposed_object['ipv4addrs'][0]['ipv4addr'])['nios_next_ip']
|
||||||
proposed_object['ipv4addrs'][0]['ipv4addr'] = NIOS_NEXT_AVAILABLE_IP + ':' + ip_range
|
proposed_object['ipv4addrs'][0]['ipv4addr'] = NIOS_NEXT_AVAILABLE_IP + ':' + ip_range
|
||||||
elif 'ipv4addr' in proposed_object:
|
elif 'ipv4addr' in proposed_object:
|
||||||
if 'nios_next_ip' in proposed_object['ipv4addr']:
|
if 'nios_next_ip' in proposed_object['ipv4addr']:
|
||||||
ip_range = self.module._check_type_dict(proposed_object['ipv4addr'])['nios_next_ip']
|
ip_range = check_type_dict(proposed_object['ipv4addr'])['nios_next_ip']
|
||||||
proposed_object['ipv4addr'] = NIOS_NEXT_AVAILABLE_IP + ':' + ip_range
|
proposed_object['ipv4addr'] = NIOS_NEXT_AVAILABLE_IP + ':' + ip_range
|
||||||
|
|
||||||
return proposed_object
|
return proposed_object
|
||||||
@@ -485,7 +486,7 @@ class WapiModule(WapiBase):
|
|||||||
if ('name' in obj_filter):
|
if ('name' in obj_filter):
|
||||||
# gets and returns the current object based on name/old_name passed
|
# gets and returns the current object based on name/old_name passed
|
||||||
try:
|
try:
|
||||||
name_obj = self.module._check_type_dict(obj_filter['name'])
|
name_obj = check_type_dict(obj_filter['name'])
|
||||||
old_name = name_obj['old_name']
|
old_name = name_obj['old_name']
|
||||||
new_name = name_obj['new_name']
|
new_name = name_obj['new_name']
|
||||||
except TypeError:
|
except TypeError:
|
||||||
@@ -521,7 +522,7 @@ class WapiModule(WapiBase):
|
|||||||
test_obj_filter['name'] = test_obj_filter['name'].lower()
|
test_obj_filter['name'] = test_obj_filter['name'].lower()
|
||||||
# resolves issue where multiple a_records with same name and different IP address
|
# resolves issue where multiple a_records with same name and different IP address
|
||||||
try:
|
try:
|
||||||
ipaddr_obj = self.module._check_type_dict(obj_filter['ipv4addr'])
|
ipaddr_obj = check_type_dict(obj_filter['ipv4addr'])
|
||||||
ipaddr = ipaddr_obj['old_ipv4addr']
|
ipaddr = ipaddr_obj['old_ipv4addr']
|
||||||
except TypeError:
|
except TypeError:
|
||||||
ipaddr = obj_filter['ipv4addr']
|
ipaddr = obj_filter['ipv4addr']
|
||||||
@@ -530,7 +531,7 @@ class WapiModule(WapiBase):
|
|||||||
# resolves issue where multiple txt_records with same name and different text
|
# resolves issue where multiple txt_records with same name and different text
|
||||||
test_obj_filter = obj_filter
|
test_obj_filter = obj_filter
|
||||||
try:
|
try:
|
||||||
text_obj = self.module._check_type_dict(obj_filter['text'])
|
text_obj = check_type_dict(obj_filter['text'])
|
||||||
txt = text_obj['old_text']
|
txt = text_obj['old_text']
|
||||||
except TypeError:
|
except TypeError:
|
||||||
txt = obj_filter['text']
|
txt = obj_filter['text']
|
||||||
@@ -543,7 +544,7 @@ class WapiModule(WapiBase):
|
|||||||
# resolves issue where multiple a_records with same name and different IP address
|
# resolves issue where multiple a_records with same name and different IP address
|
||||||
test_obj_filter = obj_filter
|
test_obj_filter = obj_filter
|
||||||
try:
|
try:
|
||||||
ipaddr_obj = self.module._check_type_dict(obj_filter['ipv4addr'])
|
ipaddr_obj = check_type_dict(obj_filter['ipv4addr'])
|
||||||
ipaddr = ipaddr_obj['old_ipv4addr']
|
ipaddr = ipaddr_obj['old_ipv4addr']
|
||||||
except TypeError:
|
except TypeError:
|
||||||
ipaddr = obj_filter['ipv4addr']
|
ipaddr = obj_filter['ipv4addr']
|
||||||
@@ -553,7 +554,7 @@ class WapiModule(WapiBase):
|
|||||||
# resolves issue where multiple txt_records with same name and different text
|
# resolves issue where multiple txt_records with same name and different text
|
||||||
test_obj_filter = obj_filter
|
test_obj_filter = obj_filter
|
||||||
try:
|
try:
|
||||||
text_obj = self.module._check_type_dict(obj_filter['text'])
|
text_obj = check_type_dict(obj_filter['text'])
|
||||||
txt = text_obj['old_text']
|
txt = text_obj['old_text']
|
||||||
except TypeError:
|
except TypeError:
|
||||||
txt = obj_filter['text']
|
txt = obj_filter['text']
|
||||||
|
|||||||
0
plugins/module_utils/net_tools/pritunl/__init__.py
Normal file
0
plugins/module_utils/net_tools/pritunl/__init__.py
Normal file
370
plugins/module_utils/net_tools/pritunl/api.py
Normal file
370
plugins/module_utils/net_tools/pritunl/api.py
Normal file
@@ -0,0 +1,370 @@
|
|||||||
|
# -*- coding: utf-8 -*-
|
||||||
|
# Copyright: (c) 2021, Florian Dambrine <android.florian@gmail.com>
|
||||||
|
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||||
|
|
||||||
|
"""
|
||||||
|
Pritunl API that offers CRUD operations on Pritunl Organizations and Users
|
||||||
|
"""
|
||||||
|
|
||||||
|
from __future__ import absolute_import, division, print_function
|
||||||
|
|
||||||
|
import base64
|
||||||
|
import hashlib
|
||||||
|
import hmac
|
||||||
|
import json
|
||||||
|
import time
|
||||||
|
import uuid
|
||||||
|
|
||||||
|
from ansible.module_utils.six import iteritems
|
||||||
|
from ansible.module_utils.urls import open_url
|
||||||
|
|
||||||
|
__metaclass__ = type
|
||||||
|
|
||||||
|
|
||||||
|
class PritunlException(Exception):
|
||||||
|
pass
|
||||||
|
|
||||||
|
|
||||||
|
def pritunl_argument_spec():
|
||||||
|
return dict(
|
||||||
|
pritunl_url=dict(required=True, type="str"),
|
||||||
|
pritunl_api_token=dict(required=True, type="str", no_log=False),
|
||||||
|
pritunl_api_secret=dict(required=True, type="str", no_log=True),
|
||||||
|
validate_certs=dict(required=False, type="bool", default=True),
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
def get_pritunl_settings(module):
|
||||||
|
"""
|
||||||
|
Helper function to set required Pritunl request params from module arguments.
|
||||||
|
"""
|
||||||
|
return {
|
||||||
|
"api_token": module.params.get("pritunl_api_token"),
|
||||||
|
"api_secret": module.params.get("pritunl_api_secret"),
|
||||||
|
"base_url": module.params.get("pritunl_url"),
|
||||||
|
"validate_certs": module.params.get("validate_certs"),
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
def _get_pritunl_organizations(api_token, api_secret, base_url, validate_certs=True):
|
||||||
|
return pritunl_auth_request(
|
||||||
|
base_url=base_url,
|
||||||
|
api_token=api_token,
|
||||||
|
api_secret=api_secret,
|
||||||
|
method="GET",
|
||||||
|
path="/organization",
|
||||||
|
validate_certs=validate_certs,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
def _delete_pritunl_organization(
|
||||||
|
api_token, api_secret, base_url, organization_id, validate_certs=True
|
||||||
|
):
|
||||||
|
return pritunl_auth_request(
|
||||||
|
base_url=base_url,
|
||||||
|
api_token=api_token,
|
||||||
|
api_secret=api_secret,
|
||||||
|
method="DELETE",
|
||||||
|
path="/organization/%s" % (organization_id),
|
||||||
|
validate_certs=validate_certs,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
def _post_pritunl_organization(
|
||||||
|
api_token, api_secret, base_url, organization_data, validate_certs=True
|
||||||
|
):
|
||||||
|
return pritunl_auth_request(
|
||||||
|
api_token=api_token,
|
||||||
|
api_secret=api_secret,
|
||||||
|
base_url=base_url,
|
||||||
|
method="POST",
|
||||||
|
path="/organization/%s",
|
||||||
|
headers={"Content-Type": "application/json"},
|
||||||
|
data=json.dumps(organization_data),
|
||||||
|
validate_certs=validate_certs,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
def _get_pritunl_users(
|
||||||
|
api_token, api_secret, base_url, organization_id, validate_certs=True
|
||||||
|
):
|
||||||
|
return pritunl_auth_request(
|
||||||
|
api_token=api_token,
|
||||||
|
api_secret=api_secret,
|
||||||
|
base_url=base_url,
|
||||||
|
method="GET",
|
||||||
|
path="/user/%s" % organization_id,
|
||||||
|
validate_certs=validate_certs,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
def _delete_pritunl_user(
|
||||||
|
api_token, api_secret, base_url, organization_id, user_id, validate_certs=True
|
||||||
|
):
|
||||||
|
return pritunl_auth_request(
|
||||||
|
api_token=api_token,
|
||||||
|
api_secret=api_secret,
|
||||||
|
base_url=base_url,
|
||||||
|
method="DELETE",
|
||||||
|
path="/user/%s/%s" % (organization_id, user_id),
|
||||||
|
validate_certs=validate_certs,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
def _post_pritunl_user(
|
||||||
|
api_token, api_secret, base_url, organization_id, user_data, validate_certs=True
|
||||||
|
):
|
||||||
|
return pritunl_auth_request(
|
||||||
|
api_token=api_token,
|
||||||
|
api_secret=api_secret,
|
||||||
|
base_url=base_url,
|
||||||
|
method="POST",
|
||||||
|
path="/user/%s" % organization_id,
|
||||||
|
headers={"Content-Type": "application/json"},
|
||||||
|
data=json.dumps(user_data),
|
||||||
|
validate_certs=validate_certs,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
def _put_pritunl_user(
|
||||||
|
api_token,
|
||||||
|
api_secret,
|
||||||
|
base_url,
|
||||||
|
organization_id,
|
||||||
|
user_id,
|
||||||
|
user_data,
|
||||||
|
validate_certs=True,
|
||||||
|
):
|
||||||
|
return pritunl_auth_request(
|
||||||
|
api_token=api_token,
|
||||||
|
api_secret=api_secret,
|
||||||
|
base_url=base_url,
|
||||||
|
method="PUT",
|
||||||
|
path="/user/%s/%s" % (organization_id, user_id),
|
||||||
|
headers={"Content-Type": "application/json"},
|
||||||
|
data=json.dumps(user_data),
|
||||||
|
validate_certs=validate_certs,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
def list_pritunl_organizations(
|
||||||
|
api_token, api_secret, base_url, validate_certs=True, filters=None
|
||||||
|
):
|
||||||
|
orgs = []
|
||||||
|
|
||||||
|
response = _get_pritunl_organizations(
|
||||||
|
api_token=api_token,
|
||||||
|
api_secret=api_secret,
|
||||||
|
base_url=base_url,
|
||||||
|
validate_certs=validate_certs,
|
||||||
|
)
|
||||||
|
|
||||||
|
if response.getcode() != 200:
|
||||||
|
raise PritunlException("Could not retrieve organizations from Pritunl")
|
||||||
|
else:
|
||||||
|
for org in json.loads(response.read()):
|
||||||
|
# No filtering
|
||||||
|
if filters is None:
|
||||||
|
orgs.append(org)
|
||||||
|
else:
|
||||||
|
if not any(
|
||||||
|
filter_val != org[filter_key]
|
||||||
|
for filter_key, filter_val in iteritems(filters)
|
||||||
|
):
|
||||||
|
orgs.append(org)
|
||||||
|
|
||||||
|
return orgs
|
||||||
|
|
||||||
|
|
||||||
|
def list_pritunl_users(
|
||||||
|
api_token, api_secret, base_url, organization_id, validate_certs=True, filters=None
|
||||||
|
):
|
||||||
|
users = []
|
||||||
|
|
||||||
|
response = _get_pritunl_users(
|
||||||
|
api_token=api_token,
|
||||||
|
api_secret=api_secret,
|
||||||
|
base_url=base_url,
|
||||||
|
validate_certs=validate_certs,
|
||||||
|
organization_id=organization_id,
|
||||||
|
)
|
||||||
|
|
||||||
|
if response.getcode() != 200:
|
||||||
|
raise PritunlException("Could not retrieve users from Pritunl")
|
||||||
|
else:
|
||||||
|
for user in json.loads(response.read()):
|
||||||
|
# No filtering
|
||||||
|
if filters is None:
|
||||||
|
users.append(user)
|
||||||
|
|
||||||
|
else:
|
||||||
|
if not any(
|
||||||
|
filter_val != user[filter_key]
|
||||||
|
for filter_key, filter_val in iteritems(filters)
|
||||||
|
):
|
||||||
|
users.append(user)
|
||||||
|
|
||||||
|
return users
|
||||||
|
|
||||||
|
|
||||||
|
def post_pritunl_organization(
|
||||||
|
api_token,
|
||||||
|
api_secret,
|
||||||
|
base_url,
|
||||||
|
organization_name,
|
||||||
|
validate_certs=True,
|
||||||
|
):
|
||||||
|
response = _post_pritunl_organization(
|
||||||
|
api_token=api_token,
|
||||||
|
api_secret=api_secret,
|
||||||
|
base_url=base_url,
|
||||||
|
organization_data={"name": organization_name},
|
||||||
|
validate_certs=True,
|
||||||
|
)
|
||||||
|
|
||||||
|
if response.getcode() != 200:
|
||||||
|
raise PritunlException(
|
||||||
|
"Could not add organization %s to Pritunl" % (organization_name)
|
||||||
|
)
|
||||||
|
# The user PUT request returns the updated user object
|
||||||
|
return json.loads(response.read())
|
||||||
|
|
||||||
|
|
||||||
|
def post_pritunl_user(
|
||||||
|
api_token,
|
||||||
|
api_secret,
|
||||||
|
base_url,
|
||||||
|
organization_id,
|
||||||
|
user_data,
|
||||||
|
user_id=None,
|
||||||
|
validate_certs=True,
|
||||||
|
):
|
||||||
|
# If user_id is provided will do PUT otherwise will do POST
|
||||||
|
if user_id is None:
|
||||||
|
response = _post_pritunl_user(
|
||||||
|
api_token=api_token,
|
||||||
|
api_secret=api_secret,
|
||||||
|
base_url=base_url,
|
||||||
|
organization_id=organization_id,
|
||||||
|
user_data=user_data,
|
||||||
|
validate_certs=True,
|
||||||
|
)
|
||||||
|
|
||||||
|
if response.getcode() != 200:
|
||||||
|
raise PritunlException(
|
||||||
|
"Could not remove user %s from organization %s from Pritunl"
|
||||||
|
% (user_id, organization_id)
|
||||||
|
)
|
||||||
|
# user POST request returns an array of a single item,
|
||||||
|
# so return this item instead of the list
|
||||||
|
return json.loads(response.read())[0]
|
||||||
|
else:
|
||||||
|
response = _put_pritunl_user(
|
||||||
|
api_token=api_token,
|
||||||
|
api_secret=api_secret,
|
||||||
|
base_url=base_url,
|
||||||
|
organization_id=organization_id,
|
||||||
|
user_data=user_data,
|
||||||
|
user_id=user_id,
|
||||||
|
validate_certs=True,
|
||||||
|
)
|
||||||
|
|
||||||
|
if response.getcode() != 200:
|
||||||
|
raise PritunlException(
|
||||||
|
"Could not update user %s from organization %s from Pritunl"
|
||||||
|
% (user_id, organization_id)
|
||||||
|
)
|
||||||
|
# The user PUT request returns the updated user object
|
||||||
|
return json.loads(response.read())
|
||||||
|
|
||||||
|
|
||||||
|
def delete_pritunl_organization(
|
||||||
|
api_token, api_secret, base_url, organization_id, validate_certs=True
|
||||||
|
):
|
||||||
|
response = _delete_pritunl_organization(
|
||||||
|
api_token=api_token,
|
||||||
|
api_secret=api_secret,
|
||||||
|
base_url=base_url,
|
||||||
|
organization_id=organization_id,
|
||||||
|
validate_certs=True,
|
||||||
|
)
|
||||||
|
|
||||||
|
if response.getcode() != 200:
|
||||||
|
raise PritunlException(
|
||||||
|
"Could not remove organization %s from Pritunl" % (organization_id)
|
||||||
|
)
|
||||||
|
|
||||||
|
return json.loads(response.read())
|
||||||
|
|
||||||
|
|
||||||
|
def delete_pritunl_user(
|
||||||
|
api_token, api_secret, base_url, organization_id, user_id, validate_certs=True
|
||||||
|
):
|
||||||
|
response = _delete_pritunl_user(
|
||||||
|
api_token=api_token,
|
||||||
|
api_secret=api_secret,
|
||||||
|
base_url=base_url,
|
||||||
|
organization_id=organization_id,
|
||||||
|
user_id=user_id,
|
||||||
|
validate_certs=True,
|
||||||
|
)
|
||||||
|
|
||||||
|
if response.getcode() != 200:
|
||||||
|
raise PritunlException(
|
||||||
|
"Could not remove user %s from organization %s from Pritunl"
|
||||||
|
% (user_id, organization_id)
|
||||||
|
)
|
||||||
|
|
||||||
|
return json.loads(response.read())
|
||||||
|
|
||||||
|
|
||||||
|
def pritunl_auth_request(
|
||||||
|
api_token,
|
||||||
|
api_secret,
|
||||||
|
base_url,
|
||||||
|
method,
|
||||||
|
path,
|
||||||
|
validate_certs=True,
|
||||||
|
headers=None,
|
||||||
|
data=None,
|
||||||
|
):
|
||||||
|
"""
|
||||||
|
Send an API call to a Pritunl server.
|
||||||
|
Taken from https://pritunl.com/api and adaped work with Ansible open_url
|
||||||
|
"""
|
||||||
|
auth_timestamp = str(int(time.time()))
|
||||||
|
auth_nonce = uuid.uuid4().hex
|
||||||
|
|
||||||
|
auth_string = "&".join(
|
||||||
|
[api_token, auth_timestamp, auth_nonce, method.upper(), path]
|
||||||
|
+ ([data] if data else [])
|
||||||
|
)
|
||||||
|
|
||||||
|
auth_signature = base64.b64encode(
|
||||||
|
hmac.new(
|
||||||
|
api_secret.encode("utf-8"), auth_string.encode("utf-8"), hashlib.sha256
|
||||||
|
).digest()
|
||||||
|
)
|
||||||
|
|
||||||
|
auth_headers = {
|
||||||
|
"Auth-Token": api_token,
|
||||||
|
"Auth-Timestamp": auth_timestamp,
|
||||||
|
"Auth-Nonce": auth_nonce,
|
||||||
|
"Auth-Signature": auth_signature,
|
||||||
|
}
|
||||||
|
|
||||||
|
if headers:
|
||||||
|
auth_headers.update(headers)
|
||||||
|
|
||||||
|
try:
|
||||||
|
uri = "%s%s" % (base_url, path)
|
||||||
|
|
||||||
|
return open_url(
|
||||||
|
uri,
|
||||||
|
method=method.upper(),
|
||||||
|
headers=auth_headers,
|
||||||
|
data=data,
|
||||||
|
validate_certs=validate_certs,
|
||||||
|
)
|
||||||
|
except Exception as e:
|
||||||
|
raise PritunlException(e)
|
||||||
@@ -39,14 +39,16 @@ class OpenNebulaModule:
|
|||||||
wait_timeout=dict(type='int', default=300),
|
wait_timeout=dict(type='int', default=300),
|
||||||
)
|
)
|
||||||
|
|
||||||
def __init__(self, argument_spec, supports_check_mode=False, mutually_exclusive=None):
|
def __init__(self, argument_spec, supports_check_mode=False, mutually_exclusive=None, required_one_of=None, required_if=None):
|
||||||
|
|
||||||
module_args = OpenNebulaModule.common_args
|
module_args = OpenNebulaModule.common_args.copy()
|
||||||
module_args.update(argument_spec)
|
module_args.update(argument_spec)
|
||||||
|
|
||||||
self.module = AnsibleModule(argument_spec=module_args,
|
self.module = AnsibleModule(argument_spec=module_args,
|
||||||
supports_check_mode=supports_check_mode,
|
supports_check_mode=supports_check_mode,
|
||||||
mutually_exclusive=mutually_exclusive)
|
mutually_exclusive=mutually_exclusive,
|
||||||
|
required_one_of=required_one_of,
|
||||||
|
required_if=required_if)
|
||||||
self.result = dict(changed=False,
|
self.result = dict(changed=False,
|
||||||
original_message='',
|
original_message='',
|
||||||
message='')
|
message='')
|
||||||
|
|||||||
@@ -104,7 +104,7 @@ def get_common_arg_spec(supports_create=False, supports_wait=False):
|
|||||||
|
|
||||||
if supports_create:
|
if supports_create:
|
||||||
common_args.update(
|
common_args.update(
|
||||||
key_by=dict(type="list", elements="str"),
|
key_by=dict(type="list", elements="str", no_log=False),
|
||||||
force_create=dict(type="bool", default=False),
|
force_create=dict(type="bool", default=False),
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|||||||
@@ -39,13 +39,34 @@ class RedfishUtils(object):
|
|||||||
self.data_modification = data_modification
|
self.data_modification = data_modification
|
||||||
self._init_session()
|
self._init_session()
|
||||||
|
|
||||||
|
def _auth_params(self, headers):
|
||||||
|
"""
|
||||||
|
Return tuple of required authentication params based on the presence
|
||||||
|
of a token in the self.creds dict. If using a token, set the
|
||||||
|
X-Auth-Token header in the `headers` param.
|
||||||
|
|
||||||
|
:param headers: dict containing headers to send in request
|
||||||
|
:return: tuple of username, password and force_basic_auth
|
||||||
|
"""
|
||||||
|
if self.creds.get('token'):
|
||||||
|
username = None
|
||||||
|
password = None
|
||||||
|
force_basic_auth = False
|
||||||
|
headers['X-Auth-Token'] = self.creds['token']
|
||||||
|
else:
|
||||||
|
username = self.creds['user']
|
||||||
|
password = self.creds['pswd']
|
||||||
|
force_basic_auth = True
|
||||||
|
return username, password, force_basic_auth
|
||||||
|
|
||||||
# The following functions are to send GET/POST/PATCH/DELETE requests
|
# The following functions are to send GET/POST/PATCH/DELETE requests
|
||||||
def get_request(self, uri):
|
def get_request(self, uri):
|
||||||
|
req_headers = dict(GET_HEADERS)
|
||||||
|
username, password, basic_auth = self._auth_params(req_headers)
|
||||||
try:
|
try:
|
||||||
resp = open_url(uri, method="GET", headers=GET_HEADERS,
|
resp = open_url(uri, method="GET", headers=req_headers,
|
||||||
url_username=self.creds['user'],
|
url_username=username, url_password=password,
|
||||||
url_password=self.creds['pswd'],
|
force_basic_auth=basic_auth, validate_certs=False,
|
||||||
force_basic_auth=True, validate_certs=False,
|
|
||||||
follow_redirects='all',
|
follow_redirects='all',
|
||||||
use_proxy=True, timeout=self.timeout)
|
use_proxy=True, timeout=self.timeout)
|
||||||
data = json.loads(to_native(resp.read()))
|
data = json.loads(to_native(resp.read()))
|
||||||
@@ -66,14 +87,16 @@ class RedfishUtils(object):
|
|||||||
return {'ret': True, 'data': data, 'headers': headers}
|
return {'ret': True, 'data': data, 'headers': headers}
|
||||||
|
|
||||||
def post_request(self, uri, pyld):
|
def post_request(self, uri, pyld):
|
||||||
|
req_headers = dict(POST_HEADERS)
|
||||||
|
username, password, basic_auth = self._auth_params(req_headers)
|
||||||
try:
|
try:
|
||||||
resp = open_url(uri, data=json.dumps(pyld),
|
resp = open_url(uri, data=json.dumps(pyld),
|
||||||
headers=POST_HEADERS, method="POST",
|
headers=req_headers, method="POST",
|
||||||
url_username=self.creds['user'],
|
url_username=username, url_password=password,
|
||||||
url_password=self.creds['pswd'],
|
force_basic_auth=basic_auth, validate_certs=False,
|
||||||
force_basic_auth=True, validate_certs=False,
|
|
||||||
follow_redirects='all',
|
follow_redirects='all',
|
||||||
use_proxy=True, timeout=self.timeout)
|
use_proxy=True, timeout=self.timeout)
|
||||||
|
headers = dict((k.lower(), v) for (k, v) in resp.info().items())
|
||||||
except HTTPError as e:
|
except HTTPError as e:
|
||||||
msg = self._get_extended_message(e)
|
msg = self._get_extended_message(e)
|
||||||
return {'ret': False,
|
return {'ret': False,
|
||||||
@@ -87,10 +110,10 @@ class RedfishUtils(object):
|
|||||||
except Exception as e:
|
except Exception as e:
|
||||||
return {'ret': False,
|
return {'ret': False,
|
||||||
'msg': "Failed POST request to '%s': '%s'" % (uri, to_text(e))}
|
'msg': "Failed POST request to '%s': '%s'" % (uri, to_text(e))}
|
||||||
return {'ret': True, 'resp': resp}
|
return {'ret': True, 'headers': headers, 'resp': resp}
|
||||||
|
|
||||||
def patch_request(self, uri, pyld):
|
def patch_request(self, uri, pyld):
|
||||||
headers = PATCH_HEADERS
|
req_headers = dict(PATCH_HEADERS)
|
||||||
r = self.get_request(uri)
|
r = self.get_request(uri)
|
||||||
if r['ret']:
|
if r['ret']:
|
||||||
# Get etag from etag header or @odata.etag property
|
# Get etag from etag header or @odata.etag property
|
||||||
@@ -98,15 +121,13 @@ class RedfishUtils(object):
|
|||||||
if not etag:
|
if not etag:
|
||||||
etag = r['data'].get('@odata.etag')
|
etag = r['data'].get('@odata.etag')
|
||||||
if etag:
|
if etag:
|
||||||
# Make copy of headers and add If-Match header
|
req_headers['If-Match'] = etag
|
||||||
headers = dict(headers)
|
username, password, basic_auth = self._auth_params(req_headers)
|
||||||
headers['If-Match'] = etag
|
|
||||||
try:
|
try:
|
||||||
resp = open_url(uri, data=json.dumps(pyld),
|
resp = open_url(uri, data=json.dumps(pyld),
|
||||||
headers=headers, method="PATCH",
|
headers=req_headers, method="PATCH",
|
||||||
url_username=self.creds['user'],
|
url_username=username, url_password=password,
|
||||||
url_password=self.creds['pswd'],
|
force_basic_auth=basic_auth, validate_certs=False,
|
||||||
force_basic_auth=True, validate_certs=False,
|
|
||||||
follow_redirects='all',
|
follow_redirects='all',
|
||||||
use_proxy=True, timeout=self.timeout)
|
use_proxy=True, timeout=self.timeout)
|
||||||
except HTTPError as e:
|
except HTTPError as e:
|
||||||
@@ -125,13 +146,14 @@ class RedfishUtils(object):
|
|||||||
return {'ret': True, 'resp': resp}
|
return {'ret': True, 'resp': resp}
|
||||||
|
|
||||||
def delete_request(self, uri, pyld=None):
|
def delete_request(self, uri, pyld=None):
|
||||||
|
req_headers = dict(DELETE_HEADERS)
|
||||||
|
username, password, basic_auth = self._auth_params(req_headers)
|
||||||
try:
|
try:
|
||||||
data = json.dumps(pyld) if pyld else None
|
data = json.dumps(pyld) if pyld else None
|
||||||
resp = open_url(uri, data=data,
|
resp = open_url(uri, data=data,
|
||||||
headers=DELETE_HEADERS, method="DELETE",
|
headers=req_headers, method="DELETE",
|
||||||
url_username=self.creds['user'],
|
url_username=username, url_password=password,
|
||||||
url_password=self.creds['pswd'],
|
force_basic_auth=basic_auth, validate_certs=False,
|
||||||
force_basic_auth=True, validate_certs=False,
|
|
||||||
follow_redirects='all',
|
follow_redirects='all',
|
||||||
use_proxy=True, timeout=self.timeout)
|
use_proxy=True, timeout=self.timeout)
|
||||||
except HTTPError as e:
|
except HTTPError as e:
|
||||||
@@ -1196,6 +1218,54 @@ class RedfishUtils(object):
|
|||||||
|
|
||||||
return {'ret': True, 'changed': True, 'msg': "Clear all sessions successfully"}
|
return {'ret': True, 'changed': True, 'msg': "Clear all sessions successfully"}
|
||||||
|
|
||||||
|
def create_session(self):
|
||||||
|
if not self.creds.get('user') or not self.creds.get('pswd'):
|
||||||
|
return {'ret': False, 'msg':
|
||||||
|
'Must provide the username and password parameters for '
|
||||||
|
'the CreateSession command'}
|
||||||
|
|
||||||
|
payload = {
|
||||||
|
'UserName': self.creds['user'],
|
||||||
|
'Password': self.creds['pswd']
|
||||||
|
}
|
||||||
|
response = self.post_request(self.root_uri + self.sessions_uri, payload)
|
||||||
|
if response['ret'] is False:
|
||||||
|
return response
|
||||||
|
|
||||||
|
headers = response['headers']
|
||||||
|
if 'x-auth-token' not in headers:
|
||||||
|
return {'ret': False, 'msg':
|
||||||
|
'The service did not return the X-Auth-Token header in '
|
||||||
|
'the response from the Sessions collection POST'}
|
||||||
|
|
||||||
|
if 'location' not in headers:
|
||||||
|
self.module.warn(
|
||||||
|
'The service did not return the Location header for the '
|
||||||
|
'session URL in the response from the Sessions collection '
|
||||||
|
'POST')
|
||||||
|
session_uri = None
|
||||||
|
else:
|
||||||
|
session_uri = urlparse(headers.get('location')).path
|
||||||
|
|
||||||
|
session = dict()
|
||||||
|
session['token'] = headers.get('x-auth-token')
|
||||||
|
session['uri'] = session_uri
|
||||||
|
return {'ret': True, 'changed': True, 'session': session,
|
||||||
|
'msg': 'Session created successfully'}
|
||||||
|
|
||||||
|
def delete_session(self, session_uri):
|
||||||
|
if not session_uri:
|
||||||
|
return {'ret': False, 'msg':
|
||||||
|
'Must provide the session_uri parameter for the '
|
||||||
|
'DeleteSession command'}
|
||||||
|
|
||||||
|
response = self.delete_request(self.root_uri + session_uri)
|
||||||
|
if response['ret'] is False:
|
||||||
|
return response
|
||||||
|
|
||||||
|
return {'ret': True, 'changed': True,
|
||||||
|
'msg': 'Session deleted successfully'}
|
||||||
|
|
||||||
def get_firmware_update_capabilities(self):
|
def get_firmware_update_capabilities(self):
|
||||||
result = {}
|
result = {}
|
||||||
response = self.get_request(self.root_uri + self.update_uri)
|
response = self.get_request(self.root_uri + self.update_uri)
|
||||||
@@ -2676,6 +2746,10 @@ class RedfishUtils(object):
|
|||||||
need_change = True
|
need_change = True
|
||||||
# type is list
|
# type is list
|
||||||
if isinstance(set_value, list):
|
if isinstance(set_value, list):
|
||||||
|
if len(set_value) != len(cur_value):
|
||||||
|
# if arrays are not the same len, no need to check each element
|
||||||
|
need_change = True
|
||||||
|
continue
|
||||||
for i in range(len(set_value)):
|
for i in range(len(set_value)):
|
||||||
for subprop in payload[property][i].keys():
|
for subprop in payload[property][i].keys():
|
||||||
if subprop not in target_ethernet_current_setting[property][i]:
|
if subprop not in target_ethernet_current_setting[property][i]:
|
||||||
|
|||||||
@@ -39,7 +39,7 @@ class ScalewayException(Exception):
|
|||||||
R_LINK_HEADER = r'''<[^>]+>;\srel="(first|previous|next|last)"
|
R_LINK_HEADER = r'''<[^>]+>;\srel="(first|previous|next|last)"
|
||||||
(,<[^>]+>;\srel="(first|previous|next|last)")*'''
|
(,<[^>]+>;\srel="(first|previous|next|last)")*'''
|
||||||
# Specify a single relation, for iteration and string extraction purposes
|
# Specify a single relation, for iteration and string extraction purposes
|
||||||
R_RELATION = r'<(?P<target_IRI>[^>]+)>; rel="(?P<relation>first|previous|next|last)"'
|
R_RELATION = r'</?(?P<target_IRI>[^>]+)>; rel="(?P<relation>first|previous|next|last)"'
|
||||||
|
|
||||||
|
|
||||||
def parse_pagination_link(header):
|
def parse_pagination_link(header):
|
||||||
|
|||||||
@@ -20,7 +20,6 @@ except ImportError:
|
|||||||
XENAPI_IMP_ERR = traceback.format_exc()
|
XENAPI_IMP_ERR = traceback.format_exc()
|
||||||
|
|
||||||
from ansible.module_utils.basic import env_fallback, missing_required_lib
|
from ansible.module_utils.basic import env_fallback, missing_required_lib
|
||||||
from ansible.module_utils.common.network import is_mac
|
|
||||||
from ansible.module_utils.ansible_release import __version__ as ANSIBLE_VERSION
|
from ansible.module_utils.ansible_release import __version__ as ANSIBLE_VERSION
|
||||||
|
|
||||||
|
|
||||||
|
|||||||
@@ -102,7 +102,8 @@ def do_install(module, mode, rootfs, container, image, values_list, backend):
|
|||||||
system_list = ["--system"] if mode == 'system' else []
|
system_list = ["--system"] if mode == 'system' else []
|
||||||
user_list = ["--user"] if mode == 'user' else []
|
user_list = ["--user"] if mode == 'user' else []
|
||||||
rootfs_list = ["--rootfs=%s" % rootfs] if rootfs else []
|
rootfs_list = ["--rootfs=%s" % rootfs] if rootfs else []
|
||||||
args = ['atomic', 'install', "--storage=%s" % backend, '--name=%s' % container] + system_list + user_list + rootfs_list + values_list + [image]
|
atomic_bin = module.get_bin_path('atomic')
|
||||||
|
args = [atomic_bin, 'install', "--storage=%s" % backend, '--name=%s' % container] + system_list + user_list + rootfs_list + values_list + [image]
|
||||||
rc, out, err = module.run_command(args, check_rc=False)
|
rc, out, err = module.run_command(args, check_rc=False)
|
||||||
if rc != 0:
|
if rc != 0:
|
||||||
module.fail_json(rc=rc, msg=err)
|
module.fail_json(rc=rc, msg=err)
|
||||||
@@ -112,7 +113,8 @@ def do_install(module, mode, rootfs, container, image, values_list, backend):
|
|||||||
|
|
||||||
|
|
||||||
def do_update(module, container, image, values_list):
|
def do_update(module, container, image, values_list):
|
||||||
args = ['atomic', 'containers', 'update', "--rebase=%s" % image] + values_list + [container]
|
atomic_bin = module.get_bin_path('atomic')
|
||||||
|
args = [atomic_bin, 'containers', 'update', "--rebase=%s" % image] + values_list + [container]
|
||||||
rc, out, err = module.run_command(args, check_rc=False)
|
rc, out, err = module.run_command(args, check_rc=False)
|
||||||
if rc != 0:
|
if rc != 0:
|
||||||
module.fail_json(rc=rc, msg=err)
|
module.fail_json(rc=rc, msg=err)
|
||||||
@@ -122,7 +124,8 @@ def do_update(module, container, image, values_list):
|
|||||||
|
|
||||||
|
|
||||||
def do_uninstall(module, name, backend):
|
def do_uninstall(module, name, backend):
|
||||||
args = ['atomic', 'uninstall', "--storage=%s" % backend, name]
|
atomic_bin = module.get_bin_path('atomic')
|
||||||
|
args = [atomic_bin, 'uninstall', "--storage=%s" % backend, name]
|
||||||
rc, out, err = module.run_command(args, check_rc=False)
|
rc, out, err = module.run_command(args, check_rc=False)
|
||||||
if rc != 0:
|
if rc != 0:
|
||||||
module.fail_json(rc=rc, msg=err)
|
module.fail_json(rc=rc, msg=err)
|
||||||
@@ -130,7 +133,8 @@ def do_uninstall(module, name, backend):
|
|||||||
|
|
||||||
|
|
||||||
def do_rollback(module, name):
|
def do_rollback(module, name):
|
||||||
args = ['atomic', 'containers', 'rollback', name]
|
atomic_bin = module.get_bin_path('atomic')
|
||||||
|
args = [atomic_bin, 'containers', 'rollback', name]
|
||||||
rc, out, err = module.run_command(args, check_rc=False)
|
rc, out, err = module.run_command(args, check_rc=False)
|
||||||
if rc != 0:
|
if rc != 0:
|
||||||
module.fail_json(rc=rc, msg=err)
|
module.fail_json(rc=rc, msg=err)
|
||||||
@@ -148,14 +152,12 @@ def core(module):
|
|||||||
backend = module.params['backend']
|
backend = module.params['backend']
|
||||||
state = module.params['state']
|
state = module.params['state']
|
||||||
|
|
||||||
|
atomic_bin = module.get_bin_path('atomic')
|
||||||
module.run_command_environ_update = dict(LANG='C', LC_ALL='C', LC_MESSAGES='C')
|
module.run_command_environ_update = dict(LANG='C', LC_ALL='C', LC_MESSAGES='C')
|
||||||
out = {}
|
|
||||||
err = {}
|
|
||||||
rc = 0
|
|
||||||
|
|
||||||
values_list = ["--set=%s" % x for x in values] if values else []
|
values_list = ["--set=%s" % x for x in values] if values else []
|
||||||
|
|
||||||
args = ['atomic', 'containers', 'list', '--no-trunc', '-n', '--all', '-f', 'backend=%s' % backend, '-f', 'container=%s' % name]
|
args = [atomic_bin, 'containers', 'list', '--no-trunc', '-n', '--all', '-f', 'backend=%s' % backend, '-f', 'container=%s' % name]
|
||||||
rc, out, err = module.run_command(args, check_rc=False)
|
rc, out, err = module.run_command(args, check_rc=False)
|
||||||
if rc != 0:
|
if rc != 0:
|
||||||
module.fail_json(rc=rc, msg=err)
|
module.fail_json(rc=rc, msg=err)
|
||||||
@@ -194,9 +196,7 @@ def main():
|
|||||||
module.fail_json(msg="values is supported only with user or system mode")
|
module.fail_json(msg="values is supported only with user or system mode")
|
||||||
|
|
||||||
# Verify that the platform supports atomic command
|
# Verify that the platform supports atomic command
|
||||||
rc, out, err = module.run_command('atomic -v', check_rc=False)
|
dummy = module.get_bin_path('atomic', required=True)
|
||||||
if rc != 0:
|
|
||||||
module.fail_json(msg="Error in running atomic command", err=err)
|
|
||||||
|
|
||||||
try:
|
try:
|
||||||
core(module)
|
core(module)
|
||||||
|
|||||||
@@ -57,18 +57,14 @@ from ansible.module_utils._text import to_native
|
|||||||
|
|
||||||
def core(module):
|
def core(module):
|
||||||
revision = module.params['revision']
|
revision = module.params['revision']
|
||||||
args = []
|
atomic_bin = module.get_bin_path('atomic', required=True)
|
||||||
|
|
||||||
module.run_command_environ_update = dict(LANG='C', LC_ALL='C', LC_MESSAGES='C')
|
module.run_command_environ_update = dict(LANG='C', LC_ALL='C', LC_MESSAGES='C')
|
||||||
|
|
||||||
if revision == 'latest':
|
if revision == 'latest':
|
||||||
args = ['atomic', 'host', 'upgrade']
|
args = [atomic_bin, 'host', 'upgrade']
|
||||||
else:
|
else:
|
||||||
args = ['atomic', 'host', 'deploy', revision]
|
args = [atomic_bin, 'host', 'deploy', revision]
|
||||||
|
|
||||||
out = {}
|
|
||||||
err = {}
|
|
||||||
rc = 0
|
|
||||||
|
|
||||||
rc, out, err = module.run_command(args, check_rc=False)
|
rc, out, err = module.run_command(args, check_rc=False)
|
||||||
|
|
||||||
|
|||||||
@@ -73,7 +73,8 @@ from ansible.module_utils._text import to_native
|
|||||||
|
|
||||||
|
|
||||||
def do_upgrade(module, image):
|
def do_upgrade(module, image):
|
||||||
args = ['atomic', 'update', '--force', image]
|
atomic_bin = module.get_bin_path('atomic')
|
||||||
|
args = [atomic_bin, 'update', '--force', image]
|
||||||
rc, out, err = module.run_command(args, check_rc=False)
|
rc, out, err = module.run_command(args, check_rc=False)
|
||||||
if rc != 0: # something went wrong emit the msg
|
if rc != 0: # something went wrong emit the msg
|
||||||
module.fail_json(rc=rc, msg=err)
|
module.fail_json(rc=rc, msg=err)
|
||||||
@@ -91,20 +92,21 @@ def core(module):
|
|||||||
is_upgraded = False
|
is_upgraded = False
|
||||||
|
|
||||||
module.run_command_environ_update = dict(LANG='C', LC_ALL='C', LC_MESSAGES='C')
|
module.run_command_environ_update = dict(LANG='C', LC_ALL='C', LC_MESSAGES='C')
|
||||||
|
atomic_bin = module.get_bin_path('atomic')
|
||||||
out = {}
|
out = {}
|
||||||
err = {}
|
err = {}
|
||||||
rc = 0
|
rc = 0
|
||||||
|
|
||||||
if backend:
|
if backend:
|
||||||
if state == 'present' or state == 'latest':
|
if state == 'present' or state == 'latest':
|
||||||
args = ['atomic', 'pull', "--storage=%s" % backend, image]
|
args = [atomic_bin, 'pull', "--storage=%s" % backend, image]
|
||||||
rc, out, err = module.run_command(args, check_rc=False)
|
rc, out, err = module.run_command(args, check_rc=False)
|
||||||
if rc < 0:
|
if rc < 0:
|
||||||
module.fail_json(rc=rc, msg=err)
|
module.fail_json(rc=rc, msg=err)
|
||||||
else:
|
else:
|
||||||
out_run = ""
|
out_run = ""
|
||||||
if started:
|
if started:
|
||||||
args = ['atomic', 'run', "--storage=%s" % backend, image]
|
args = [atomic_bin, 'run', "--storage=%s" % backend, image]
|
||||||
rc, out_run, err = module.run_command(args, check_rc=False)
|
rc, out_run, err = module.run_command(args, check_rc=False)
|
||||||
if rc < 0:
|
if rc < 0:
|
||||||
module.fail_json(rc=rc, msg=err)
|
module.fail_json(rc=rc, msg=err)
|
||||||
@@ -112,7 +114,7 @@ def core(module):
|
|||||||
changed = "Extracting" in out or "Copying blob" in out
|
changed = "Extracting" in out or "Copying blob" in out
|
||||||
module.exit_json(msg=(out + out_run), changed=changed)
|
module.exit_json(msg=(out + out_run), changed=changed)
|
||||||
elif state == 'absent':
|
elif state == 'absent':
|
||||||
args = ['atomic', 'images', 'delete', "--storage=%s" % backend, image]
|
args = [atomic_bin, 'images', 'delete', "--storage=%s" % backend, image]
|
||||||
rc, out, err = module.run_command(args, check_rc=False)
|
rc, out, err = module.run_command(args, check_rc=False)
|
||||||
if rc < 0:
|
if rc < 0:
|
||||||
module.fail_json(rc=rc, msg=err)
|
module.fail_json(rc=rc, msg=err)
|
||||||
@@ -126,11 +128,11 @@ def core(module):
|
|||||||
is_upgraded = do_upgrade(module, image)
|
is_upgraded = do_upgrade(module, image)
|
||||||
|
|
||||||
if started:
|
if started:
|
||||||
args = ['atomic', 'run', image]
|
args = [atomic_bin, 'run', image]
|
||||||
else:
|
else:
|
||||||
args = ['atomic', 'install', image]
|
args = [atomic_bin, 'install', image]
|
||||||
elif state == 'absent':
|
elif state == 'absent':
|
||||||
args = ['atomic', 'uninstall', image]
|
args = [atomic_bin, 'uninstall', image]
|
||||||
|
|
||||||
rc, out, err = module.run_command(args, check_rc=False)
|
rc, out, err = module.run_command(args, check_rc=False)
|
||||||
|
|
||||||
@@ -155,9 +157,7 @@ def main():
|
|||||||
)
|
)
|
||||||
|
|
||||||
# Verify that the platform supports atomic command
|
# Verify that the platform supports atomic command
|
||||||
rc, out, err = module.run_command('atomic -v', check_rc=False)
|
dummy = module.get_bin_path('atomic', required=True)
|
||||||
if rc != 0:
|
|
||||||
module.fail_json(msg="Error in running atomic command", err=err)
|
|
||||||
|
|
||||||
try:
|
try:
|
||||||
core(module)
|
core(module)
|
||||||
|
|||||||
@@ -260,7 +260,7 @@ class DimensionDataNetworkModule(DimensionDataModule):
|
|||||||
)
|
)
|
||||||
|
|
||||||
self.module.fail_json(
|
self.module.fail_json(
|
||||||
"Unexpected failure deleting network with id %s", network.id
|
"Unexpected failure deleting network with id %s" % network.id
|
||||||
)
|
)
|
||||||
|
|
||||||
except DimensionDataAPIException as e:
|
except DimensionDataAPIException as e:
|
||||||
|
|||||||
@@ -28,7 +28,6 @@ options:
|
|||||||
- The region of the instance. This is a required parameter only when
|
- The region of the instance. This is a required parameter only when
|
||||||
creating Linode instances. See
|
creating Linode instances. See
|
||||||
U(https://www.linode.com/docs/api/regions/).
|
U(https://www.linode.com/docs/api/regions/).
|
||||||
required: false
|
|
||||||
type: str
|
type: str
|
||||||
image:
|
image:
|
||||||
description:
|
description:
|
||||||
@@ -36,14 +35,12 @@ options:
|
|||||||
creating Linode instances. See
|
creating Linode instances. See
|
||||||
U(https://www.linode.com/docs/api/images/).
|
U(https://www.linode.com/docs/api/images/).
|
||||||
type: str
|
type: str
|
||||||
required: false
|
|
||||||
type:
|
type:
|
||||||
description:
|
description:
|
||||||
- The type of the instance. This is a required parameter only when
|
- The type of the instance. This is a required parameter only when
|
||||||
creating Linode instances. See
|
creating Linode instances. See
|
||||||
U(https://www.linode.com/docs/api/linode-types/).
|
U(https://www.linode.com/docs/api/linode-types/).
|
||||||
type: str
|
type: str
|
||||||
required: false
|
|
||||||
label:
|
label:
|
||||||
description:
|
description:
|
||||||
- The instance label. This label is used as the main determiner for
|
- The instance label. This label is used as the main determiner for
|
||||||
@@ -56,12 +53,10 @@ options:
|
|||||||
group labelling is deprecated but still supported. The encouraged
|
group labelling is deprecated but still supported. The encouraged
|
||||||
method for marking instances is to use tags.
|
method for marking instances is to use tags.
|
||||||
type: str
|
type: str
|
||||||
required: false
|
|
||||||
tags:
|
tags:
|
||||||
description:
|
description:
|
||||||
- The tags that the instance should be marked under. See
|
- The tags that the instance should be marked under. See
|
||||||
U(https://www.linode.com/docs/api/tags/).
|
U(https://www.linode.com/docs/api/tags/).
|
||||||
required: false
|
|
||||||
type: list
|
type: list
|
||||||
elements: str
|
elements: str
|
||||||
root_pass:
|
root_pass:
|
||||||
@@ -69,12 +64,10 @@ options:
|
|||||||
- The password for the root user. If not specified, one will be
|
- The password for the root user. If not specified, one will be
|
||||||
generated. This generated password will be available in the task
|
generated. This generated password will be available in the task
|
||||||
success JSON.
|
success JSON.
|
||||||
required: false
|
|
||||||
type: str
|
type: str
|
||||||
authorized_keys:
|
authorized_keys:
|
||||||
description:
|
description:
|
||||||
- A list of SSH public key parts to deploy for the root user.
|
- A list of SSH public key parts to deploy for the root user.
|
||||||
required: false
|
|
||||||
type: list
|
type: list
|
||||||
elements: str
|
elements: str
|
||||||
state:
|
state:
|
||||||
@@ -208,9 +201,8 @@ def create_linode(module, client, **kwargs):
|
|||||||
else:
|
else:
|
||||||
return response._raw_json
|
return response._raw_json
|
||||||
except TypeError:
|
except TypeError:
|
||||||
module.fail_json(msg='Unable to parse Linode instance creation'
|
module.fail_json(msg='Unable to parse Linode instance creation response. Please raise a bug against this'
|
||||||
' response. Please raise a bug against this'
|
' module on https://github.com/ansible-collections/community.general/issues'
|
||||||
' module on https://github.com/ansible/ansible/issues'
|
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
@@ -242,15 +234,15 @@ def initialise_module():
|
|||||||
no_log=True,
|
no_log=True,
|
||||||
fallback=(env_fallback, ['LINODE_ACCESS_TOKEN']),
|
fallback=(env_fallback, ['LINODE_ACCESS_TOKEN']),
|
||||||
),
|
),
|
||||||
authorized_keys=dict(type='list', elements='str', required=False),
|
authorized_keys=dict(type='list', elements='str', no_log=False),
|
||||||
group=dict(type='str', required=False),
|
group=dict(type='str'),
|
||||||
image=dict(type='str', required=False),
|
image=dict(type='str'),
|
||||||
region=dict(type='str', required=False),
|
region=dict(type='str'),
|
||||||
root_pass=dict(type='str', required=False, no_log=True),
|
root_pass=dict(type='str', no_log=True),
|
||||||
tags=dict(type='list', elements='str', required=False),
|
tags=dict(type='list', elements='str'),
|
||||||
type=dict(type='str', required=False),
|
type=dict(type='str'),
|
||||||
stackscript_id=dict(type='int', required=False),
|
stackscript_id=dict(type='int'),
|
||||||
stackscript_data=dict(type='dict', required=False),
|
stackscript_data=dict(type='dict'),
|
||||||
),
|
),
|
||||||
supports_check_mode=False,
|
supports_check_mode=False,
|
||||||
required_one_of=(
|
required_one_of=(
|
||||||
|
|||||||
@@ -243,7 +243,6 @@ except ImportError:
|
|||||||
HAS_OVIRTSDK = False
|
HAS_OVIRTSDK = False
|
||||||
|
|
||||||
from ansible.module_utils.basic import AnsibleModule
|
from ansible.module_utils.basic import AnsibleModule
|
||||||
from ansible.module_utils.common.removed import removed_module
|
|
||||||
|
|
||||||
|
|
||||||
# ------------------------------------------------------------------- #
|
# ------------------------------------------------------------------- #
|
||||||
|
|||||||
@@ -17,7 +17,6 @@ options:
|
|||||||
password:
|
password:
|
||||||
description:
|
description:
|
||||||
- the instance root password
|
- the instance root password
|
||||||
- required only for C(state=present)
|
|
||||||
type: str
|
type: str
|
||||||
hostname:
|
hostname:
|
||||||
description:
|
description:
|
||||||
@@ -124,6 +123,15 @@ options:
|
|||||||
- with states C(stopped) , C(restarted) allow to force stop instance
|
- with states C(stopped) , C(restarted) allow to force stop instance
|
||||||
type: bool
|
type: bool
|
||||||
default: 'no'
|
default: 'no'
|
||||||
|
purge:
|
||||||
|
description:
|
||||||
|
- Remove container from all related configurations.
|
||||||
|
- For example backup jobs, replication jobs, or HA.
|
||||||
|
- Related ACLs and Firewall entries will always be removed.
|
||||||
|
- Used with state C(absent).
|
||||||
|
type: bool
|
||||||
|
default: false
|
||||||
|
version_added: 2.3.0
|
||||||
state:
|
state:
|
||||||
description:
|
description:
|
||||||
- Indicate desired state of the instance
|
- Indicate desired state of the instance
|
||||||
@@ -507,6 +515,7 @@ def main():
|
|||||||
searchdomain=dict(),
|
searchdomain=dict(),
|
||||||
timeout=dict(type='int', default=30),
|
timeout=dict(type='int', default=30),
|
||||||
force=dict(type='bool', default=False),
|
force=dict(type='bool', default=False),
|
||||||
|
purge=dict(type='bool', default=False),
|
||||||
state=dict(default='present', choices=['present', 'absent', 'stopped', 'started', 'restarted']),
|
state=dict(default='present', choices=['present', 'absent', 'stopped', 'started', 'restarted']),
|
||||||
pubkey=dict(type='str', default=None),
|
pubkey=dict(type='str', default=None),
|
||||||
unprivileged=dict(type='bool', default=False),
|
unprivileged=dict(type='bool', default=False),
|
||||||
@@ -514,7 +523,7 @@ def main():
|
|||||||
hookscript=dict(type='str'),
|
hookscript=dict(type='str'),
|
||||||
proxmox_default_behavior=dict(type='str', choices=['compatibility', 'no_defaults']),
|
proxmox_default_behavior=dict(type='str', choices=['compatibility', 'no_defaults']),
|
||||||
),
|
),
|
||||||
required_if=[('state', 'present', ['node', 'hostname', 'password', 'ostemplate'])],
|
required_if=[('state', 'present', ['node', 'hostname', 'ostemplate'])],
|
||||||
required_together=[('api_token_id', 'api_token_secret')],
|
required_together=[('api_token_id', 'api_token_secret')],
|
||||||
required_one_of=[('api_password', 'api_token_id')],
|
required_one_of=[('api_password', 'api_token_id')],
|
||||||
)
|
)
|
||||||
@@ -687,7 +696,13 @@ def main():
|
|||||||
if getattr(proxmox.nodes(vm[0]['node']), VZ_TYPE)(vmid).status.current.get()['status'] == 'mounted':
|
if getattr(proxmox.nodes(vm[0]['node']), VZ_TYPE)(vmid).status.current.get()['status'] == 'mounted':
|
||||||
module.exit_json(changed=False, msg="VM %s is mounted. Stop it with force option before deletion." % vmid)
|
module.exit_json(changed=False, msg="VM %s is mounted. Stop it with force option before deletion." % vmid)
|
||||||
|
|
||||||
taskid = getattr(proxmox.nodes(vm[0]['node']), VZ_TYPE).delete(vmid)
|
delete_params = {}
|
||||||
|
|
||||||
|
if module.params['purge']:
|
||||||
|
delete_params['purge'] = 1
|
||||||
|
|
||||||
|
taskid = getattr(proxmox.nodes(vm[0]['node']), VZ_TYPE).delete(vmid, **delete_params)
|
||||||
|
|
||||||
while timeout:
|
while timeout:
|
||||||
if (proxmox.nodes(vm[0]['node']).tasks(taskid).status.get()['status'] == 'stopped' and
|
if (proxmox.nodes(vm[0]['node']).tasks(taskid).status.get()['status'] == 'stopped' and
|
||||||
proxmox.nodes(vm[0]['node']).tasks(taskid).status.get()['exitstatus'] == 'OK'):
|
proxmox.nodes(vm[0]['node']).tasks(taskid).status.get()['exitstatus'] == 'OK'):
|
||||||
|
|||||||
@@ -1,7 +1,7 @@
|
|||||||
#!/usr/bin/python
|
#!/usr/bin/python
|
||||||
# -*- coding: utf-8 -*-
|
# -*- coding: utf-8 -*-
|
||||||
#
|
#
|
||||||
# Copyright: Tristan Le Guern (@Aversiste) <tleguern at bouledef.eu>
|
# Copyright: Tristan Le Guern (@tleguern) <tleguern at bouledef.eu>
|
||||||
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
|
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||||
|
|
||||||
from __future__ import absolute_import, division, print_function
|
from __future__ import absolute_import, division, print_function
|
||||||
@@ -21,7 +21,7 @@ options:
|
|||||||
- Restrict results to a specific authentication realm.
|
- Restrict results to a specific authentication realm.
|
||||||
aliases: ['realm', 'name']
|
aliases: ['realm', 'name']
|
||||||
type: str
|
type: str
|
||||||
author: Tristan Le Guern (@Aversiste)
|
author: Tristan Le Guern (@tleguern)
|
||||||
extends_documentation_fragment: community.general.proxmox.documentation
|
extends_documentation_fragment: community.general.proxmox.documentation
|
||||||
'''
|
'''
|
||||||
|
|
||||||
|
|||||||
@@ -21,7 +21,7 @@ options:
|
|||||||
- Restrict results to a specific group.
|
- Restrict results to a specific group.
|
||||||
aliases: ['groupid', 'name']
|
aliases: ['groupid', 'name']
|
||||||
type: str
|
type: str
|
||||||
author: Tristan Le Guern (@Aversiste)
|
author: Tristan Le Guern (@tleguern)
|
||||||
extends_documentation_fragment: community.general.proxmox.documentation
|
extends_documentation_fragment: community.general.proxmox.documentation
|
||||||
'''
|
'''
|
||||||
|
|
||||||
|
|||||||
@@ -425,6 +425,14 @@ options:
|
|||||||
option has a default of C(no). Note that the default value of I(proxmox_default_behavior)
|
option has a default of C(no). Note that the default value of I(proxmox_default_behavior)
|
||||||
changes in community.general 4.0.0.
|
changes in community.general 4.0.0.
|
||||||
type: bool
|
type: bool
|
||||||
|
tags:
|
||||||
|
description:
|
||||||
|
- List of tags to apply to the VM instance.
|
||||||
|
- Tags must start with C([a-z0-9_]) followed by zero or more of the following characters C([a-z0-9_-+.]).
|
||||||
|
- Tags are only available in Proxmox 6+.
|
||||||
|
type: list
|
||||||
|
elements: str
|
||||||
|
version_added: 2.3.0
|
||||||
target:
|
target:
|
||||||
description:
|
description:
|
||||||
- Target node. Only allowed if the original VM is on shared storage.
|
- Target node. Only allowed if the original VM is on shared storage.
|
||||||
@@ -858,7 +866,7 @@ def wait_for_task(module, proxmox, node, taskid):
|
|||||||
def create_vm(module, proxmox, vmid, newid, node, name, memory, cpu, cores, sockets, update, **kwargs):
|
def create_vm(module, proxmox, vmid, newid, node, name, memory, cpu, cores, sockets, update, **kwargs):
|
||||||
# Available only in PVE 4
|
# Available only in PVE 4
|
||||||
only_v4 = ['force', 'protection', 'skiplock']
|
only_v4 = ['force', 'protection', 'skiplock']
|
||||||
only_v6 = ['ciuser', 'cipassword', 'sshkeys', 'ipconfig']
|
only_v6 = ['ciuser', 'cipassword', 'sshkeys', 'ipconfig', 'tags']
|
||||||
|
|
||||||
# valide clone parameters
|
# valide clone parameters
|
||||||
valid_clone_params = ['format', 'full', 'pool', 'snapname', 'storage', 'target']
|
valid_clone_params = ['format', 'full', 'pool', 'snapname', 'storage', 'target']
|
||||||
@@ -928,6 +936,13 @@ def create_vm(module, proxmox, vmid, newid, node, name, memory, cpu, cores, sock
|
|||||||
if searchdomains:
|
if searchdomains:
|
||||||
kwargs['searchdomain'] = ' '.join(searchdomains)
|
kwargs['searchdomain'] = ' '.join(searchdomains)
|
||||||
|
|
||||||
|
# VM tags are expected to be valid and presented as a comma/semi-colon delimited string
|
||||||
|
if 'tags' in kwargs:
|
||||||
|
for tag in kwargs['tags']:
|
||||||
|
if not re.match(r'^[a-z0-9_][a-z0-9_\-\+\.]*$', tag):
|
||||||
|
module.fail_json(msg='%s is not a valid tag' % tag)
|
||||||
|
kwargs['tags'] = ",".join(kwargs['tags'])
|
||||||
|
|
||||||
# -args and skiplock require root@pam user - but can not use api tokens
|
# -args and skiplock require root@pam user - but can not use api tokens
|
||||||
if module.params['api_user'] == "root@pam" and module.params['args'] is None:
|
if module.params['api_user'] == "root@pam" and module.params['args'] is None:
|
||||||
if not update and module.params['proxmox_default_behavior'] == 'compatibility':
|
if not update and module.params['proxmox_default_behavior'] == 'compatibility':
|
||||||
@@ -1057,12 +1072,13 @@ def main():
|
|||||||
smbios=dict(type='str'),
|
smbios=dict(type='str'),
|
||||||
snapname=dict(type='str'),
|
snapname=dict(type='str'),
|
||||||
sockets=dict(type='int'),
|
sockets=dict(type='int'),
|
||||||
sshkeys=dict(type='str'),
|
sshkeys=dict(type='str', no_log=False),
|
||||||
startdate=dict(type='str'),
|
startdate=dict(type='str'),
|
||||||
startup=dict(),
|
startup=dict(),
|
||||||
state=dict(default='present', choices=['present', 'absent', 'stopped', 'started', 'restarted', 'current']),
|
state=dict(default='present', choices=['present', 'absent', 'stopped', 'started', 'restarted', 'current']),
|
||||||
storage=dict(type='str'),
|
storage=dict(type='str'),
|
||||||
tablet=dict(type='bool'),
|
tablet=dict(type='bool'),
|
||||||
|
tags=dict(type='list', elements='str'),
|
||||||
target=dict(type='str'),
|
target=dict(type='str'),
|
||||||
tdf=dict(type='bool'),
|
tdf=dict(type='bool'),
|
||||||
template=dict(type='bool'),
|
template=dict(type='bool'),
|
||||||
@@ -1210,7 +1226,7 @@ def main():
|
|||||||
if get_vm(proxmox, vmid) and not (update or clone):
|
if get_vm(proxmox, vmid) and not (update or clone):
|
||||||
module.exit_json(changed=False, vmid=vmid, msg="VM with vmid <%s> already exists" % vmid)
|
module.exit_json(changed=False, vmid=vmid, msg="VM with vmid <%s> already exists" % vmid)
|
||||||
elif get_vmid(proxmox, name) and not (update or clone):
|
elif get_vmid(proxmox, name) and not (update or clone):
|
||||||
module.exit_json(changed=False, vmid=vmid, msg="VM with name <%s> already exists" % name)
|
module.exit_json(changed=False, vmid=get_vmid(proxmox, name)[0], msg="VM with name <%s> already exists" % name)
|
||||||
elif not (node, name):
|
elif not (node, name):
|
||||||
module.fail_json(msg='node, name is mandatory for creating/updating vm')
|
module.fail_json(msg='node, name is mandatory for creating/updating vm')
|
||||||
elif not node_check(proxmox, node):
|
elif not node_check(proxmox, node):
|
||||||
@@ -1267,6 +1283,7 @@ def main():
|
|||||||
startdate=module.params['startdate'],
|
startdate=module.params['startdate'],
|
||||||
startup=module.params['startup'],
|
startup=module.params['startup'],
|
||||||
tablet=module.params['tablet'],
|
tablet=module.params['tablet'],
|
||||||
|
tags=module.params['tags'],
|
||||||
target=module.params['target'],
|
target=module.params['target'],
|
||||||
tdf=module.params['tdf'],
|
tdf=module.params['tdf'],
|
||||||
template=module.params['template'],
|
template=module.params['template'],
|
||||||
|
|||||||
@@ -1,7 +1,7 @@
|
|||||||
#!/usr/bin/python
|
#!/usr/bin/python
|
||||||
# -*- coding: utf-8 -*-
|
# -*- coding: utf-8 -*-
|
||||||
#
|
#
|
||||||
# Copyright: Tristan Le Guern (@Aversiste) <tleguern at bouledef.eu>
|
# Copyright: Tristan Le Guern (@tleguern) <tleguern at bouledef.eu>
|
||||||
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
|
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||||
|
|
||||||
from __future__ import absolute_import, division, print_function
|
from __future__ import absolute_import, division, print_function
|
||||||
@@ -25,7 +25,7 @@ options:
|
|||||||
description:
|
description:
|
||||||
- Filter on a specifc storage type.
|
- Filter on a specifc storage type.
|
||||||
type: str
|
type: str
|
||||||
author: Tristan Le Guern (@Aversiste)
|
author: Tristan Le Guern (@tleguern)
|
||||||
extends_documentation_fragment: community.general.proxmox.documentation
|
extends_documentation_fragment: community.general.proxmox.documentation
|
||||||
notes:
|
notes:
|
||||||
- Storage specific options can be returned by this module, please look at the documentation at U(https://pve.proxmox.com/wiki/Storage).
|
- Storage specific options can be returned by this module, please look at the documentation at U(https://pve.proxmox.com/wiki/Storage).
|
||||||
|
|||||||
@@ -30,7 +30,7 @@ options:
|
|||||||
description:
|
description:
|
||||||
- Restrict results to a specific user ID, which is a concatenation of a user and domain parts.
|
- Restrict results to a specific user ID, which is a concatenation of a user and domain parts.
|
||||||
type: str
|
type: str
|
||||||
author: Tristan Le Guern (@Aversiste)
|
author: Tristan Le Guern (@tleguern)
|
||||||
extends_documentation_fragment: community.general.proxmox.documentation
|
extends_documentation_fragment: community.general.proxmox.documentation
|
||||||
'''
|
'''
|
||||||
|
|
||||||
|
|||||||
@@ -1229,24 +1229,6 @@ class RHEV(object):
|
|||||||
self.__get_conn()
|
self.__get_conn()
|
||||||
return self.conn.set_VM_Host(vmname, vmhost)
|
return self.conn.set_VM_Host(vmname, vmhost)
|
||||||
|
|
||||||
# pylint: disable=unreachable
|
|
||||||
VM = self.conn.get_VM(vmname)
|
|
||||||
HOST = self.conn.get_Host(vmhost)
|
|
||||||
|
|
||||||
if VM.placement_policy.host is None:
|
|
||||||
self.conn.set_VM_Host(vmname, vmhost)
|
|
||||||
elif str(VM.placement_policy.host.id) != str(HOST.id):
|
|
||||||
self.conn.set_VM_Host(vmname, vmhost)
|
|
||||||
else:
|
|
||||||
setMsg("VM's startup host was already set to " + vmhost)
|
|
||||||
checkFail()
|
|
||||||
|
|
||||||
if str(VM.status.state) == "up":
|
|
||||||
self.conn.migrate_VM(vmname, vmhost)
|
|
||||||
checkFail()
|
|
||||||
|
|
||||||
return True
|
|
||||||
|
|
||||||
def setHost(self, hostname, cluster, ifaces):
|
def setHost(self, hostname, cluster, ifaces):
|
||||||
self.__get_conn()
|
self.__get_conn()
|
||||||
return self.conn.set_Host(hostname, cluster, ifaces)
|
return self.conn.set_Host(hostname, cluster, ifaces)
|
||||||
|
|||||||
@@ -8,7 +8,7 @@ from __future__ import absolute_import, division, print_function
|
|||||||
__metaclass__ = type
|
__metaclass__ = type
|
||||||
|
|
||||||
|
|
||||||
DOCUMENTATION = '''
|
DOCUMENTATION = r'''
|
||||||
---
|
---
|
||||||
module: terraform
|
module: terraform
|
||||||
short_description: Manages a Terraform deployment (and plans)
|
short_description: Manages a Terraform deployment (and plans)
|
||||||
@@ -177,24 +177,31 @@ command:
|
|||||||
import os
|
import os
|
||||||
import json
|
import json
|
||||||
import tempfile
|
import tempfile
|
||||||
|
from distutils.version import LooseVersion
|
||||||
from ansible.module_utils.six.moves import shlex_quote
|
from ansible.module_utils.six.moves import shlex_quote
|
||||||
|
|
||||||
from ansible.module_utils.basic import AnsibleModule
|
from ansible.module_utils.basic import AnsibleModule
|
||||||
|
|
||||||
DESTROY_ARGS = ('destroy', '-no-color', '-force')
|
|
||||||
APPLY_ARGS = ('apply', '-no-color', '-input=false', '-auto-approve=true')
|
|
||||||
module = None
|
module = None
|
||||||
|
|
||||||
|
|
||||||
def preflight_validation(bin_path, project_path, variables_args=None, plan_file=None):
|
def get_version(bin_path):
|
||||||
|
extract_version = module.run_command([bin_path, 'version', '-json'])
|
||||||
|
terraform_version = (json.loads(extract_version[1]))['terraform_version']
|
||||||
|
return terraform_version
|
||||||
|
|
||||||
|
|
||||||
|
def preflight_validation(bin_path, project_path, version, variables_args=None, plan_file=None):
|
||||||
if project_path in [None, ''] or '/' not in project_path:
|
if project_path in [None, ''] or '/' not in project_path:
|
||||||
module.fail_json(msg="Path for Terraform project can not be None or ''.")
|
module.fail_json(msg="Path for Terraform project can not be None or ''.")
|
||||||
if not os.path.exists(bin_path):
|
if not os.path.exists(bin_path):
|
||||||
module.fail_json(msg="Path for Terraform binary '{0}' doesn't exist on this host - check the path and try again please.".format(bin_path))
|
module.fail_json(msg="Path for Terraform binary '{0}' doesn't exist on this host - check the path and try again please.".format(bin_path))
|
||||||
if not os.path.isdir(project_path):
|
if not os.path.isdir(project_path):
|
||||||
module.fail_json(msg="Path for Terraform project '{0}' doesn't exist on this host - check the path and try again please.".format(project_path))
|
module.fail_json(msg="Path for Terraform project '{0}' doesn't exist on this host - check the path and try again please.".format(project_path))
|
||||||
|
if LooseVersion(version) < LooseVersion('0.15.0'):
|
||||||
rc, out, err = module.run_command([bin_path, 'validate'] + variables_args, check_rc=True, cwd=project_path, use_unsafe_shell=True)
|
rc, out, err = module.run_command([bin_path, 'validate'] + variables_args, check_rc=True, cwd=project_path)
|
||||||
|
else:
|
||||||
|
rc, out, err = module.run_command([bin_path, 'validate'], check_rc=True, cwd=project_path)
|
||||||
|
|
||||||
|
|
||||||
def _state_args(state_file):
|
def _state_args(state_file):
|
||||||
@@ -267,7 +274,7 @@ def build_plan(command, project_path, variables_args, state_file, targets, state
|
|||||||
|
|
||||||
plan_command.extend(_state_args(state_file))
|
plan_command.extend(_state_args(state_file))
|
||||||
|
|
||||||
rc, out, err = module.run_command(plan_command + variables_args, cwd=project_path, use_unsafe_shell=True)
|
rc, out, err = module.run_command(plan_command + variables_args, cwd=project_path)
|
||||||
|
|
||||||
if rc == 0:
|
if rc == 0:
|
||||||
# no changes
|
# no changes
|
||||||
@@ -326,6 +333,15 @@ def main():
|
|||||||
else:
|
else:
|
||||||
command = [module.get_bin_path('terraform', required=True)]
|
command = [module.get_bin_path('terraform', required=True)]
|
||||||
|
|
||||||
|
checked_version = get_version(command[0])
|
||||||
|
|
||||||
|
if LooseVersion(checked_version) < LooseVersion('0.15.0'):
|
||||||
|
DESTROY_ARGS = ('destroy', '-no-color', '-force')
|
||||||
|
APPLY_ARGS = ('apply', '-no-color', '-input=false', '-auto-approve=true')
|
||||||
|
else:
|
||||||
|
DESTROY_ARGS = ('destroy', '-no-color', '-auto-approve')
|
||||||
|
APPLY_ARGS = ('apply', '-no-color', '-input=false', '-auto-approve')
|
||||||
|
|
||||||
if force_init:
|
if force_init:
|
||||||
init_plugins(command[0], project_path, backend_config, backend_config_files, init_reconfigure)
|
init_plugins(command[0], project_path, backend_config, backend_config_files, init_reconfigure)
|
||||||
|
|
||||||
@@ -351,7 +367,7 @@ def main():
|
|||||||
for f in variables_files:
|
for f in variables_files:
|
||||||
variables_args.extend(['-var-file', f])
|
variables_args.extend(['-var-file', f])
|
||||||
|
|
||||||
preflight_validation(command[0], project_path, variables_args)
|
preflight_validation(command[0], project_path, checked_version, variables_args)
|
||||||
|
|
||||||
if module.params.get('lock') is not None:
|
if module.params.get('lock') is not None:
|
||||||
if module.params.get('lock'):
|
if module.params.get('lock'):
|
||||||
@@ -382,7 +398,14 @@ def main():
|
|||||||
command.append(plan_file)
|
command.append(plan_file)
|
||||||
|
|
||||||
if needs_application and not module.check_mode and not state == 'planned':
|
if needs_application and not module.check_mode and not state == 'planned':
|
||||||
rc, out, err = module.run_command(command, check_rc=True, cwd=project_path)
|
rc, out, err = module.run_command(command, check_rc=False, cwd=project_path)
|
||||||
|
if rc != 0:
|
||||||
|
if workspace_ctx["current"] != workspace:
|
||||||
|
select_workspace(command[0], project_path, workspace_ctx["current"])
|
||||||
|
module.fail_json(msg=err.rstrip(), rc=rc, stdout=out,
|
||||||
|
stdout_lines=out.splitlines(), stderr=err,
|
||||||
|
stderr_lines=err.splitlines(),
|
||||||
|
cmd=' '.join(command))
|
||||||
# checks out to decide if changes were made during execution
|
# checks out to decide if changes were made during execution
|
||||||
if ' 0 added, 0 changed' not in out and not state == "absent" or ' 0 destroyed' not in out:
|
if ' 0 added, 0 changed' not in out and not state == "absent" or ' 0 destroyed' not in out:
|
||||||
changed = True
|
changed = True
|
||||||
|
|||||||
@@ -630,7 +630,7 @@ def main():
|
|||||||
ram=dict(type='float'),
|
ram=dict(type='float'),
|
||||||
hdds=dict(type='list', elements='dict'),
|
hdds=dict(type='list', elements='dict'),
|
||||||
count=dict(type='int', default=1),
|
count=dict(type='int', default=1),
|
||||||
ssh_key=dict(type='raw'),
|
ssh_key=dict(type='raw', no_log=False),
|
||||||
auto_increment=dict(type='bool', default=True),
|
auto_increment=dict(type='bool', default=True),
|
||||||
server=dict(type='str'),
|
server=dict(type='str'),
|
||||||
datacenter=dict(
|
datacenter=dict(
|
||||||
|
|||||||
276
plugins/modules/cloud/opennebula/one_template.py
Normal file
276
plugins/modules/cloud/opennebula/one_template.py
Normal file
@@ -0,0 +1,276 @@
|
|||||||
|
#!/usr/bin/python
|
||||||
|
#
|
||||||
|
# Copyright: (c) 2021, Georg Gadinger <nilsding@nilsding.org>
|
||||||
|
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||||
|
|
||||||
|
# Make coding more python3-ish
|
||||||
|
from __future__ import (absolute_import, division, print_function)
|
||||||
|
__metaclass__ = type
|
||||||
|
|
||||||
|
DOCUMENTATION = '''
|
||||||
|
---
|
||||||
|
module: one_template
|
||||||
|
|
||||||
|
short_description: Manages OpenNebula templates
|
||||||
|
|
||||||
|
version_added: 2.4.0
|
||||||
|
|
||||||
|
requirements:
|
||||||
|
- pyone
|
||||||
|
|
||||||
|
description:
|
||||||
|
- "Manages OpenNebula templates."
|
||||||
|
|
||||||
|
options:
|
||||||
|
id:
|
||||||
|
description:
|
||||||
|
- A I(id) of the template you would like to manage. If not set then a
|
||||||
|
- new template will be created with the given I(name).
|
||||||
|
type: int
|
||||||
|
name:
|
||||||
|
description:
|
||||||
|
- A I(name) of the template you would like to manage. If a template with
|
||||||
|
- the given name does not exist it will be created, otherwise it will be
|
||||||
|
- managed by this module.
|
||||||
|
type: str
|
||||||
|
template:
|
||||||
|
description:
|
||||||
|
- A string containing the template contents.
|
||||||
|
type: str
|
||||||
|
state:
|
||||||
|
description:
|
||||||
|
- C(present) - state that is used to manage the template.
|
||||||
|
- C(absent) - delete the template.
|
||||||
|
choices: ["present", "absent"]
|
||||||
|
default: present
|
||||||
|
type: str
|
||||||
|
|
||||||
|
notes:
|
||||||
|
- Supports C(check_mode). Note that check mode always returns C(changed=true) for existing templates, even if the template would not actually change.
|
||||||
|
|
||||||
|
extends_documentation_fragment:
|
||||||
|
- community.general.opennebula
|
||||||
|
|
||||||
|
author:
|
||||||
|
- "Georg Gadinger (@nilsding)"
|
||||||
|
'''
|
||||||
|
|
||||||
|
EXAMPLES = '''
|
||||||
|
- name: Fetch the TEMPLATE by id
|
||||||
|
community.general.one_template:
|
||||||
|
id: 6459
|
||||||
|
register: result
|
||||||
|
|
||||||
|
- name: Print the TEMPLATE properties
|
||||||
|
ansible.builtin.debug:
|
||||||
|
var: result
|
||||||
|
|
||||||
|
- name: Fetch the TEMPLATE by name
|
||||||
|
community.general.one_template:
|
||||||
|
name: tf-prd-users-workerredis-p6379a
|
||||||
|
register: result
|
||||||
|
|
||||||
|
- name: Create a new or update an existing TEMPLATE
|
||||||
|
community.general.one_template:
|
||||||
|
name: generic-opensuse
|
||||||
|
template: |
|
||||||
|
CONTEXT = [
|
||||||
|
HOSTNAME = "generic-opensuse"
|
||||||
|
]
|
||||||
|
CPU = "1"
|
||||||
|
CUSTOM_ATTRIBUTE = ""
|
||||||
|
DISK = [
|
||||||
|
CACHE = "writeback",
|
||||||
|
DEV_PREFIX = "sd",
|
||||||
|
DISCARD = "unmap",
|
||||||
|
IMAGE = "opensuse-leap-15.2",
|
||||||
|
IMAGE_UNAME = "oneadmin",
|
||||||
|
IO = "threads",
|
||||||
|
SIZE = "" ]
|
||||||
|
MEMORY = "2048"
|
||||||
|
NIC = [
|
||||||
|
MODEL = "virtio",
|
||||||
|
NETWORK = "testnet",
|
||||||
|
NETWORK_UNAME = "oneadmin" ]
|
||||||
|
OS = [
|
||||||
|
ARCH = "x86_64",
|
||||||
|
BOOT = "disk0" ]
|
||||||
|
SCHED_REQUIREMENTS = "CLUSTER_ID=\\"100\\""
|
||||||
|
VCPU = "2"
|
||||||
|
|
||||||
|
- name: Delete the TEMPLATE by id
|
||||||
|
community.general.one_template:
|
||||||
|
id: 6459
|
||||||
|
state: absent
|
||||||
|
'''
|
||||||
|
|
||||||
|
RETURN = '''
|
||||||
|
id:
|
||||||
|
description: template id
|
||||||
|
type: int
|
||||||
|
returned: when I(state=present)
|
||||||
|
sample: 153
|
||||||
|
name:
|
||||||
|
description: template name
|
||||||
|
type: str
|
||||||
|
returned: when I(state=present)
|
||||||
|
sample: app1
|
||||||
|
template:
|
||||||
|
description: the parsed template
|
||||||
|
type: dict
|
||||||
|
returned: when I(state=present)
|
||||||
|
group_id:
|
||||||
|
description: template's group id
|
||||||
|
type: int
|
||||||
|
returned: when I(state=present)
|
||||||
|
sample: 1
|
||||||
|
group_name:
|
||||||
|
description: template's group name
|
||||||
|
type: str
|
||||||
|
returned: when I(state=present)
|
||||||
|
sample: one-users
|
||||||
|
owner_id:
|
||||||
|
description: template's owner id
|
||||||
|
type: int
|
||||||
|
returned: when I(state=present)
|
||||||
|
sample: 143
|
||||||
|
owner_name:
|
||||||
|
description: template's owner name
|
||||||
|
type: str
|
||||||
|
returned: when I(state=present)
|
||||||
|
sample: ansible-test
|
||||||
|
'''
|
||||||
|
|
||||||
|
|
||||||
|
from ansible_collections.community.general.plugins.module_utils.opennebula import OpenNebulaModule
|
||||||
|
|
||||||
|
|
||||||
|
class TemplateModule(OpenNebulaModule):
|
||||||
|
def __init__(self):
|
||||||
|
argument_spec = dict(
|
||||||
|
id=dict(type='int', required=False),
|
||||||
|
name=dict(type='str', required=False),
|
||||||
|
state=dict(type='str', choices=['present', 'absent'], default='present'),
|
||||||
|
template=dict(type='str', required=False),
|
||||||
|
)
|
||||||
|
|
||||||
|
mutually_exclusive = [
|
||||||
|
['id', 'name']
|
||||||
|
]
|
||||||
|
|
||||||
|
required_one_of = [('id', 'name')]
|
||||||
|
|
||||||
|
required_if = [
|
||||||
|
['state', 'present', ['template']]
|
||||||
|
]
|
||||||
|
|
||||||
|
OpenNebulaModule.__init__(self,
|
||||||
|
argument_spec,
|
||||||
|
supports_check_mode=True,
|
||||||
|
mutually_exclusive=mutually_exclusive,
|
||||||
|
required_one_of=required_one_of,
|
||||||
|
required_if=required_if)
|
||||||
|
|
||||||
|
def run(self, one, module, result):
|
||||||
|
params = module.params
|
||||||
|
id = params.get('id')
|
||||||
|
name = params.get('name')
|
||||||
|
desired_state = params.get('state')
|
||||||
|
template_data = params.get('template')
|
||||||
|
|
||||||
|
self.result = {}
|
||||||
|
|
||||||
|
template = self.get_template_instance(id, name)
|
||||||
|
needs_creation = False
|
||||||
|
if not template and desired_state != 'absent':
|
||||||
|
if id:
|
||||||
|
module.fail_json(msg="There is no template with id=" + str(id))
|
||||||
|
else:
|
||||||
|
needs_creation = True
|
||||||
|
|
||||||
|
if desired_state == 'absent':
|
||||||
|
self.result = self.delete_template(template)
|
||||||
|
else:
|
||||||
|
if needs_creation:
|
||||||
|
self.result = self.create_template(name, template_data)
|
||||||
|
else:
|
||||||
|
self.result = self.update_template(template, template_data)
|
||||||
|
|
||||||
|
self.exit()
|
||||||
|
|
||||||
|
def get_template(self, predicate):
|
||||||
|
# -3 means "Resources belonging to the user"
|
||||||
|
# the other two parameters are used for pagination, -1 for both essentially means "return all"
|
||||||
|
pool = self.one.templatepool.info(-3, -1, -1)
|
||||||
|
|
||||||
|
for template in pool.VMTEMPLATE:
|
||||||
|
if predicate(template):
|
||||||
|
return template
|
||||||
|
|
||||||
|
return None
|
||||||
|
|
||||||
|
def get_template_by_id(self, template_id):
|
||||||
|
return self.get_template(lambda template: (template.ID == template_id))
|
||||||
|
|
||||||
|
def get_template_by_name(self, template_name):
|
||||||
|
return self.get_template(lambda template: (template.NAME == template_name))
|
||||||
|
|
||||||
|
def get_template_instance(self, requested_id, requested_name):
|
||||||
|
if requested_id:
|
||||||
|
return self.get_template_by_id(requested_id)
|
||||||
|
else:
|
||||||
|
return self.get_template_by_name(requested_name)
|
||||||
|
|
||||||
|
def get_template_info(self, template):
|
||||||
|
info = {
|
||||||
|
'id': template.ID,
|
||||||
|
'name': template.NAME,
|
||||||
|
'template': template.TEMPLATE,
|
||||||
|
'user_name': template.UNAME,
|
||||||
|
'user_id': template.UID,
|
||||||
|
'group_name': template.GNAME,
|
||||||
|
'group_id': template.GID,
|
||||||
|
}
|
||||||
|
|
||||||
|
return info
|
||||||
|
|
||||||
|
def create_template(self, name, template_data):
|
||||||
|
if not self.module.check_mode:
|
||||||
|
self.one.template.allocate("NAME = \"" + name + "\"\n" + template_data)
|
||||||
|
|
||||||
|
result = self.get_template_info(self.get_template_by_name(name))
|
||||||
|
result['changed'] = True
|
||||||
|
|
||||||
|
return result
|
||||||
|
|
||||||
|
def update_template(self, template, template_data):
|
||||||
|
if not self.module.check_mode:
|
||||||
|
# 0 = replace the whole template
|
||||||
|
self.one.template.update(template.ID, template_data, 0)
|
||||||
|
|
||||||
|
result = self.get_template_info(self.get_template_by_id(template.ID))
|
||||||
|
if self.module.check_mode:
|
||||||
|
# Unfortunately it is not easy to detect if the template would have changed, therefore always report a change here.
|
||||||
|
result['changed'] = True
|
||||||
|
else:
|
||||||
|
# if the previous parsed template data is not equal to the updated one, this has changed
|
||||||
|
result['changed'] = template.TEMPLATE != result['template']
|
||||||
|
|
||||||
|
return result
|
||||||
|
|
||||||
|
def delete_template(self, template):
|
||||||
|
if not template:
|
||||||
|
return {'changed': False}
|
||||||
|
|
||||||
|
if not self.module.check_mode:
|
||||||
|
self.one.template.delete(template.ID)
|
||||||
|
|
||||||
|
return {'changed': True}
|
||||||
|
|
||||||
|
|
||||||
|
def main():
|
||||||
|
TemplateModule().run_module()
|
||||||
|
|
||||||
|
|
||||||
|
if __name__ == '__main__':
|
||||||
|
main()
|
||||||
@@ -752,11 +752,20 @@ def get_vm_info(client, vm):
|
|||||||
if 'NIC' in vm.TEMPLATE:
|
if 'NIC' in vm.TEMPLATE:
|
||||||
if isinstance(vm.TEMPLATE['NIC'], list):
|
if isinstance(vm.TEMPLATE['NIC'], list):
|
||||||
for nic in vm.TEMPLATE['NIC']:
|
for nic in vm.TEMPLATE['NIC']:
|
||||||
networks_info.append({'ip': nic['IP'], 'mac': nic['MAC'], 'name': nic['NETWORK'], 'security_groups': nic['SECURITY_GROUPS']})
|
networks_info.append({
|
||||||
|
'ip': nic.get('IP', ''),
|
||||||
|
'mac': nic.get('MAC', ''),
|
||||||
|
'name': nic.get('NETWORK', ''),
|
||||||
|
'security_groups': nic.get('SECURITY_GROUPS', '')
|
||||||
|
})
|
||||||
else:
|
else:
|
||||||
networks_info.append(
|
networks_info.append({
|
||||||
{'ip': vm.TEMPLATE['NIC']['IP'], 'mac': vm.TEMPLATE['NIC']['MAC'],
|
'ip': vm.TEMPLATE['NIC'].get('IP', ''),
|
||||||
'name': vm.TEMPLATE['NIC']['NETWORK'], 'security_groups': vm.TEMPLATE['NIC']['SECURITY_GROUPS']})
|
'mac': vm.TEMPLATE['NIC'].get('MAC', ''),
|
||||||
|
'name': vm.TEMPLATE['NIC'].get('NETWORK', ''),
|
||||||
|
'security_groups':
|
||||||
|
vm.TEMPLATE['NIC'].get('SECURITY_GROUPS', '')
|
||||||
|
})
|
||||||
import time
|
import time
|
||||||
|
|
||||||
current_time = time.localtime()
|
current_time = time.localtime()
|
||||||
|
|||||||
@@ -162,7 +162,6 @@ def waitForTaskDone(client, name, taskId, timeout):
|
|||||||
currentTimeout -= 5
|
currentTimeout -= 5
|
||||||
if currentTimeout < 0:
|
if currentTimeout < 0:
|
||||||
return False
|
return False
|
||||||
return True
|
|
||||||
|
|
||||||
|
|
||||||
def main():
|
def main():
|
||||||
|
|||||||
@@ -111,7 +111,6 @@ ovirt_affinity_labels:
|
|||||||
import fnmatch
|
import fnmatch
|
||||||
import traceback
|
import traceback
|
||||||
|
|
||||||
from ansible.module_utils.common.removed import removed_module
|
|
||||||
from ansible.module_utils.basic import AnsibleModule
|
from ansible.module_utils.basic import AnsibleModule
|
||||||
from ansible_collections.community.general.plugins.module_utils._ovirt import (
|
from ansible_collections.community.general.plugins.module_utils._ovirt import (
|
||||||
check_sdk,
|
check_sdk,
|
||||||
|
|||||||
@@ -52,7 +52,6 @@ ovirt_api:
|
|||||||
|
|
||||||
import traceback
|
import traceback
|
||||||
|
|
||||||
from ansible.module_utils.common.removed import removed_module
|
|
||||||
from ansible.module_utils.basic import AnsibleModule
|
from ansible.module_utils.basic import AnsibleModule
|
||||||
from ansible_collections.community.general.plugins.module_utils._ovirt import (
|
from ansible_collections.community.general.plugins.module_utils._ovirt import (
|
||||||
check_sdk,
|
check_sdk,
|
||||||
|
|||||||
@@ -73,7 +73,6 @@ ovirt_clusters:
|
|||||||
|
|
||||||
import traceback
|
import traceback
|
||||||
|
|
||||||
from ansible.module_utils.common.removed import removed_module
|
|
||||||
from ansible.module_utils.basic import AnsibleModule
|
from ansible.module_utils.basic import AnsibleModule
|
||||||
from ansible_collections.community.general.plugins.module_utils._ovirt import (
|
from ansible_collections.community.general.plugins.module_utils._ovirt import (
|
||||||
check_sdk,
|
check_sdk,
|
||||||
|
|||||||
@@ -56,7 +56,6 @@ ovirt_datacenters:
|
|||||||
|
|
||||||
import traceback
|
import traceback
|
||||||
|
|
||||||
from ansible.module_utils.common.removed import removed_module
|
|
||||||
from ansible.module_utils.basic import AnsibleModule
|
from ansible.module_utils.basic import AnsibleModule
|
||||||
from ansible_collections.community.general.plugins.module_utils._ovirt import (
|
from ansible_collections.community.general.plugins.module_utils._ovirt import (
|
||||||
check_sdk,
|
check_sdk,
|
||||||
|
|||||||
@@ -72,7 +72,6 @@ ovirt_disks:
|
|||||||
|
|
||||||
import traceback
|
import traceback
|
||||||
|
|
||||||
from ansible.module_utils.common.removed import removed_module
|
|
||||||
from ansible.module_utils.basic import AnsibleModule
|
from ansible.module_utils.basic import AnsibleModule
|
||||||
from ansible_collections.community.general.plugins.module_utils._ovirt import (
|
from ansible_collections.community.general.plugins.module_utils._ovirt import (
|
||||||
check_sdk,
|
check_sdk,
|
||||||
|
|||||||
@@ -103,7 +103,6 @@ ovirt_events:
|
|||||||
|
|
||||||
import traceback
|
import traceback
|
||||||
|
|
||||||
from ansible.module_utils.common.removed import removed_module
|
|
||||||
from ansible.module_utils.basic import AnsibleModule
|
from ansible.module_utils.basic import AnsibleModule
|
||||||
from ansible_collections.community.general.plugins.module_utils._ovirt import (
|
from ansible_collections.community.general.plugins.module_utils._ovirt import (
|
||||||
check_sdk,
|
check_sdk,
|
||||||
|
|||||||
@@ -87,7 +87,6 @@ ovirt_external_providers:
|
|||||||
import fnmatch
|
import fnmatch
|
||||||
import traceback
|
import traceback
|
||||||
|
|
||||||
from ansible.module_utils.common.removed import removed_module
|
|
||||||
from ansible.module_utils.basic import AnsibleModule
|
from ansible.module_utils.basic import AnsibleModule
|
||||||
from ansible_collections.community.general.plugins.module_utils._ovirt import (
|
from ansible_collections.community.general.plugins.module_utils._ovirt import (
|
||||||
check_sdk,
|
check_sdk,
|
||||||
|
|||||||
@@ -71,7 +71,6 @@ ovirt_groups:
|
|||||||
|
|
||||||
import traceback
|
import traceback
|
||||||
|
|
||||||
from ansible.module_utils.common.removed import removed_module
|
|
||||||
from ansible.module_utils.basic import AnsibleModule
|
from ansible.module_utils.basic import AnsibleModule
|
||||||
from ansible_collections.community.general.plugins.module_utils._ovirt import (
|
from ansible_collections.community.general.plugins.module_utils._ovirt import (
|
||||||
check_sdk,
|
check_sdk,
|
||||||
|
|||||||
@@ -78,7 +78,6 @@ ovirt_hosts:
|
|||||||
|
|
||||||
import traceback
|
import traceback
|
||||||
|
|
||||||
from ansible.module_utils.common.removed import removed_module
|
|
||||||
from ansible.module_utils.basic import AnsibleModule
|
from ansible.module_utils.basic import AnsibleModule
|
||||||
from ansible_collections.community.general.plugins.module_utils._ovirt import (
|
from ansible_collections.community.general.plugins.module_utils._ovirt import (
|
||||||
check_sdk,
|
check_sdk,
|
||||||
|
|||||||
@@ -92,7 +92,6 @@ try:
|
|||||||
except ImportError:
|
except ImportError:
|
||||||
pass
|
pass
|
||||||
|
|
||||||
from ansible.module_utils.common.removed import removed_module
|
|
||||||
from ansible.module_utils.basic import AnsibleModule
|
from ansible.module_utils.basic import AnsibleModule
|
||||||
from ansible_collections.community.general.plugins.module_utils._ovirt import (
|
from ansible_collections.community.general.plugins.module_utils._ovirt import (
|
||||||
check_sdk,
|
check_sdk,
|
||||||
|
|||||||
@@ -73,7 +73,6 @@ ovirt_networks:
|
|||||||
|
|
||||||
import traceback
|
import traceback
|
||||||
|
|
||||||
from ansible.module_utils.common.removed import removed_module
|
|
||||||
from ansible.module_utils.basic import AnsibleModule
|
from ansible.module_utils.basic import AnsibleModule
|
||||||
from ansible_collections.community.general.plugins.module_utils._ovirt import (
|
from ansible_collections.community.general.plugins.module_utils._ovirt import (
|
||||||
check_sdk,
|
check_sdk,
|
||||||
|
|||||||
@@ -76,7 +76,6 @@ ovirt_nics:
|
|||||||
import fnmatch
|
import fnmatch
|
||||||
import traceback
|
import traceback
|
||||||
|
|
||||||
from ansible.module_utils.common.removed import removed_module
|
|
||||||
from ansible.module_utils.basic import AnsibleModule
|
from ansible.module_utils.basic import AnsibleModule
|
||||||
from ansible_collections.community.general.plugins.module_utils._ovirt import (
|
from ansible_collections.community.general.plugins.module_utils._ovirt import (
|
||||||
check_sdk,
|
check_sdk,
|
||||||
|
|||||||
@@ -88,7 +88,6 @@ try:
|
|||||||
except ImportError:
|
except ImportError:
|
||||||
pass
|
pass
|
||||||
|
|
||||||
from ansible.module_utils.common.removed import removed_module
|
|
||||||
from ansible.module_utils.basic import AnsibleModule
|
from ansible.module_utils.basic import AnsibleModule
|
||||||
from ansible_collections.community.general.plugins.module_utils._ovirt import (
|
from ansible_collections.community.general.plugins.module_utils._ovirt import (
|
||||||
check_sdk,
|
check_sdk,
|
||||||
|
|||||||
@@ -76,7 +76,6 @@ ovirt_quotas:
|
|||||||
import fnmatch
|
import fnmatch
|
||||||
import traceback
|
import traceback
|
||||||
|
|
||||||
from ansible.module_utils.common.removed import removed_module
|
|
||||||
from ansible.module_utils.basic import AnsibleModule
|
from ansible.module_utils.basic import AnsibleModule
|
||||||
from ansible_collections.community.general.plugins.module_utils._ovirt import (
|
from ansible_collections.community.general.plugins.module_utils._ovirt import (
|
||||||
check_sdk,
|
check_sdk,
|
||||||
|
|||||||
@@ -65,7 +65,6 @@ ovirt_snapshots:
|
|||||||
import fnmatch
|
import fnmatch
|
||||||
import traceback
|
import traceback
|
||||||
|
|
||||||
from ansible.module_utils.common.removed import removed_module
|
|
||||||
from ansible.module_utils.basic import AnsibleModule
|
from ansible.module_utils.basic import AnsibleModule
|
||||||
from ansible_collections.community.general.plugins.module_utils._ovirt import (
|
from ansible_collections.community.general.plugins.module_utils._ovirt import (
|
||||||
check_sdk,
|
check_sdk,
|
||||||
|
|||||||
@@ -74,7 +74,6 @@ ovirt_storage_domains:
|
|||||||
|
|
||||||
import traceback
|
import traceback
|
||||||
|
|
||||||
from ansible.module_utils.common.removed import removed_module
|
|
||||||
from ansible.module_utils.basic import AnsibleModule
|
from ansible.module_utils.basic import AnsibleModule
|
||||||
from ansible_collections.community.general.plugins.module_utils._ovirt import (
|
from ansible_collections.community.general.plugins.module_utils._ovirt import (
|
||||||
check_sdk,
|
check_sdk,
|
||||||
|
|||||||
@@ -79,7 +79,6 @@ ovirt_storage_templates:
|
|||||||
|
|
||||||
import traceback
|
import traceback
|
||||||
|
|
||||||
from ansible.module_utils.common.removed import removed_module
|
|
||||||
from ansible.module_utils.basic import AnsibleModule
|
from ansible.module_utils.basic import AnsibleModule
|
||||||
from ansible_collections.community.general.plugins.module_utils._ovirt import (
|
from ansible_collections.community.general.plugins.module_utils._ovirt import (
|
||||||
check_sdk,
|
check_sdk,
|
||||||
|
|||||||
@@ -79,7 +79,6 @@ ovirt_storage_vms:
|
|||||||
|
|
||||||
import traceback
|
import traceback
|
||||||
|
|
||||||
from ansible.module_utils.common.removed import removed_module
|
|
||||||
from ansible.module_utils.basic import AnsibleModule
|
from ansible.module_utils.basic import AnsibleModule
|
||||||
from ansible_collections.community.general.plugins.module_utils._ovirt import (
|
from ansible_collections.community.general.plugins.module_utils._ovirt import (
|
||||||
check_sdk,
|
check_sdk,
|
||||||
|
|||||||
@@ -95,7 +95,6 @@ ovirt_tags:
|
|||||||
import fnmatch
|
import fnmatch
|
||||||
import traceback
|
import traceback
|
||||||
|
|
||||||
from ansible.module_utils.common.removed import removed_module
|
|
||||||
from ansible.module_utils.basic import AnsibleModule
|
from ansible.module_utils.basic import AnsibleModule
|
||||||
from ansible_collections.community.general.plugins.module_utils._ovirt import (
|
from ansible_collections.community.general.plugins.module_utils._ovirt import (
|
||||||
check_sdk,
|
check_sdk,
|
||||||
|
|||||||
@@ -72,7 +72,6 @@ ovirt_templates:
|
|||||||
|
|
||||||
import traceback
|
import traceback
|
||||||
|
|
||||||
from ansible.module_utils.common.removed import removed_module
|
|
||||||
from ansible.module_utils.basic import AnsibleModule
|
from ansible.module_utils.basic import AnsibleModule
|
||||||
from ansible_collections.community.general.plugins.module_utils._ovirt import (
|
from ansible_collections.community.general.plugins.module_utils._ovirt import (
|
||||||
check_sdk,
|
check_sdk,
|
||||||
|
|||||||
@@ -71,7 +71,6 @@ ovirt_users:
|
|||||||
|
|
||||||
import traceback
|
import traceback
|
||||||
|
|
||||||
from ansible.module_utils.common.removed import removed_module
|
|
||||||
from ansible.module_utils.basic import AnsibleModule
|
from ansible.module_utils.basic import AnsibleModule
|
||||||
from ansible_collections.community.general.plugins.module_utils._ovirt import (
|
from ansible_collections.community.general.plugins.module_utils._ovirt import (
|
||||||
check_sdk,
|
check_sdk,
|
||||||
|
|||||||
@@ -102,7 +102,6 @@ ovirt_vms:
|
|||||||
|
|
||||||
import traceback
|
import traceback
|
||||||
|
|
||||||
from ansible.module_utils.common.removed import removed_module
|
|
||||||
from ansible.module_utils.basic import AnsibleModule
|
from ansible.module_utils.basic import AnsibleModule
|
||||||
from ansible_collections.community.general.plugins.module_utils._ovirt import (
|
from ansible_collections.community.general.plugins.module_utils._ovirt import (
|
||||||
check_sdk,
|
check_sdk,
|
||||||
|
|||||||
@@ -71,7 +71,6 @@ ovirt_vm_pools:
|
|||||||
|
|
||||||
import traceback
|
import traceback
|
||||||
|
|
||||||
from ansible.module_utils.common.removed import removed_module
|
|
||||||
from ansible.module_utils.basic import AnsibleModule
|
from ansible.module_utils.basic import AnsibleModule
|
||||||
from ansible_collections.community.general.plugins.module_utils._ovirt import (
|
from ansible_collections.community.general.plugins.module_utils._ovirt import (
|
||||||
check_sdk,
|
check_sdk,
|
||||||
|
|||||||
@@ -583,7 +583,7 @@ def main():
|
|||||||
volume_size=dict(type='int', default=10),
|
volume_size=dict(type='int', default=10),
|
||||||
disk_type=dict(choices=['HDD', 'SSD'], default='HDD'),
|
disk_type=dict(choices=['HDD', 'SSD'], default='HDD'),
|
||||||
image_password=dict(default=None, no_log=True),
|
image_password=dict(default=None, no_log=True),
|
||||||
ssh_keys=dict(type='list', elements='str', default=[]),
|
ssh_keys=dict(type='list', elements='str', default=[], no_log=False),
|
||||||
bus=dict(choices=['VIRTIO', 'IDE'], default='VIRTIO'),
|
bus=dict(choices=['VIRTIO', 'IDE'], default='VIRTIO'),
|
||||||
lan=dict(type='int', default=1),
|
lan=dict(type='int', default=1),
|
||||||
count=dict(type='int', default=1),
|
count=dict(type='int', default=1),
|
||||||
|
|||||||
@@ -376,7 +376,7 @@ def main():
|
|||||||
bus=dict(choices=['VIRTIO', 'IDE'], default='VIRTIO'),
|
bus=dict(choices=['VIRTIO', 'IDE'], default='VIRTIO'),
|
||||||
image=dict(),
|
image=dict(),
|
||||||
image_password=dict(no_log=True),
|
image_password=dict(no_log=True),
|
||||||
ssh_keys=dict(type='list', elements='str', default=[]),
|
ssh_keys=dict(type='list', elements='str', default=[], no_log=False),
|
||||||
disk_type=dict(choices=['HDD', 'SSD'], default='HDD'),
|
disk_type=dict(choices=['HDD', 'SSD'], default='HDD'),
|
||||||
licence_type=dict(default='UNKNOWN'),
|
licence_type=dict(default='UNKNOWN'),
|
||||||
count=dict(type='int', default=1),
|
count=dict(type='int', default=1),
|
||||||
|
|||||||
@@ -549,7 +549,7 @@ def main():
|
|||||||
password=dict(default='', required=False, type='str', no_log=True),
|
password=dict(default='', required=False, type='str', no_log=True),
|
||||||
account=dict(default='', required=False, type='str'),
|
account=dict(default='', required=False, type='str'),
|
||||||
application=dict(required=True, type='str'),
|
application=dict(required=True, type='str'),
|
||||||
keyset=dict(required=True, type='str'),
|
keyset=dict(required=True, type='str', no_log=False),
|
||||||
state=dict(default='present', type='str',
|
state=dict(default='present', type='str',
|
||||||
choices=['started', 'stopped', 'present', 'absent']),
|
choices=['started', 'stopped', 'present', 'absent']),
|
||||||
name=dict(required=True, type='str'), description=dict(type='str'),
|
name=dict(required=True, type='str'), description=dict(type='str'),
|
||||||
|
|||||||
@@ -110,6 +110,7 @@ options:
|
|||||||
with this image
|
with this image
|
||||||
instance_ids:
|
instance_ids:
|
||||||
type: list
|
type: list
|
||||||
|
elements: str
|
||||||
description:
|
description:
|
||||||
- list of instance ids, currently only used when state='absent' to
|
- list of instance ids, currently only used when state='absent' to
|
||||||
remove instances
|
remove instances
|
||||||
@@ -129,6 +130,7 @@ options:
|
|||||||
- Name to give the instance
|
- Name to give the instance
|
||||||
networks:
|
networks:
|
||||||
type: list
|
type: list
|
||||||
|
elements: str
|
||||||
description:
|
description:
|
||||||
- The network to attach to the instances. If specified, you must include
|
- The network to attach to the instances. If specified, you must include
|
||||||
ALL networks including the public and private interfaces. Can be C(id)
|
ALL networks including the public and private interfaces. Can be C(id)
|
||||||
@@ -810,11 +812,11 @@ def main():
|
|||||||
flavor=dict(),
|
flavor=dict(),
|
||||||
group=dict(),
|
group=dict(),
|
||||||
image=dict(),
|
image=dict(),
|
||||||
instance_ids=dict(type='list'),
|
instance_ids=dict(type='list', elements='str'),
|
||||||
key_name=dict(aliases=['keypair']),
|
key_name=dict(aliases=['keypair']),
|
||||||
meta=dict(type='dict', default={}),
|
meta=dict(type='dict', default={}),
|
||||||
name=dict(),
|
name=dict(),
|
||||||
networks=dict(type='list', default=['public', 'private']),
|
networks=dict(type='list', elements='str', default=['public', 'private']),
|
||||||
service=dict(),
|
service=dict(),
|
||||||
state=dict(default='present', choices=['present', 'absent']),
|
state=dict(default='present', choices=['present', 'absent']),
|
||||||
user_data=dict(no_log=True),
|
user_data=dict(no_log=True),
|
||||||
|
|||||||
@@ -30,6 +30,7 @@ options:
|
|||||||
required: yes
|
required: yes
|
||||||
databases:
|
databases:
|
||||||
type: list
|
type: list
|
||||||
|
elements: str
|
||||||
description:
|
description:
|
||||||
- Name of the databases that the user can access
|
- Name of the databases that the user can access
|
||||||
default: []
|
default: []
|
||||||
@@ -189,7 +190,7 @@ def main():
|
|||||||
cdb_id=dict(type='str', required=True),
|
cdb_id=dict(type='str', required=True),
|
||||||
db_username=dict(type='str', required=True),
|
db_username=dict(type='str', required=True),
|
||||||
db_password=dict(type='str', required=True, no_log=True),
|
db_password=dict(type='str', required=True, no_log=True),
|
||||||
databases=dict(type='list', default=[]),
|
databases=dict(type='list', elements='str', default=[]),
|
||||||
host=dict(type='str', default='%'),
|
host=dict(type='str', default='%'),
|
||||||
state=dict(default='present', choices=['present', 'absent'])
|
state=dict(default='present', choices=['present', 'absent'])
|
||||||
)
|
)
|
||||||
|
|||||||
@@ -53,6 +53,7 @@ options:
|
|||||||
- key pair to use on the instance
|
- key pair to use on the instance
|
||||||
loadbalancers:
|
loadbalancers:
|
||||||
type: list
|
type: list
|
||||||
|
elements: dict
|
||||||
description:
|
description:
|
||||||
- List of load balancer C(id) and C(port) hashes
|
- List of load balancer C(id) and C(port) hashes
|
||||||
max_entities:
|
max_entities:
|
||||||
@@ -78,6 +79,7 @@ options:
|
|||||||
required: true
|
required: true
|
||||||
networks:
|
networks:
|
||||||
type: list
|
type: list
|
||||||
|
elements: str
|
||||||
description:
|
description:
|
||||||
- The network to attach to the instances. If specified, you must include
|
- The network to attach to the instances. If specified, you must include
|
||||||
ALL networks including the public and private interfaces. Can be C(id)
|
ALL networks including the public and private interfaces. Can be C(id)
|
||||||
@@ -376,12 +378,12 @@ def main():
|
|||||||
flavor=dict(required=True),
|
flavor=dict(required=True),
|
||||||
image=dict(required=True),
|
image=dict(required=True),
|
||||||
key_name=dict(),
|
key_name=dict(),
|
||||||
loadbalancers=dict(type='list'),
|
loadbalancers=dict(type='list', elements='dict'),
|
||||||
meta=dict(type='dict', default={}),
|
meta=dict(type='dict', default={}),
|
||||||
min_entities=dict(type='int', required=True),
|
min_entities=dict(type='int', required=True),
|
||||||
max_entities=dict(type='int', required=True),
|
max_entities=dict(type='int', required=True),
|
||||||
name=dict(required=True),
|
name=dict(required=True),
|
||||||
networks=dict(type='list', default=['public', 'private']),
|
networks=dict(type='list', elements='str', default=['public', 'private']),
|
||||||
server_name=dict(required=True),
|
server_name=dict(required=True),
|
||||||
state=dict(default='present', choices=['present', 'absent']),
|
state=dict(default='present', choices=['present', 'absent']),
|
||||||
user_data=dict(no_log=True),
|
user_data=dict(no_log=True),
|
||||||
|
|||||||
@@ -119,20 +119,13 @@ class NicTag(object):
|
|||||||
return is_mac(self.mac.lower())
|
return is_mac(self.mac.lower())
|
||||||
|
|
||||||
def nictag_exists(self):
|
def nictag_exists(self):
|
||||||
cmd = [self.nictagadm_bin]
|
cmd = [self.nictagadm_bin, 'exists', self.name]
|
||||||
|
|
||||||
cmd.append('exists')
|
|
||||||
cmd.append(self.name)
|
|
||||||
|
|
||||||
(rc, dummy, dummy) = self.module.run_command(cmd)
|
(rc, dummy, dummy) = self.module.run_command(cmd)
|
||||||
|
|
||||||
return rc == 0
|
return rc == 0
|
||||||
|
|
||||||
def add_nictag(self):
|
def add_nictag(self):
|
||||||
cmd = [self.nictagadm_bin]
|
cmd = [self.nictagadm_bin, '-v', 'add']
|
||||||
|
|
||||||
cmd.append('-v')
|
|
||||||
cmd.append('add')
|
|
||||||
|
|
||||||
if self.etherstub:
|
if self.etherstub:
|
||||||
cmd.append('-l')
|
cmd.append('-l')
|
||||||
@@ -150,10 +143,7 @@ class NicTag(object):
|
|||||||
return self.module.run_command(cmd)
|
return self.module.run_command(cmd)
|
||||||
|
|
||||||
def delete_nictag(self):
|
def delete_nictag(self):
|
||||||
cmd = [self.nictagadm_bin]
|
cmd = [self.nictagadm_bin, '-v', 'delete']
|
||||||
|
|
||||||
cmd.append('-v')
|
|
||||||
cmd.append('delete')
|
|
||||||
|
|
||||||
if self.force:
|
if self.force:
|
||||||
cmd.append('-f')
|
cmd.append('-f')
|
||||||
|
|||||||
@@ -24,6 +24,7 @@ options:
|
|||||||
manifest and 'published_date', 'published', 'source', 'clones',
|
manifest and 'published_date', 'published', 'source', 'clones',
|
||||||
and 'size'. More information can be found at U(https://smartos.org/man/1m/imgadm)
|
and 'size'. More information can be found at U(https://smartos.org/man/1m/imgadm)
|
||||||
under 'imgadm list'.
|
under 'imgadm list'.
|
||||||
|
type: str
|
||||||
'''
|
'''
|
||||||
|
|
||||||
EXAMPLES = '''
|
EXAMPLES = '''
|
||||||
@@ -71,10 +72,7 @@ class ImageFacts(object):
|
|||||||
self.filters = module.params['filters']
|
self.filters = module.params['filters']
|
||||||
|
|
||||||
def return_all_installed_images(self):
|
def return_all_installed_images(self):
|
||||||
cmd = [self.module.get_bin_path('imgadm')]
|
cmd = [self.module.get_bin_path('imgadm'), 'list', '-j']
|
||||||
|
|
||||||
cmd.append('list')
|
|
||||||
cmd.append('-j')
|
|
||||||
|
|
||||||
if self.filters:
|
if self.filters:
|
||||||
cmd.append(self.filters)
|
cmd.append(self.filters)
|
||||||
|
|||||||
@@ -233,7 +233,7 @@ options:
|
|||||||
description:
|
description:
|
||||||
- List of resolvers to be put into C(/etc/resolv.conf).
|
- List of resolvers to be put into C(/etc/resolv.conf).
|
||||||
type: list
|
type: list
|
||||||
elements: dict
|
elements: str
|
||||||
routes:
|
routes:
|
||||||
required: false
|
required: false
|
||||||
description:
|
description:
|
||||||
@@ -702,7 +702,7 @@ def main():
|
|||||||
vnc_password=dict(type='str', no_log=True),
|
vnc_password=dict(type='str', no_log=True),
|
||||||
disks=dict(type='list', elements='dict'),
|
disks=dict(type='list', elements='dict'),
|
||||||
nics=dict(type='list', elements='dict'),
|
nics=dict(type='list', elements='dict'),
|
||||||
resolvers=dict(type='list', elements='dict'),
|
resolvers=dict(type='list', elements='str'),
|
||||||
filesystems=dict(type='list', elements='dict'),
|
filesystems=dict(type='list', elements='dict'),
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|||||||
@@ -404,7 +404,7 @@ def main():
|
|||||||
nic_speed=dict(type='int', choices=NIC_SPEEDS),
|
nic_speed=dict(type='int', choices=NIC_SPEEDS),
|
||||||
public_vlan=dict(type='str'),
|
public_vlan=dict(type='str'),
|
||||||
private_vlan=dict(type='str'),
|
private_vlan=dict(type='str'),
|
||||||
ssh_keys=dict(type='list', elements='str', default=[]),
|
ssh_keys=dict(type='list', elements='str', default=[], no_log=False),
|
||||||
post_uri=dict(type='str'),
|
post_uri=dict(type='str'),
|
||||||
state=dict(type='str', default='present', choices=STATES),
|
state=dict(type='str', default='present', choices=STATES),
|
||||||
wait=dict(type='bool', default=True),
|
wait=dict(type='bool', default=True),
|
||||||
|
|||||||
@@ -1448,7 +1448,7 @@ def main():
|
|||||||
iam_role_arn=dict(type='str'),
|
iam_role_arn=dict(type='str'),
|
||||||
iam_role_name=dict(type='str'),
|
iam_role_name=dict(type='str'),
|
||||||
image_id=dict(type='str', required=True),
|
image_id=dict(type='str', required=True),
|
||||||
key_pair=dict(type='str'),
|
key_pair=dict(type='str', no_log=False),
|
||||||
kubernetes=dict(type='dict'),
|
kubernetes=dict(type='dict'),
|
||||||
lifetime_period=dict(type='int'),
|
lifetime_period=dict(type='int'),
|
||||||
load_balancers=dict(type='list'),
|
load_balancers=dict(type='list'),
|
||||||
|
|||||||
@@ -4,7 +4,7 @@
|
|||||||
# Copyright: (c) 2018, Bojan Vitnik <bvitnik@mainstream.rs>
|
# Copyright: (c) 2018, Bojan Vitnik <bvitnik@mainstream.rs>
|
||||||
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
|
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||||
|
|
||||||
from __future__ import absolute_import, division, print_function
|
from __future__ import (absolute_import, division, print_function)
|
||||||
__metaclass__ = type
|
__metaclass__ = type
|
||||||
|
|
||||||
DOCUMENTATION = r'''
|
DOCUMENTATION = r'''
|
||||||
@@ -24,14 +24,14 @@ notes:
|
|||||||
Citrix Hypervisor/XenServer SDK (downloadable from Citrix website). Copy the XenAPI.py file from the SDK to your Python site-packages on your
|
Citrix Hypervisor/XenServer SDK (downloadable from Citrix website). Copy the XenAPI.py file from the SDK to your Python site-packages on your
|
||||||
Ansible Control Node to use it. Latest version of the library can also be acquired from GitHub:
|
Ansible Control Node to use it. Latest version of the library can also be acquired from GitHub:
|
||||||
U(https://raw.githubusercontent.com/xapi-project/xen-api/master/scripts/examples/python/XenAPI/XenAPI.py)'
|
U(https://raw.githubusercontent.com/xapi-project/xen-api/master/scripts/examples/python/XenAPI/XenAPI.py)'
|
||||||
- 'If no scheme is specified in C(hostname), module defaults to C(http://) because C(https://) is problematic in most setups. Make sure you are
|
- 'If no scheme is specified in I(hostname), module defaults to C(http://) because C(https://) is problematic in most setups. Make sure you are
|
||||||
accessing XenServer host in trusted environment or use C(https://) scheme explicitly.'
|
accessing XenServer host in trusted environment or use C(https://) scheme explicitly.'
|
||||||
- 'To use C(https://) scheme for C(hostname) you have to either import host certificate to your OS certificate store or use C(validate_certs: no)
|
- 'To use C(https://) scheme for I(hostname) you have to either import host certificate to your OS certificate store or use I(validate_certs): C(no)
|
||||||
which requires XenAPI library from XenServer 7.2 SDK or newer and Python 2.7.9 or newer.'
|
which requires XenAPI library from XenServer 7.2 SDK or newer and Python 2.7.9 or newer.'
|
||||||
- 'Network configuration inside a guest OS, by using C(networks.type), C(networks.ip), C(networks.gateway) etc. parameters, is supported on
|
- 'Network configuration inside a guest OS, by using I(networks.type), I(networks.ip), I(networks.gateway) etc. parameters, is supported on
|
||||||
XenServer 7.0 or newer for Windows guests by using official XenServer Guest agent support for network configuration. The module will try to
|
XenServer 7.0 or newer for Windows guests by using official XenServer Guest agent support for network configuration. The module will try to
|
||||||
detect if such support is available and utilize it, else it will use a custom method of configuration via xenstore. Since XenServer Guest
|
detect if such support is available and utilize it, else it will use a custom method of configuration via xenstore. Since XenServer Guest
|
||||||
agent only support None and Static types of network configuration, where None means DHCP configured interface, C(networks.type) and C(networks.type6)
|
agent only support None and Static types of network configuration, where None means DHCP configured interface, I(networks.type) and I(networks.type6)
|
||||||
values C(none) and C(dhcp) have same effect. More info here:
|
values C(none) and C(dhcp) have same effect. More info here:
|
||||||
U(https://www.citrix.com/community/citrix-developer/citrix-hypervisor-developer/citrix-hypervisor-developing-products/citrix-hypervisor-staticip.html)'
|
U(https://www.citrix.com/community/citrix-developer/citrix-hypervisor-developer/citrix-hypervisor-developing-products/citrix-hypervisor-staticip.html)'
|
||||||
- 'On platforms without official support for network configuration inside a guest OS, network parameters will be written to xenstore
|
- 'On platforms without official support for network configuration inside a guest OS, network parameters will be written to xenstore
|
||||||
@@ -49,10 +49,10 @@ options:
|
|||||||
state:
|
state:
|
||||||
description:
|
description:
|
||||||
- Specify the state VM should be in.
|
- Specify the state VM should be in.
|
||||||
- If C(state) is set to C(present) and VM exists, ensure the VM configuration conforms to given parameters.
|
- If I(state) is set to C(present) and VM exists, ensure the VM configuration conforms to given parameters.
|
||||||
- If C(state) is set to C(present) and VM does not exist, then VM is deployed with given parameters.
|
- If I(state) is set to C(present) and VM does not exist, then VM is deployed with given parameters.
|
||||||
- If C(state) is set to C(absent) and VM exists, then VM is removed with its associated components.
|
- If I(state) is set to C(absent) and VM exists, then VM is removed with its associated components.
|
||||||
- If C(state) is set to C(poweredon) and VM does not exist, then VM is deployed with given parameters and powered on automatically.
|
- If I(state) is set to C(poweredon) and VM does not exist, then VM is deployed with given parameters and powered on automatically.
|
||||||
type: str
|
type: str
|
||||||
default: present
|
default: present
|
||||||
choices: [ present, absent, poweredon ]
|
choices: [ present, absent, poweredon ]
|
||||||
@@ -60,10 +60,9 @@ options:
|
|||||||
description:
|
description:
|
||||||
- Name of the VM to work with.
|
- Name of the VM to work with.
|
||||||
- VMs running on XenServer do not necessarily have unique names. The module will fail if multiple VMs with same name are found.
|
- VMs running on XenServer do not necessarily have unique names. The module will fail if multiple VMs with same name are found.
|
||||||
- In case of multiple VMs with same name, use C(uuid) to uniquely specify VM to manage.
|
- In case of multiple VMs with same name, use I(uuid) to uniquely specify VM to manage.
|
||||||
- This parameter is case sensitive.
|
- This parameter is case sensitive.
|
||||||
type: str
|
type: str
|
||||||
required: yes
|
|
||||||
aliases: [ name_label ]
|
aliases: [ name_label ]
|
||||||
name_desc:
|
name_desc:
|
||||||
description:
|
description:
|
||||||
@@ -79,7 +78,7 @@ options:
|
|||||||
description:
|
description:
|
||||||
- Name of a template, an existing VM (must be shut down) or a snapshot that should be used to create VM.
|
- Name of a template, an existing VM (must be shut down) or a snapshot that should be used to create VM.
|
||||||
- Templates/VMs/snapshots on XenServer do not necessarily have unique names. The module will fail if multiple templates with same name are found.
|
- Templates/VMs/snapshots on XenServer do not necessarily have unique names. The module will fail if multiple templates with same name are found.
|
||||||
- In case of multiple templates/VMs/snapshots with same name, use C(template_uuid) to uniquely specify source template.
|
- In case of multiple templates/VMs/snapshots with same name, use I(template_uuid) to uniquely specify source template.
|
||||||
- If VM already exists, this setting will be ignored.
|
- If VM already exists, this setting will be ignored.
|
||||||
- This parameter is case sensitive.
|
- This parameter is case sensitive.
|
||||||
type: str
|
type: str
|
||||||
@@ -104,56 +103,138 @@ options:
|
|||||||
hardware:
|
hardware:
|
||||||
description:
|
description:
|
||||||
- Manage VM's hardware parameters. VM needs to be shut down to reconfigure these parameters.
|
- Manage VM's hardware parameters. VM needs to be shut down to reconfigure these parameters.
|
||||||
- 'Valid parameters are:'
|
|
||||||
- ' - C(num_cpus) (integer): Number of CPUs.'
|
|
||||||
- ' - C(num_cpu_cores_per_socket) (integer): Number of Cores Per Socket. C(num_cpus) has to be a multiple of C(num_cpu_cores_per_socket).'
|
|
||||||
- ' - C(memory_mb) (integer): Amount of memory in MB.'
|
|
||||||
type: dict
|
type: dict
|
||||||
|
suboptions:
|
||||||
|
num_cpus:
|
||||||
|
description:
|
||||||
|
- Number of CPUs.
|
||||||
|
type: int
|
||||||
|
num_cpu_cores_per_socket:
|
||||||
|
description:
|
||||||
|
- Number of Cores Per Socket. I(num_cpus) has to be a multiple of I(num_cpu_cores_per_socket).
|
||||||
|
type: int
|
||||||
|
memory_mb:
|
||||||
|
description:
|
||||||
|
- Amount of memory in MB.
|
||||||
|
type: int
|
||||||
disks:
|
disks:
|
||||||
description:
|
description:
|
||||||
- A list of disks to add to VM.
|
- A list of disks to add to VM.
|
||||||
- All parameters are case sensitive.
|
- All parameters are case sensitive.
|
||||||
- Removing or detaching existing disks of VM is not supported.
|
- Removing or detaching existing disks of VM is not supported.
|
||||||
- 'Required parameters per entry:'
|
- New disks are required to have either a I(size) or one of I(size_[tb,gb,mb,kb,b]) parameters specified.
|
||||||
- ' - C(size_[tb,gb,mb,kb,b]) (integer): Disk storage size in specified unit. VM needs to be shut down to reconfigure this parameter.'
|
- VM needs to be shut down to reconfigure disk size.
|
||||||
- 'Optional parameters per entry:'
|
|
||||||
- ' - C(name) (string): Disk name. You can also use C(name_label) as an alias.'
|
|
||||||
- ' - C(name_desc) (string): Disk description.'
|
|
||||||
- ' - C(sr) (string): Storage Repository to create disk on. If not specified, will use default SR. Cannot be used for moving disk to other SR.'
|
|
||||||
- ' - C(sr_uuid) (string): UUID of a SR to create disk on. Use if SR name is not unique.'
|
|
||||||
type: list
|
type: list
|
||||||
elements: dict
|
elements: dict
|
||||||
aliases: [ disk ]
|
aliases: [ disk ]
|
||||||
|
suboptions:
|
||||||
|
size:
|
||||||
|
description:
|
||||||
|
- 'Disk size with unit. Unit must be: C(b), C(kb), C(mb), C(gb), C(tb). VM needs to be shut down to reconfigure this parameter.'
|
||||||
|
- If no unit is specified, size is assumed to be in bytes.
|
||||||
|
type: str
|
||||||
|
size_b:
|
||||||
|
description:
|
||||||
|
- Disk size in bytes.
|
||||||
|
type: str
|
||||||
|
size_kb:
|
||||||
|
description:
|
||||||
|
- Disk size in kilobytes.
|
||||||
|
type: str
|
||||||
|
size_mb:
|
||||||
|
description:
|
||||||
|
- Disk size in megabytes.
|
||||||
|
type: str
|
||||||
|
size_gb:
|
||||||
|
description:
|
||||||
|
- Disk size in gigabytes.
|
||||||
|
type: str
|
||||||
|
size_tb:
|
||||||
|
description:
|
||||||
|
- Disk size in terabytes.
|
||||||
|
type: str
|
||||||
|
name:
|
||||||
|
description:
|
||||||
|
- Disk name.
|
||||||
|
type: str
|
||||||
|
aliases: [ name_label ]
|
||||||
|
name_desc:
|
||||||
|
description:
|
||||||
|
- Disk description.
|
||||||
|
type: str
|
||||||
|
sr:
|
||||||
|
description:
|
||||||
|
- Storage Repository to create disk on. If not specified, will use default SR. Cannot be used for moving disk to other SR.
|
||||||
|
type: str
|
||||||
|
sr_uuid:
|
||||||
|
description:
|
||||||
|
- UUID of a SR to create disk on. Use if SR name is not unique.
|
||||||
|
type: str
|
||||||
cdrom:
|
cdrom:
|
||||||
description:
|
description:
|
||||||
- A CD-ROM configuration for the VM.
|
- A CD-ROM configuration for the VM.
|
||||||
- All parameters are case sensitive.
|
- All parameters are case sensitive.
|
||||||
- 'Valid parameters are:'
|
|
||||||
- ' - C(type) (string): The type of CD-ROM, valid options are C(none) or C(iso). With C(none) the CD-ROM device will be present but empty.'
|
|
||||||
- ' - C(iso_name) (string): The file name of an ISO image from one of the XenServer ISO Libraries (implies C(type: iso)).
|
|
||||||
Required if C(type) is set to C(iso).'
|
|
||||||
type: dict
|
type: dict
|
||||||
|
suboptions:
|
||||||
|
type:
|
||||||
|
description:
|
||||||
|
- The type of CD-ROM. With C(none) the CD-ROM device will be present but empty.
|
||||||
|
type: str
|
||||||
|
choices: [ none, iso ]
|
||||||
|
iso_name:
|
||||||
|
description:
|
||||||
|
- 'The file name of an ISO image from one of the XenServer ISO Libraries (implies I(type): C(iso)).'
|
||||||
|
- Required if I(type) is set to C(iso).
|
||||||
|
type: str
|
||||||
networks:
|
networks:
|
||||||
description:
|
description:
|
||||||
- A list of networks (in the order of the NICs).
|
- A list of networks (in the order of the NICs).
|
||||||
- All parameters are case sensitive.
|
- All parameters are case sensitive.
|
||||||
- 'Required parameters per entry:'
|
- Name is required for new NICs. Other parameters are optional in all cases.
|
||||||
- ' - C(name) (string): Name of a XenServer network to attach the network interface to. You can also use C(name_label) as an alias.'
|
|
||||||
- 'Optional parameters per entry (used for VM hardware):'
|
|
||||||
- ' - C(mac) (string): Customize MAC address of the interface.'
|
|
||||||
- 'Optional parameters per entry (used for OS customization):'
|
|
||||||
- ' - C(type) (string): Type of IPv4 assignment, valid options are C(none), C(dhcp) or C(static). Value C(none) means whatever is default for OS.
|
|
||||||
On some operating systems it could be DHCP configured (e.g. Windows) or unconfigured interface (e.g. Linux).'
|
|
||||||
- ' - C(ip) (string): Static IPv4 address (implies C(type: static)). Can include prefix in format <IPv4 address>/<prefix> instead of using C(netmask).'
|
|
||||||
- ' - C(netmask) (string): Static IPv4 netmask required for C(ip) if prefix is not specified.'
|
|
||||||
- ' - C(gateway) (string): Static IPv4 gateway.'
|
|
||||||
- ' - C(type6) (string): Type of IPv6 assignment, valid options are C(none), C(dhcp) or C(static). Value C(none) means whatever is default for OS.
|
|
||||||
On some operating systems it could be DHCP configured (e.g. Windows) or unconfigured interface (e.g. Linux).'
|
|
||||||
- ' - C(ip6) (string): Static IPv6 address (implies C(type6: static)) with prefix in format <IPv6 address>/<prefix>.'
|
|
||||||
- ' - C(gateway6) (string): Static IPv6 gateway.'
|
|
||||||
type: list
|
type: list
|
||||||
elements: dict
|
elements: dict
|
||||||
aliases: [ network ]
|
aliases: [ network ]
|
||||||
|
suboptions:
|
||||||
|
name:
|
||||||
|
description:
|
||||||
|
- Name of a XenServer network to attach the network interface to.
|
||||||
|
type: str
|
||||||
|
aliases: [ name_label ]
|
||||||
|
mac:
|
||||||
|
description:
|
||||||
|
- Customize MAC address of the interface.
|
||||||
|
type: str
|
||||||
|
type:
|
||||||
|
description:
|
||||||
|
- Type of IPv4 assignment. Value C(none) means whatever is default for OS.
|
||||||
|
- On some operating systems it could be DHCP configured (e.g. Windows) or unconfigured interface (e.g. Linux).
|
||||||
|
type: str
|
||||||
|
choices: [ none, dhcp, static ]
|
||||||
|
ip:
|
||||||
|
description:
|
||||||
|
- 'Static IPv4 address (implies I(type): C(static)). Can include prefix in format C(<IPv4 address>/<prefix>) instead of using C(netmask).'
|
||||||
|
type: str
|
||||||
|
netmask:
|
||||||
|
description:
|
||||||
|
- Static IPv4 netmask required for I(ip) if prefix is not specified.
|
||||||
|
type: str
|
||||||
|
gateway:
|
||||||
|
description:
|
||||||
|
- Static IPv4 gateway.
|
||||||
|
type: str
|
||||||
|
type6:
|
||||||
|
description:
|
||||||
|
- Type of IPv6 assignment. Value C(none) means whatever is default for OS.
|
||||||
|
type: str
|
||||||
|
choices: [ none, dhcp, static ]
|
||||||
|
ip6:
|
||||||
|
description:
|
||||||
|
- 'Static IPv6 address (implies I(type6): C(static)) with prefix in format C(<IPv6 address>/<prefix>).'
|
||||||
|
type: str
|
||||||
|
gateway6:
|
||||||
|
description:
|
||||||
|
- Static IPv6 gateway.
|
||||||
|
type: str
|
||||||
home_server:
|
home_server:
|
||||||
description:
|
description:
|
||||||
- Name of a XenServer host that will be a Home Server for the VM.
|
- Name of a XenServer host that will be a Home Server for the VM.
|
||||||
@@ -163,18 +244,29 @@ options:
|
|||||||
description:
|
description:
|
||||||
- Define a list of custom VM params to set on VM.
|
- Define a list of custom VM params to set on VM.
|
||||||
- Useful for advanced users familiar with managing VM params trough xe CLI.
|
- Useful for advanced users familiar with managing VM params trough xe CLI.
|
||||||
- A custom value object takes two fields C(key) and C(value) (see example below).
|
- A custom value object takes two fields I(key) and I(value) (see example below).
|
||||||
type: list
|
type: list
|
||||||
elements: dict
|
elements: dict
|
||||||
|
suboptions:
|
||||||
|
key:
|
||||||
|
description:
|
||||||
|
- VM param name.
|
||||||
|
type: str
|
||||||
|
required: yes
|
||||||
|
value:
|
||||||
|
description:
|
||||||
|
- VM param value.
|
||||||
|
type: raw
|
||||||
|
required: yes
|
||||||
wait_for_ip_address:
|
wait_for_ip_address:
|
||||||
description:
|
description:
|
||||||
- Wait until XenServer detects an IP address for the VM. If C(state) is set to C(absent), this parameter is ignored.
|
- Wait until XenServer detects an IP address for the VM. If I(state) is set to C(absent), this parameter is ignored.
|
||||||
- This requires XenServer Tools to be preinstalled on the VM to work properly.
|
- This requires XenServer Tools to be preinstalled on the VM to work properly.
|
||||||
type: bool
|
type: bool
|
||||||
default: no
|
default: no
|
||||||
state_change_timeout:
|
state_change_timeout:
|
||||||
description:
|
description:
|
||||||
- 'By default, module will wait indefinitely for VM to accquire an IP address if C(wait_for_ip_address: yes).'
|
- 'By default, module will wait indefinitely for VM to accquire an IP address if I(wait_for_ip_address): C(yes).'
|
||||||
- If this parameter is set to positive value, the module will instead wait specified number of seconds for the state change.
|
- If this parameter is set to positive value, the module will instead wait specified number of seconds for the state change.
|
||||||
- In case of timeout, module will generate an error message.
|
- In case of timeout, module will generate an error message.
|
||||||
type: int
|
type: int
|
||||||
@@ -441,11 +533,12 @@ except ImportError:
|
|||||||
from ansible.module_utils.basic import AnsibleModule
|
from ansible.module_utils.basic import AnsibleModule
|
||||||
from ansible.module_utils.common.network import is_mac
|
from ansible.module_utils.common.network import is_mac
|
||||||
from ansible.module_utils import six
|
from ansible.module_utils import six
|
||||||
from ansible_collections.community.general.plugins.module_utils.xenserver import (xenserver_common_argument_spec, XAPI, XenServerObject, get_object_ref,
|
from ansible_collections.community.general.plugins.module_utils.xenserver import (
|
||||||
gather_vm_params, gather_vm_facts, set_vm_power_state,
|
xenserver_common_argument_spec, XenServerObject, get_object_ref,
|
||||||
wait_for_vm_ip_address, is_valid_ip_addr, is_valid_ip_netmask,
|
gather_vm_params, gather_vm_facts, set_vm_power_state,
|
||||||
is_valid_ip_prefix, ip_prefix_to_netmask, ip_netmask_to_prefix,
|
wait_for_vm_ip_address, is_valid_ip_addr, is_valid_ip_netmask,
|
||||||
is_valid_ip6_addr, is_valid_ip6_prefix)
|
is_valid_ip_prefix, ip_prefix_to_netmask, ip_netmask_to_prefix,
|
||||||
|
is_valid_ip6_addr, is_valid_ip6_prefix)
|
||||||
|
|
||||||
|
|
||||||
class XenServerVM(XenServerObject):
|
class XenServerVM(XenServerObject):
|
||||||
@@ -1839,7 +1932,7 @@ def main():
|
|||||||
type='list',
|
type='list',
|
||||||
elements='dict',
|
elements='dict',
|
||||||
options=dict(
|
options=dict(
|
||||||
key=dict(type='str', required=True),
|
key=dict(type='str', required=True, no_log=False),
|
||||||
value=dict(type='raw', required=True),
|
value=dict(type='raw', required=True),
|
||||||
),
|
),
|
||||||
),
|
),
|
||||||
|
|||||||
@@ -189,7 +189,24 @@ from collections import defaultdict
|
|||||||
from ansible.module_utils.basic import to_text, AnsibleModule
|
from ansible.module_utils.basic import to_text, AnsibleModule
|
||||||
|
|
||||||
|
|
||||||
RULE_SCOPES = ["agent", "event", "key", "keyring", "node", "operator", "query", "service", "session"]
|
RULE_SCOPES = [
|
||||||
|
"agent",
|
||||||
|
"agent_prefix",
|
||||||
|
"event",
|
||||||
|
"event_prefix",
|
||||||
|
"key",
|
||||||
|
"key_prefix",
|
||||||
|
"keyring",
|
||||||
|
"node",
|
||||||
|
"node_prefix",
|
||||||
|
"operator",
|
||||||
|
"query",
|
||||||
|
"query_prefix",
|
||||||
|
"service",
|
||||||
|
"service_prefix",
|
||||||
|
"session",
|
||||||
|
"session_prefix",
|
||||||
|
]
|
||||||
|
|
||||||
MANAGEMENT_PARAMETER_NAME = "mgmt_token"
|
MANAGEMENT_PARAMETER_NAME = "mgmt_token"
|
||||||
HOST_PARAMETER_NAME = "host"
|
HOST_PARAMETER_NAME = "host"
|
||||||
@@ -229,7 +246,7 @@ _ARGUMENT_SPEC = {
|
|||||||
PORT_PARAMETER_NAME: dict(default=8500, type='int'),
|
PORT_PARAMETER_NAME: dict(default=8500, type='int'),
|
||||||
RULES_PARAMETER_NAME: dict(type='list', elements='dict'),
|
RULES_PARAMETER_NAME: dict(type='list', elements='dict'),
|
||||||
STATE_PARAMETER_NAME: dict(default=PRESENT_STATE_VALUE, choices=[PRESENT_STATE_VALUE, ABSENT_STATE_VALUE]),
|
STATE_PARAMETER_NAME: dict(default=PRESENT_STATE_VALUE, choices=[PRESENT_STATE_VALUE, ABSENT_STATE_VALUE]),
|
||||||
TOKEN_PARAMETER_NAME: dict(),
|
TOKEN_PARAMETER_NAME: dict(no_log=False),
|
||||||
TOKEN_TYPE_PARAMETER_NAME: dict(choices=[CLIENT_TOKEN_TYPE_VALUE, MANAGEMENT_TOKEN_TYPE_VALUE],
|
TOKEN_TYPE_PARAMETER_NAME: dict(choices=[CLIENT_TOKEN_TYPE_VALUE, MANAGEMENT_TOKEN_TYPE_VALUE],
|
||||||
default=CLIENT_TOKEN_TYPE_VALUE)
|
default=CLIENT_TOKEN_TYPE_VALUE)
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -297,7 +297,7 @@ def main():
|
|||||||
argument_spec=dict(
|
argument_spec=dict(
|
||||||
cas=dict(type='str'),
|
cas=dict(type='str'),
|
||||||
flags=dict(type='str'),
|
flags=dict(type='str'),
|
||||||
key=dict(type='str', required=True),
|
key=dict(type='str', required=True, no_log=False),
|
||||||
host=dict(type='str', default='localhost'),
|
host=dict(type='str', default='localhost'),
|
||||||
scheme=dict(type='str', default='http'),
|
scheme=dict(type='str', default='http'),
|
||||||
validate_certs=dict(type='bool', default=True),
|
validate_certs=dict(type='bool', default=True),
|
||||||
|
|||||||
@@ -134,7 +134,7 @@ def run_module():
|
|||||||
# define the available arguments/parameters that a user can pass to
|
# define the available arguments/parameters that a user can pass to
|
||||||
# the module
|
# the module
|
||||||
module_args = dict(
|
module_args = dict(
|
||||||
key=dict(type='str', required=True),
|
key=dict(type='str', required=True, no_log=False),
|
||||||
value=dict(type='str', required=True),
|
value=dict(type='str', required=True),
|
||||||
host=dict(type='str', default='localhost'),
|
host=dict(type='str', default='localhost'),
|
||||||
port=dict(type='int', default=2379),
|
port=dict(type='int', default=2379),
|
||||||
|
|||||||
@@ -36,13 +36,13 @@ seealso:
|
|||||||
|
|
||||||
EXAMPLES = '''
|
EXAMPLES = '''
|
||||||
- name: Get info for job awx
|
- name: Get info for job awx
|
||||||
community.general.nomad_job:
|
community.general.nomad_job_info:
|
||||||
host: localhost
|
host: localhost
|
||||||
name: awx
|
name: awx
|
||||||
register: result
|
register: result
|
||||||
|
|
||||||
- name: List Nomad jobs
|
- name: List Nomad jobs
|
||||||
community.general.nomad_job:
|
community.general.nomad_job_info:
|
||||||
host: localhost
|
host: localhost
|
||||||
register: result
|
register: result
|
||||||
|
|
||||||
|
|||||||
@@ -190,9 +190,9 @@ def run_module():
|
|||||||
min_cluster_size=dict(type='int', required=False, default=1),
|
min_cluster_size=dict(type='int', required=False, default=1),
|
||||||
target_cluster_size=dict(type='int', required=False, default=None),
|
target_cluster_size=dict(type='int', required=False, default=None),
|
||||||
fail_on_cluster_change=dict(type='bool', required=False, default=True),
|
fail_on_cluster_change=dict(type='bool', required=False, default=True),
|
||||||
migrate_tx_key=dict(type='str', required=False,
|
migrate_tx_key=dict(type='str', required=False, no_log=False,
|
||||||
default="migrate_tx_partitions_remaining"),
|
default="migrate_tx_partitions_remaining"),
|
||||||
migrate_rx_key=dict(type='str', required=False,
|
migrate_rx_key=dict(type='str', required=False, no_log=False,
|
||||||
default="migrate_rx_partitions_remaining")
|
default="migrate_rx_partitions_remaining")
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|||||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user