mirror of
https://github.com/ansible-collections/community.general.git
synced 2026-04-30 10:26:52 +00:00
Compare commits
434 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
a861149a0e | ||
|
|
9a9b0b04a5 | ||
|
|
0a4e9379e2 | ||
|
|
7d1abf5d6a | ||
|
|
7ef25be10c | ||
|
|
0d0194fdf8 | ||
|
|
36f64367cf | ||
|
|
d827601c95 | ||
|
|
2efd31bacf | ||
|
|
6eaf047739 | ||
|
|
80268b0828 | ||
|
|
1e848c56f2 | ||
|
|
f74756d7fc | ||
|
|
138b57230a | ||
|
|
d32193afef | ||
|
|
ef8aa73dab | ||
|
|
28007079a4 | ||
|
|
f0b7233e8d | ||
|
|
48cc39a2b1 | ||
|
|
c34dc24d3a | ||
|
|
0760f60ca5 | ||
|
|
48b1bc7d47 | ||
|
|
769233808d | ||
|
|
7361ca5430 | ||
|
|
2322937a4a | ||
|
|
82225e5850 | ||
|
|
2d237987ae | ||
|
|
dc14070e08 | ||
|
|
feb1c1081e | ||
|
|
20bda07aaf | ||
|
|
1f6aa62210 | ||
|
|
5308f61b78 | ||
|
|
29636c1cc8 | ||
|
|
830734d6cf | ||
|
|
0296c200c7 | ||
|
|
2b435a591d | ||
|
|
ec2c793b08 | ||
|
|
a6bffa274c | ||
|
|
b653a9a84a | ||
|
|
8c209bdedc | ||
|
|
d4b4370ec4 | ||
|
|
e34276fa92 | ||
|
|
59d7850900 | ||
|
|
151b482fe6 | ||
|
|
ea04bb97cb | ||
|
|
5dd64a45de | ||
|
|
4df33d26b1 | ||
|
|
766f2dfe46 | ||
|
|
8a7128997d | ||
|
|
b598ca28f9 | ||
|
|
c943f7aa56 | ||
|
|
9565be5e50 | ||
|
|
725e670b47 | ||
|
|
ffdef00a6a | ||
|
|
92ccc6f013 | ||
|
|
f0c1b1065a | ||
|
|
a44356c966 | ||
|
|
33f9f0b05f | ||
|
|
f0f0704d64 | ||
|
|
55fe140230 | ||
|
|
ac543f5ef0 | ||
|
|
dbc0fe8859 | ||
|
|
42a1318fe3 | ||
|
|
d25352dc06 | ||
|
|
55682c52df | ||
|
|
46781d9fd1 | ||
|
|
4545d1c91e | ||
|
|
6570dfeb7d | ||
|
|
94c368f7df | ||
|
|
4cba1e60d9 | ||
|
|
321fb6c974 | ||
|
|
eb4d7a4199 | ||
|
|
4b07d45b7e | ||
|
|
d4a33433b4 | ||
|
|
e30b91cb8d | ||
|
|
b2b65c431b | ||
|
|
9ade4f6dd6 | ||
|
|
635d4f2138 | ||
|
|
6549e41ab8 | ||
|
|
6faface39e | ||
|
|
3b893ec421 | ||
|
|
65805e2dd6 | ||
|
|
297b50fb96 | ||
|
|
2edadb42fb | ||
|
|
4e1bf2d4ba | ||
|
|
b1a4a0ff21 | ||
|
|
e74ea7c8b8 | ||
|
|
6590f5e082 | ||
|
|
7483f71d31 | ||
|
|
6b215e3a9c | ||
|
|
3723e458d3 | ||
|
|
0f8bb43723 | ||
|
|
f33530dd61 | ||
|
|
8f3043058e | ||
|
|
3987b8a291 | ||
|
|
f7403a0b34 | ||
|
|
0a676406b3 | ||
|
|
5a7d234d80 | ||
|
|
fb9730f75e | ||
|
|
928aeafe1d | ||
|
|
5b68665571 | ||
|
|
e6b84acd1e | ||
|
|
c242993291 | ||
|
|
4f3de5658e | ||
|
|
301fcc3b7e | ||
|
|
0f0e9b2dca | ||
|
|
ed0636dc27 | ||
|
|
057321c6c6 | ||
|
|
1a4814de53 | ||
|
|
89b67a014b | ||
|
|
57bfbdc407 | ||
|
|
e19dffbf29 | ||
|
|
113e7cdfa0 | ||
|
|
c12be67a69 | ||
|
|
3a076fd585 | ||
|
|
4ef05a6483 | ||
|
|
936dd28395 | ||
|
|
e3b47899c5 | ||
|
|
fd8193e0bd | ||
|
|
fa477ebb35 | ||
|
|
43e766dd44 | ||
|
|
b25e0f360c | ||
|
|
658e95c5ca | ||
|
|
26c2876f50 | ||
|
|
62043463f3 | ||
|
|
f1dab6d4a7 | ||
|
|
d43764da79 | ||
|
|
de2feb2567 | ||
|
|
6e56bae0f3 | ||
|
|
1f7047e725 | ||
|
|
b2e4485567 | ||
|
|
b78254fe24 | ||
|
|
38aa0ec8ad | ||
|
|
42f28048a8 | ||
|
|
b699aaff7b | ||
|
|
af85b6c203 | ||
|
|
ec2e7cad3e | ||
|
|
7753fa4219 | ||
|
|
69ea487005 | ||
|
|
048f15fe68 | ||
|
|
aa1aa1d540 | ||
|
|
e78517ca93 | ||
|
|
bf185573a6 | ||
|
|
145435cdd9 | ||
|
|
6013c77c2b | ||
|
|
ad5482f63d | ||
|
|
f5594aefd5 | ||
|
|
ab5b379b30 | ||
|
|
1c5e44c649 | ||
|
|
23da67cc72 | ||
|
|
4032dd6b08 | ||
|
|
4cb6f39a80 | ||
|
|
3539957bac | ||
|
|
e05769d4bf | ||
|
|
19c03cff96 | ||
|
|
703660c81d | ||
|
|
fd32af1ac3 | ||
|
|
80fbcf2f98 | ||
|
|
a722e038cc | ||
|
|
19c8d2164d | ||
|
|
d4656ffca2 | ||
|
|
b49607f12d | ||
|
|
af0ce4284f | ||
|
|
f5f862617a | ||
|
|
a1a4ba4337 | ||
|
|
b0b783f8ff | ||
|
|
e670ca666a | ||
|
|
49b991527e | ||
|
|
e6cc671a0d | ||
|
|
797ea23e50 | ||
|
|
4d23b7a48b | ||
|
|
020b47a1a9 | ||
|
|
0da9d956a0 | ||
|
|
5691e3aff3 | ||
|
|
007333dbfe | ||
|
|
05666b0e4d | ||
|
|
c934d9aeb5 | ||
|
|
5b15e4089a | ||
|
|
a6379e45ce | ||
|
|
b95176dbc8 | ||
|
|
b752fea121 | ||
|
|
cf50990fed | ||
|
|
45343e6bc0 | ||
|
|
51540f6345 | ||
|
|
74eba52028 | ||
|
|
b920e8abf2 | ||
|
|
75c0004e1e | ||
|
|
be42fd4af7 | ||
|
|
1c05908ff6 | ||
|
|
ea42b75378 | ||
|
|
0330f4b52c | ||
|
|
1d8c659ba2 | ||
|
|
e784254679 | ||
|
|
d5e1edd284 | ||
|
|
2cc3ce0230 | ||
|
|
99c564398a | ||
|
|
ffd73296de | ||
|
|
aea12899cc | ||
|
|
1b79440896 | ||
|
|
5195536bd8 | ||
|
|
8ddb81a36f | ||
|
|
399c0ef849 | ||
|
|
f11f6595cc | ||
|
|
2799cd4ac7 | ||
|
|
e0b731e76f | ||
|
|
fb1b756d48 | ||
|
|
31c9ed0fe6 | ||
|
|
6f6b80fd89 | ||
|
|
ca48917b4f | ||
|
|
5ca19086a4 | ||
|
|
96ad40ac1c | ||
|
|
32d071e349 | ||
|
|
8db59ff02d | ||
|
|
e1d28cf052 | ||
|
|
c768060d95 | ||
|
|
68243063d1 | ||
|
|
cecbc2be2d | ||
|
|
fe2757f057 | ||
|
|
8ab19fc50b | ||
|
|
f7928d3eb7 | ||
|
|
fc12eca65d | ||
|
|
0231dad3e8 | ||
|
|
6ab9b05da3 | ||
|
|
5b4fab80e2 | ||
|
|
84a79c3da4 | ||
|
|
49c07dc18b | ||
|
|
7aaa26b591 | ||
|
|
412b4711c3 | ||
|
|
b10d707a8b | ||
|
|
04bf8137fa | ||
|
|
20401c63cd | ||
|
|
eb3ee83146 | ||
|
|
93046e0350 | ||
|
|
fbbd8ecd6f | ||
|
|
d5c26b6f70 | ||
|
|
f87a39b21d | ||
|
|
91a0264f38 | ||
|
|
6a8eb7b388 | ||
|
|
ec9c23437c | ||
|
|
721589827e | ||
|
|
f9e3e229dd | ||
|
|
1400051890 | ||
|
|
118d903e7d | ||
|
|
d09bc2525b | ||
|
|
3a8206fe62 | ||
|
|
f77aa51ab8 | ||
|
|
123b5a9a3c | ||
|
|
085c43b76b | ||
|
|
69a9a77b65 | ||
|
|
f4858d64f4 | ||
|
|
f97d5ca701 | ||
|
|
bfd6d2b3aa | ||
|
|
081c534d40 | ||
|
|
98af8161b2 | ||
|
|
1f001cafd9 | ||
|
|
8ab356520d | ||
|
|
89b7e7191f | ||
|
|
7356451aa1 | ||
|
|
31645ded11 | ||
|
|
fa13826273 | ||
|
|
5502e4ec17 | ||
|
|
8eb2331aea | ||
|
|
f0b7c6351e | ||
|
|
4b71e088c7 | ||
|
|
0cd0f0eaf6 | ||
|
|
b6ae47c455 | ||
|
|
4b6722d938 | ||
|
|
595d590862 | ||
|
|
7f91821bcc | ||
|
|
40ce0f995b | ||
|
|
7145204594 | ||
|
|
beb3b85a4f | ||
|
|
9aec9b502e | ||
|
|
9a5191d1f9 | ||
|
|
6bea8215c9 | ||
|
|
eb851d4208 | ||
|
|
d2070277e8 | ||
|
|
533e01a3f9 | ||
|
|
b81a7cdd16 | ||
|
|
b97e31dd55 | ||
|
|
d92d0632eb | ||
|
|
95156a11a1 | ||
|
|
c8885fdfbd | ||
|
|
3312ae08af | ||
|
|
1d1cbc4f56 | ||
|
|
f1dbef4143 | ||
|
|
604a5dbf49 | ||
|
|
3355e65781 | ||
|
|
19db6f24f7 | ||
|
|
eb24e33666 | ||
|
|
73bb0f1900 | ||
|
|
0de196413f | ||
|
|
0bc76c98b0 | ||
|
|
cdc415ea1f | ||
|
|
e7a0a12c3f | ||
|
|
62cd38a9a0 | ||
|
|
2558cd3f01 | ||
|
|
de8e2a83e2 | ||
|
|
db26514bf1 | ||
|
|
04f46f0435 | ||
|
|
94cf07efbf | ||
|
|
926c0a71d0 | ||
|
|
be13f41b30 | ||
|
|
7fe9dd7a60 | ||
|
|
09351d9010 | ||
|
|
88994ef2b7 | ||
|
|
af441aecfc | ||
|
|
5fc56676c2 | ||
|
|
6529390901 | ||
|
|
c147d2fb98 | ||
|
|
68fc48cd1f | ||
|
|
81f3ad45c9 | ||
|
|
606eb0df15 | ||
|
|
ff9f98795e | ||
|
|
f5a9584ae6 | ||
|
|
24f8be834a | ||
|
|
a23fc67f1f | ||
|
|
efd441407f | ||
|
|
79fb3e9852 | ||
|
|
0b2ebabd29 | ||
|
|
8225b745f3 | ||
|
|
fe61be3e11 | ||
|
|
4fbef900e1 | ||
|
|
0f61ae4841 | ||
|
|
3162ed6795 | ||
|
|
84b54ad6a2 | ||
|
|
f8859af377 | ||
|
|
49d9a257ef | ||
|
|
4676ca584b | ||
|
|
1ea080762b | ||
|
|
178209be27 | ||
|
|
d0bb74a03b | ||
|
|
7452a53647 | ||
|
|
36daa7c48e | ||
|
|
1ca9229c66 | ||
|
|
2906591c08 | ||
|
|
088743749b | ||
|
|
a013e69d67 | ||
|
|
ff4e4c055c | ||
|
|
53c6b49673 | ||
|
|
7425e9840d | ||
|
|
1133e5c865 | ||
|
|
f49cf2c22d | ||
|
|
5fdbe084e7 | ||
|
|
e9866a2ccd | ||
|
|
ac95ff5b45 | ||
|
|
dec345b818 | ||
|
|
ce5aea790d | ||
|
|
ad8aa1b1e6 | ||
|
|
3f882ee6a2 | ||
|
|
677ab8e383 | ||
|
|
4f98136771 | ||
|
|
585dd0b6ed | ||
|
|
bec43041a9 | ||
|
|
b4c136125e | ||
|
|
4a8d6cf7cc | ||
|
|
20bd065e77 | ||
|
|
ea65ce8e0d | ||
|
|
811b609b05 | ||
|
|
5447910a0b | ||
|
|
76d9fe4ec6 | ||
|
|
afe9d0fdb3 | ||
|
|
71706031c7 | ||
|
|
36dea9ab97 | ||
|
|
bb7ce740fe | ||
|
|
cf5e9bf44c | ||
|
|
434f383ae9 | ||
|
|
e353390e6c | ||
|
|
0b9893959f | ||
|
|
305748b333 | ||
|
|
abfbe2a48d | ||
|
|
c0f3a63e18 | ||
|
|
389b004879 | ||
|
|
fdb66d5567 | ||
|
|
57f56b02d8 | ||
|
|
a44ffdc20d | ||
|
|
682674dd5f | ||
|
|
5135587c16 | ||
|
|
e0dd4b240f | ||
|
|
a1badbb5b2 | ||
|
|
6165438689 | ||
|
|
3778eac1ba | ||
|
|
03b7b39424 | ||
|
|
6dd4cd0eb7 | ||
|
|
03fd6bd008 | ||
|
|
f33323ca89 | ||
|
|
5aac81bdd1 | ||
|
|
8fae693d9c | ||
|
|
1cce279424 | ||
|
|
d09a558fda | ||
|
|
bd372939bc | ||
|
|
41bc7816f3 | ||
|
|
865acdd4cf | ||
|
|
e247300523 | ||
|
|
367c3c43ff | ||
|
|
0a5f79724c | ||
|
|
f12df1d21b | ||
|
|
ba4a98b1be | ||
|
|
436bbb0077 | ||
|
|
e9551df5ed | ||
|
|
9a6031ab4e | ||
|
|
562ff7efb7 | ||
|
|
93e0aa7557 | ||
|
|
9aef0ed17e | ||
|
|
af64c9a432 | ||
|
|
d1e54d2fd1 | ||
|
|
e898e52d1b | ||
|
|
89ffb04dff | ||
|
|
c03ae754d2 | ||
|
|
909ac92fe2 | ||
|
|
29bd5a9486 | ||
|
|
f4e60e09ac | ||
|
|
d4f3a47d48 | ||
|
|
701a89eb1c | ||
|
|
dd0b54b9b5 | ||
|
|
f509f2c896 | ||
|
|
43da5b88db | ||
|
|
ae8edc02e1 | ||
|
|
2297f2f802 | ||
|
|
aa95d8a5b7 | ||
|
|
b40a5ef09a | ||
|
|
4e70c0c55a | ||
|
|
e8886fa711 | ||
|
|
8dbb13edd4 | ||
|
|
6d86564308 | ||
|
|
165719d084 | ||
|
|
1591d52b78 | ||
|
|
8afdd23be4 | ||
|
|
6af3c96d8e | ||
|
|
d0f097c871 | ||
|
|
9c648c8e3a | ||
|
|
00f5f7dfe7 | ||
|
|
b6774971a6 | ||
|
|
83afe0f868 |
@@ -186,8 +186,8 @@ stages:
|
|||||||
test: macos/11.1
|
test: macos/11.1
|
||||||
- name: RHEL 7.9
|
- name: RHEL 7.9
|
||||||
test: rhel/7.9
|
test: rhel/7.9
|
||||||
- name: RHEL 8.4
|
- name: RHEL 8.3
|
||||||
test: rhel/8.4
|
test: rhel/8.3
|
||||||
- name: FreeBSD 12.2
|
- name: FreeBSD 12.2
|
||||||
test: freebsd/12.2
|
test: freebsd/12.2
|
||||||
- name: FreeBSD 13.0
|
- name: FreeBSD 13.0
|
||||||
|
|||||||
48
.github/BOTMETA.yml
vendored
48
.github/BOTMETA.yml
vendored
@@ -60,6 +60,8 @@ files:
|
|||||||
maintainers: giner
|
maintainers: giner
|
||||||
$filters/from_csv.py:
|
$filters/from_csv.py:
|
||||||
maintainers: Ajpantuso
|
maintainers: Ajpantuso
|
||||||
|
$filters/hashids:
|
||||||
|
maintainers: Ajpantuso
|
||||||
$filters/jc.py:
|
$filters/jc.py:
|
||||||
maintainers: kellyjonbrazil
|
maintainers: kellyjonbrazil
|
||||||
$filters/list.py:
|
$filters/list.py:
|
||||||
@@ -83,6 +85,8 @@ files:
|
|||||||
maintainers: $team_linode
|
maintainers: $team_linode
|
||||||
labels: cloud linode
|
labels: cloud linode
|
||||||
keywords: linode dynamic inventory script
|
keywords: linode dynamic inventory script
|
||||||
|
$inventories/lxd.py:
|
||||||
|
maintainers: conloos
|
||||||
$inventories/proxmox.py:
|
$inventories/proxmox.py:
|
||||||
maintainers: $team_virt ilijamt
|
maintainers: $team_virt ilijamt
|
||||||
$inventories/scaleway.py:
|
$inventories/scaleway.py:
|
||||||
@@ -113,6 +117,8 @@ files:
|
|||||||
$lookups/nios:
|
$lookups/nios:
|
||||||
maintainers: $team_networking sganesh-infoblox
|
maintainers: $team_networking sganesh-infoblox
|
||||||
labels: infoblox networking
|
labels: infoblox networking
|
||||||
|
$lookups/random_string.py:
|
||||||
|
maintainers: Akasurde
|
||||||
$module_utils/:
|
$module_utils/:
|
||||||
labels: module_utils
|
labels: module_utils
|
||||||
$module_utils/gitlab.py:
|
$module_utils/gitlab.py:
|
||||||
@@ -135,6 +141,9 @@ files:
|
|||||||
$module_utils/memset.py:
|
$module_utils/memset.py:
|
||||||
maintainers: glitchcrab
|
maintainers: glitchcrab
|
||||||
labels: cloud memset
|
labels: cloud memset
|
||||||
|
$module_utils/mh/:
|
||||||
|
maintainers: russoz
|
||||||
|
labels: module_helper
|
||||||
$module_utils/module_helper.py:
|
$module_utils/module_helper.py:
|
||||||
maintainers: russoz
|
maintainers: russoz
|
||||||
labels: module_helper
|
labels: module_helper
|
||||||
@@ -150,7 +159,6 @@ files:
|
|||||||
$module_utils/redfish_utils.py:
|
$module_utils/redfish_utils.py:
|
||||||
maintainers: $team_redfish
|
maintainers: $team_redfish
|
||||||
labels: redfish_utils
|
labels: redfish_utils
|
||||||
$module_utils/remote_management/dellemc/: rajeevarakkal
|
|
||||||
$module_utils/remote_management/lxca/common.py: navalkp prabhosa
|
$module_utils/remote_management/lxca/common.py: navalkp prabhosa
|
||||||
$module_utils/scaleway.py:
|
$module_utils/scaleway.py:
|
||||||
maintainers: $team_scaleway
|
maintainers: $team_scaleway
|
||||||
@@ -194,8 +202,6 @@ files:
|
|||||||
maintainers: glitchcrab
|
maintainers: glitchcrab
|
||||||
$modules/cloud/misc/cloud_init_data_facts.py:
|
$modules/cloud/misc/cloud_init_data_facts.py:
|
||||||
maintainers: resmo
|
maintainers: resmo
|
||||||
$modules/cloud/misc/helm.py:
|
|
||||||
maintainers: flaper87
|
|
||||||
$modules/cloud/misc/proxmox.py:
|
$modules/cloud/misc/proxmox.py:
|
||||||
maintainers: $team_virt UnderGreen
|
maintainers: $team_virt UnderGreen
|
||||||
labels: proxmox virt
|
labels: proxmox virt
|
||||||
@@ -340,10 +346,14 @@ files:
|
|||||||
$modules/database/mssql/mssql_db.py:
|
$modules/database/mssql/mssql_db.py:
|
||||||
maintainers: vedit Jmainguy kenichi-ogawa-1988
|
maintainers: vedit Jmainguy kenichi-ogawa-1988
|
||||||
labels: mssql_db
|
labels: mssql_db
|
||||||
|
$modules/database/saphana/hana_query.py:
|
||||||
|
maintainers: rainerleber
|
||||||
$modules/database/vertica/:
|
$modules/database/vertica/:
|
||||||
maintainers: dareko
|
maintainers: dareko
|
||||||
$modules/files/archive.py:
|
$modules/files/archive.py:
|
||||||
maintainers: bendoh
|
maintainers: bendoh
|
||||||
|
$modules/files/filesize.py:
|
||||||
|
maintainers: quidame
|
||||||
$modules/files/ini_file.py:
|
$modules/files/ini_file.py:
|
||||||
maintainers: jpmens noseka1
|
maintainers: jpmens noseka1
|
||||||
$modules/files/iso_extract.py:
|
$modules/files/iso_extract.py:
|
||||||
@@ -357,8 +367,6 @@ files:
|
|||||||
maintainers: dagwieers magnus919 tbielawa cmprescott sm4rk0
|
maintainers: dagwieers magnus919 tbielawa cmprescott sm4rk0
|
||||||
labels: m:xml xml
|
labels: m:xml xml
|
||||||
ignore: magnus919
|
ignore: magnus919
|
||||||
$modules/identity/onepassword_facts.py:
|
|
||||||
maintainers: Rylon
|
|
||||||
$modules/identity/ipa/:
|
$modules/identity/ipa/:
|
||||||
maintainers: $team_ipa
|
maintainers: $team_ipa
|
||||||
$modules/identity/ipa/ipa_pwpolicy.py:
|
$modules/identity/ipa/ipa_pwpolicy.py:
|
||||||
@@ -371,6 +379,8 @@ files:
|
|||||||
maintainers: $team_keycloak
|
maintainers: $team_keycloak
|
||||||
$modules/identity/keycloak/keycloak_group.py:
|
$modules/identity/keycloak/keycloak_group.py:
|
||||||
maintainers: adamgoossens
|
maintainers: adamgoossens
|
||||||
|
$modules/identity/keycloak/keycloak_realm.py:
|
||||||
|
maintainers: kris2kris
|
||||||
$modules/identity/onepassword_info.py:
|
$modules/identity/onepassword_info.py:
|
||||||
maintainers: Rylon
|
maintainers: Rylon
|
||||||
$modules/identity/opendj/opendj_backendprop.py:
|
$modules/identity/opendj/opendj_backendprop.py:
|
||||||
@@ -463,8 +473,6 @@ files:
|
|||||||
maintainers: akostyuk
|
maintainers: akostyuk
|
||||||
$modules/net_tools/ipwcli_dns.py:
|
$modules/net_tools/ipwcli_dns.py:
|
||||||
maintainers: cwollinger
|
maintainers: cwollinger
|
||||||
$modules/net_tools/ldap/ldap_attr.py:
|
|
||||||
maintainers: jtyr
|
|
||||||
$modules/net_tools/ldap/ldap_attrs.py:
|
$modules/net_tools/ldap/ldap_attrs.py:
|
||||||
maintainers: drybjed jtyr noles
|
maintainers: drybjed jtyr noles
|
||||||
$modules/net_tools/ldap/ldap_entry.py:
|
$modules/net_tools/ldap/ldap_entry.py:
|
||||||
@@ -563,7 +571,7 @@ files:
|
|||||||
maintainers: dmtrs
|
maintainers: dmtrs
|
||||||
ignore: resmo
|
ignore: resmo
|
||||||
$modules/packaging/language/cpanm.py:
|
$modules/packaging/language/cpanm.py:
|
||||||
maintainers: fcuny
|
maintainers: fcuny russoz
|
||||||
$modules/packaging/language/easy_install.py:
|
$modules/packaging/language/easy_install.py:
|
||||||
maintainers: mattupstate
|
maintainers: mattupstate
|
||||||
$modules/packaging/language/gem.py:
|
$modules/packaging/language/gem.py:
|
||||||
@@ -643,6 +651,9 @@ files:
|
|||||||
maintainers: elasticdog indrajitr tchernomax
|
maintainers: elasticdog indrajitr tchernomax
|
||||||
labels: pacman
|
labels: pacman
|
||||||
ignore: elasticdog
|
ignore: elasticdog
|
||||||
|
$modules/packaging/os/pacman_key.py:
|
||||||
|
maintainers: grawlinson
|
||||||
|
labels: pacman
|
||||||
$modules/packaging/os/pkgin.py:
|
$modules/packaging/os/pkgin.py:
|
||||||
maintainers: $team_solaris L2G jasperla szinck martinm82
|
maintainers: $team_solaris L2G jasperla szinck martinm82
|
||||||
labels: pkgin solaris
|
labels: pkgin solaris
|
||||||
@@ -713,12 +724,6 @@ files:
|
|||||||
ignore: matze
|
ignore: matze
|
||||||
$modules/remote_management/cobbler/:
|
$modules/remote_management/cobbler/:
|
||||||
maintainers: dagwieers
|
maintainers: dagwieers
|
||||||
$modules/remote_management/dellemc/:
|
|
||||||
maintainers: rajeevarakkal
|
|
||||||
$modules/remote_management/dellemc/idrac_server_config_profile.py:
|
|
||||||
maintainers: jagadeeshnv
|
|
||||||
$modules/remote_management/dellemc/ome_device_info.py:
|
|
||||||
maintainers: Sajna-Shetty
|
|
||||||
$modules/remote_management/hpilo/:
|
$modules/remote_management/hpilo/:
|
||||||
maintainers: haad
|
maintainers: haad
|
||||||
ignore: dagwieers
|
ignore: dagwieers
|
||||||
@@ -738,8 +743,6 @@ files:
|
|||||||
maintainers: evertmulder
|
maintainers: evertmulder
|
||||||
$modules/remote_management/manageiq/manageiq_tenant.py:
|
$modules/remote_management/manageiq/manageiq_tenant.py:
|
||||||
maintainers: evertmulder
|
maintainers: evertmulder
|
||||||
$modules/remote_management/oneview/oneview_datacenter_facts.py:
|
|
||||||
maintainers: aalexmonteiro madhav-bharadwaj ricardogpsf soodpr
|
|
||||||
$modules/remote_management/oneview/:
|
$modules/remote_management/oneview/:
|
||||||
maintainers: adriane-cardozo fgbulsoni tmiotto
|
maintainers: adriane-cardozo fgbulsoni tmiotto
|
||||||
$modules/remote_management/oneview/oneview_datacenter_info.py:
|
$modules/remote_management/oneview/oneview_datacenter_info.py:
|
||||||
@@ -788,12 +791,6 @@ files:
|
|||||||
maintainers: yeukhon
|
maintainers: yeukhon
|
||||||
$modules/storage/emc/emc_vnx_sg_member.py:
|
$modules/storage/emc/emc_vnx_sg_member.py:
|
||||||
maintainers: remixtj
|
maintainers: remixtj
|
||||||
$modules/storage/glusterfs/:
|
|
||||||
maintainers: devyanikota
|
|
||||||
$modules/storage/glusterfs/gluster_peer.py:
|
|
||||||
maintainers: sac
|
|
||||||
$modules/storage/glusterfs/gluster_volume.py:
|
|
||||||
maintainers: rosmo
|
|
||||||
$modules/storage/hpe3par/ss_3par_cpg.py:
|
$modules/storage/hpe3par/ss_3par_cpg.py:
|
||||||
maintainers: farhan7500 gautamphegde
|
maintainers: farhan7500 gautamphegde
|
||||||
$modules/storage/ibm/:
|
$modules/storage/ibm/:
|
||||||
@@ -815,9 +812,6 @@ files:
|
|||||||
maintainers: johanwiren
|
maintainers: johanwiren
|
||||||
$modules/storage/zfs/zfs_delegate_admin.py:
|
$modules/storage/zfs/zfs_delegate_admin.py:
|
||||||
maintainers: natefoo
|
maintainers: natefoo
|
||||||
$modules/system/python_requirements_facts.py:
|
|
||||||
maintainers: willthames
|
|
||||||
ignore: ryansb
|
|
||||||
$modules/system/aix:
|
$modules/system/aix:
|
||||||
maintainers: $team_aix
|
maintainers: $team_aix
|
||||||
labels: aix
|
labels: aix
|
||||||
@@ -950,10 +944,6 @@ files:
|
|||||||
labels: xfconf
|
labels: xfconf
|
||||||
$modules/system/xfs_quota.py:
|
$modules/system/xfs_quota.py:
|
||||||
maintainers: bushvin
|
maintainers: bushvin
|
||||||
$modules/web_infrastructure/jenkins_job_facts.py:
|
|
||||||
maintainers: stpierre
|
|
||||||
$modules/web_infrastructure/nginx_status_facts.py:
|
|
||||||
maintainers: resmo
|
|
||||||
$modules/web_infrastructure/apache2_mod_proxy.py:
|
$modules/web_infrastructure/apache2_mod_proxy.py:
|
||||||
maintainers: oboukili
|
maintainers: oboukili
|
||||||
$modules/web_infrastructure/apache2_module.py:
|
$modules/web_infrastructure/apache2_module.py:
|
||||||
|
|||||||
135
.github/ISSUE_TEMPLATE/bug_report.yml
vendored
Normal file
135
.github/ISSUE_TEMPLATE/bug_report.yml
vendored
Normal file
@@ -0,0 +1,135 @@
|
|||||||
|
---
|
||||||
|
name: Bug report
|
||||||
|
description: Create a report to help us improve
|
||||||
|
|
||||||
|
body:
|
||||||
|
- type: markdown
|
||||||
|
attributes:
|
||||||
|
value: |
|
||||||
|
⚠
|
||||||
|
Verify first that your issue is not [already reported on GitHub][issue search].
|
||||||
|
Also test if the latest release and devel branch are affected too.
|
||||||
|
*Complete **all** sections as described, this form is processed automatically.*
|
||||||
|
|
||||||
|
[issue search]: https://github.com/ansible-collections/community.general/search?q=is%3Aissue&type=issues
|
||||||
|
|
||||||
|
|
||||||
|
- type: textarea
|
||||||
|
attributes:
|
||||||
|
label: Summary
|
||||||
|
description: Explain the problem briefly below.
|
||||||
|
placeholder: >-
|
||||||
|
When I try to do X with the collection from the main branch on GitHub, Y
|
||||||
|
breaks in a way Z under the env E. Here are all the details I know
|
||||||
|
about this problem...
|
||||||
|
validations:
|
||||||
|
required: true
|
||||||
|
|
||||||
|
- type: dropdown
|
||||||
|
attributes:
|
||||||
|
label: Issue Type
|
||||||
|
# FIXME: Once GitHub allows defining the default choice, update this
|
||||||
|
options:
|
||||||
|
- Bug Report
|
||||||
|
validations:
|
||||||
|
required: true
|
||||||
|
|
||||||
|
- type: textarea
|
||||||
|
attributes:
|
||||||
|
# For smaller collections we could use a multi-select and hardcode the list
|
||||||
|
# May generate this list via GitHub action and walking files under https://github.com/ansible-collections/community.general/tree/main/plugins
|
||||||
|
# Select from list, filter as you type (`mysql` would only show the 3 mysql components)
|
||||||
|
# OR freeform - doesn't seem to be supported in adaptivecards
|
||||||
|
label: Component Name
|
||||||
|
description: >-
|
||||||
|
Write the short name of the module, plugin, task or feature below,
|
||||||
|
*use your best guess if unsure*.
|
||||||
|
placeholder: dnf, apt, yum, pip, user etc.
|
||||||
|
validations:
|
||||||
|
required: true
|
||||||
|
|
||||||
|
- type: textarea
|
||||||
|
attributes:
|
||||||
|
label: Ansible Version
|
||||||
|
description: >-
|
||||||
|
Paste verbatim output from `ansible --version` between
|
||||||
|
tripple backticks.
|
||||||
|
value: |
|
||||||
|
```console (paste below)
|
||||||
|
$ ansible --version
|
||||||
|
|
||||||
|
```
|
||||||
|
validations:
|
||||||
|
required: true
|
||||||
|
|
||||||
|
- type: textarea
|
||||||
|
attributes:
|
||||||
|
label: Configuration
|
||||||
|
description: >-
|
||||||
|
If this issue has an example piece of YAML that can help to reproduce this problem, please provide it.
|
||||||
|
This can be a piece of YAML from, e.g., an automation, script, scene or configuration.
|
||||||
|
Paste verbatim output from `ansible-config dump --only-changed` between quotes
|
||||||
|
value: |
|
||||||
|
```console (paste below)
|
||||||
|
$ ansible-config dump --only-changed
|
||||||
|
|
||||||
|
```
|
||||||
|
|
||||||
|
|
||||||
|
- type: textarea
|
||||||
|
attributes:
|
||||||
|
label: OS / Environment
|
||||||
|
description: >-
|
||||||
|
Provide all relevant information below, e.g. target OS versions,
|
||||||
|
network device firmware, etc.
|
||||||
|
placeholder: RHEL 8, CentOS Stream etc.
|
||||||
|
validations:
|
||||||
|
required: false
|
||||||
|
|
||||||
|
|
||||||
|
- type: textarea
|
||||||
|
attributes:
|
||||||
|
label: Steps to Reproduce
|
||||||
|
description: |
|
||||||
|
Describe exactly how to reproduce the problem, using a minimal test-case. It would *really* help us understand your problem if you could also pased any playbooks, configs and commands you used.
|
||||||
|
|
||||||
|
**HINT:** You can paste https://gist.github.com links for larger files.
|
||||||
|
value: |
|
||||||
|
<!--- Paste example playbooks or commands between quotes below -->
|
||||||
|
```yaml (paste below)
|
||||||
|
|
||||||
|
```
|
||||||
|
validations:
|
||||||
|
required: true
|
||||||
|
|
||||||
|
- type: textarea
|
||||||
|
attributes:
|
||||||
|
label: Expected Results
|
||||||
|
description: >-
|
||||||
|
Describe what you expected to happen when running the steps above.
|
||||||
|
placeholder: >-
|
||||||
|
I expected X to happen because I assumed Y.
|
||||||
|
that it did not.
|
||||||
|
validations:
|
||||||
|
required: true
|
||||||
|
|
||||||
|
- type: textarea
|
||||||
|
attributes:
|
||||||
|
label: Actual Results
|
||||||
|
description: |
|
||||||
|
Describe what actually happened. If possible run with extra verbosity (`-vvvv`).
|
||||||
|
|
||||||
|
Paste verbatim command output between quotes.
|
||||||
|
value: |
|
||||||
|
```console (paste below)
|
||||||
|
|
||||||
|
```
|
||||||
|
- type: checkboxes
|
||||||
|
attributes:
|
||||||
|
label: Code of Conduct
|
||||||
|
description: |
|
||||||
|
Read the [Ansible Code of Conduct](https://docs.ansible.com/ansible/latest/community/code_of_conduct.html?utm_medium=github&utm_source=issue_form--ansible-collections) first.
|
||||||
|
options:
|
||||||
|
- label: I agree to follow the Ansible Code of Conduct
|
||||||
|
required: true
|
||||||
|
...
|
||||||
27
.github/ISSUE_TEMPLATE/config.yml
vendored
Normal file
27
.github/ISSUE_TEMPLATE/config.yml
vendored
Normal file
@@ -0,0 +1,27 @@
|
|||||||
|
---
|
||||||
|
# Ref: https://help.github.com/en/github/building-a-strong-community/configuring-issue-templates-for-your-repository#configuring-the-template-chooser
|
||||||
|
blank_issues_enabled: false # default: true
|
||||||
|
contact_links:
|
||||||
|
- name: Security bug report
|
||||||
|
url: https://docs.ansible.com/ansible-core/devel/community/reporting_bugs_and_features.html?utm_medium=github&utm_source=issue_template_chooser_ansible_collections
|
||||||
|
about: |
|
||||||
|
Please learn how to report security vulnerabilities here.
|
||||||
|
|
||||||
|
For all security related bugs, email security@ansible.com
|
||||||
|
instead of using this issue tracker and you will receive
|
||||||
|
a prompt response.
|
||||||
|
|
||||||
|
For more information, see
|
||||||
|
https://docs.ansible.com/ansible/latest/community/reporting_bugs_and_features.html
|
||||||
|
- name: Ansible Code of Conduct
|
||||||
|
url: https://docs.ansible.com/ansible/latest/community/code_of_conduct.html?utm_medium=github&utm_source=issue_template_chooser_ansible_collections
|
||||||
|
about: Be nice to other members of the community.
|
||||||
|
- name: Talks to the community
|
||||||
|
url: https://docs.ansible.com/ansible/latest/community/communication.html?utm_medium=github&utm_source=issue_template_chooser#mailing-list-information
|
||||||
|
about: Please ask and answer usage questions here
|
||||||
|
- name: Working groups
|
||||||
|
url: https://github.com/ansible/community/wiki
|
||||||
|
about: Interested in improving a specific area? Become a part of a working group!
|
||||||
|
- name: For Enterprise
|
||||||
|
url: https://www.ansible.com/products/engine?utm_medium=github&utm_source=issue_template_chooser_ansible_collections
|
||||||
|
about: Red Hat offers support for the Ansible Automation Platform
|
||||||
111
.github/ISSUE_TEMPLATE/documentation_report.yml
vendored
Normal file
111
.github/ISSUE_TEMPLATE/documentation_report.yml
vendored
Normal file
@@ -0,0 +1,111 @@
|
|||||||
|
---
|
||||||
|
name: Documentation Report
|
||||||
|
description: Ask us about docs
|
||||||
|
# NOTE: issue body is enabled to allow screenshots
|
||||||
|
|
||||||
|
body:
|
||||||
|
- type: markdown
|
||||||
|
attributes:
|
||||||
|
value: |
|
||||||
|
⚠
|
||||||
|
Verify first that your issue is not [already reported on GitHub][issue search].
|
||||||
|
Also test if the latest release and devel branch are affected too.
|
||||||
|
*Complete **all** sections as described, this form is processed automatically.*
|
||||||
|
|
||||||
|
[issue search]: https://github.com/ansible-collections/community.general/search?q=is%3Aissue&type=issues
|
||||||
|
|
||||||
|
|
||||||
|
- type: textarea
|
||||||
|
attributes:
|
||||||
|
label: Summary
|
||||||
|
description: |
|
||||||
|
Explain the problem briefly below, add suggestions to wording or structure.
|
||||||
|
|
||||||
|
**HINT:** Did you know the documentation has an `Edit on GitHub` link on every page?
|
||||||
|
placeholder: >-
|
||||||
|
I was reading the Collection documentation of version X and I'm having
|
||||||
|
problems understanding Y. It would be very helpful if that got
|
||||||
|
rephrased as Z.
|
||||||
|
validations:
|
||||||
|
required: true
|
||||||
|
|
||||||
|
- type: dropdown
|
||||||
|
attributes:
|
||||||
|
label: Issue Type
|
||||||
|
# FIXME: Once GitHub allows defining the default choice, update this
|
||||||
|
options:
|
||||||
|
- Documentation Report
|
||||||
|
validations:
|
||||||
|
required: true
|
||||||
|
|
||||||
|
- type: input
|
||||||
|
attributes:
|
||||||
|
label: Component Name
|
||||||
|
description: >-
|
||||||
|
Write the short name of the rst file, module, plugin, task or
|
||||||
|
feature below, *use your best guess if unsure*.
|
||||||
|
placeholder: mysql_user
|
||||||
|
validations:
|
||||||
|
required: true
|
||||||
|
|
||||||
|
- type: textarea
|
||||||
|
attributes:
|
||||||
|
label: Ansible Version
|
||||||
|
description: >-
|
||||||
|
Paste verbatim output from `ansible --version` between
|
||||||
|
tripple backticks.
|
||||||
|
value: |
|
||||||
|
```console (paste below)
|
||||||
|
$ ansible --version
|
||||||
|
|
||||||
|
```
|
||||||
|
validations:
|
||||||
|
required: false
|
||||||
|
|
||||||
|
- type: textarea
|
||||||
|
attributes:
|
||||||
|
label: Configuration
|
||||||
|
description: >-
|
||||||
|
Paste verbatim output from `ansible-config dump --only-changed` between quotes.
|
||||||
|
value: |
|
||||||
|
```console (paste below)
|
||||||
|
$ ansible-config dump --only-changed
|
||||||
|
|
||||||
|
```
|
||||||
|
validations:
|
||||||
|
required: false
|
||||||
|
|
||||||
|
- type: textarea
|
||||||
|
attributes:
|
||||||
|
label: OS / Environment
|
||||||
|
description: >-
|
||||||
|
Provide all relevant information below, e.g. OS version,
|
||||||
|
browser, etc.
|
||||||
|
placeholder: Fedora 33, Firefox etc.
|
||||||
|
validations:
|
||||||
|
required: false
|
||||||
|
|
||||||
|
- type: textarea
|
||||||
|
attributes:
|
||||||
|
label: Additional Information
|
||||||
|
description: |
|
||||||
|
Describe how this improves the documentation, e.g. before/after situation or screenshots.
|
||||||
|
|
||||||
|
**Tip:** It's not possible to upload the screenshot via this field directly but you can use the last textarea in this form to attach them.
|
||||||
|
|
||||||
|
**HINT:** You can paste https://gist.github.com links for larger files.
|
||||||
|
placeholder: >-
|
||||||
|
When the improvement is applied, it makes it more straightforward
|
||||||
|
to understand X.
|
||||||
|
validations:
|
||||||
|
required: false
|
||||||
|
|
||||||
|
- type: checkboxes
|
||||||
|
attributes:
|
||||||
|
label: Code of Conduct
|
||||||
|
description: |
|
||||||
|
Read the [Ansible Code of Conduct](https://docs.ansible.com/ansible/latest/community/code_of_conduct.html?utm_medium=github&utm_source=issue_form--ansible-collections) first.
|
||||||
|
options:
|
||||||
|
- label: I agree to follow the Ansible Code of Conduct
|
||||||
|
required: true
|
||||||
|
...
|
||||||
69
.github/ISSUE_TEMPLATE/feature_request.yml
vendored
Normal file
69
.github/ISSUE_TEMPLATE/feature_request.yml
vendored
Normal file
@@ -0,0 +1,69 @@
|
|||||||
|
---
|
||||||
|
name: Feature request
|
||||||
|
description: Suggest an idea for this project
|
||||||
|
|
||||||
|
body:
|
||||||
|
- type: markdown
|
||||||
|
attributes:
|
||||||
|
value: |
|
||||||
|
⚠
|
||||||
|
Verify first that your issue is not [already reported on GitHub][issue search].
|
||||||
|
Also test if the latest release and devel branch are affected too.
|
||||||
|
*Complete **all** sections as described, this form is processed automatically.*
|
||||||
|
|
||||||
|
[issue search]: https://github.com/ansible-collections/community.general/search?q=is%3Aissue&type=issues
|
||||||
|
|
||||||
|
|
||||||
|
- type: textarea
|
||||||
|
attributes:
|
||||||
|
label: Summary
|
||||||
|
description: Describe the new feature/improvement briefly below.
|
||||||
|
placeholder: >-
|
||||||
|
I am trying to do X with the collection from the main branch on GitHub and
|
||||||
|
I think that implementing a feature Y would be very helpful for me and
|
||||||
|
every other user of ansible-core because of Z.
|
||||||
|
validations:
|
||||||
|
required: true
|
||||||
|
|
||||||
|
- type: dropdown
|
||||||
|
attributes:
|
||||||
|
label: Issue Type
|
||||||
|
# FIXME: Once GitHub allows defining the default choice, update this
|
||||||
|
options:
|
||||||
|
- Feature Idea
|
||||||
|
validations:
|
||||||
|
required: true
|
||||||
|
|
||||||
|
- type: input
|
||||||
|
attributes:
|
||||||
|
label: Component Name
|
||||||
|
description: >-
|
||||||
|
Write the short name of the module, plugin, task or feature below,
|
||||||
|
*use your best guess if unsure*.
|
||||||
|
placeholder: dnf, apt, yum, pip, user etc.
|
||||||
|
validations:
|
||||||
|
required: true
|
||||||
|
|
||||||
|
- type: textarea
|
||||||
|
attributes:
|
||||||
|
label: Additional Information
|
||||||
|
description: |
|
||||||
|
Describe how the feature would be used, why it is needed and what it would solve.
|
||||||
|
|
||||||
|
**HINT:** You can paste https://gist.github.com links for larger files.
|
||||||
|
value: |
|
||||||
|
<!--- Paste example playbooks or commands between quotes below -->
|
||||||
|
```yaml (paste below)
|
||||||
|
|
||||||
|
```
|
||||||
|
validations:
|
||||||
|
required: false
|
||||||
|
- type: checkboxes
|
||||||
|
attributes:
|
||||||
|
label: Code of Conduct
|
||||||
|
description: |
|
||||||
|
Read the [Ansible Code of Conduct](https://docs.ansible.com/ansible/latest/community/code_of_conduct.html?utm_medium=github&utm_source=issue_form--ansible-collections) first.
|
||||||
|
options:
|
||||||
|
- label: I agree to follow the Ansible Code of Conduct
|
||||||
|
required: true
|
||||||
|
...
|
||||||
81
.gitignore
vendored
81
.gitignore
vendored
@@ -1,6 +1,6 @@
|
|||||||
|
|
||||||
# Created by https://www.toptal.com/developers/gitignore/api/git,linux,pydev,python,windows,pycharm+all,jupyternotebook,vim,webstorm,emacs,dotenv
|
# Created by https://www.gitignore.io/api/git,linux,pydev,python,windows,pycharm+all,jupyternotebook,vim,webstorm,emacs,dotenv
|
||||||
# Edit at https://www.toptal.com/developers/gitignore?templates=git,linux,pydev,python,windows,pycharm+all,jupyternotebook,vim,webstorm,emacs,dotenv
|
# Edit at https://www.gitignore.io/?templates=git,linux,pydev,python,windows,pycharm+all,jupyternotebook,vim,webstorm,emacs,dotenv
|
||||||
|
|
||||||
### dotenv ###
|
### dotenv ###
|
||||||
.env
|
.env
|
||||||
@@ -88,7 +88,7 @@ flycheck_*.el
|
|||||||
.nfs*
|
.nfs*
|
||||||
|
|
||||||
### PyCharm+all ###
|
### PyCharm+all ###
|
||||||
# Covers JetBrains IDEs: IntelliJ, RubyMine, PhpStorm, AppCode, PyCharm, CLion, Android Studio, WebStorm and Rider
|
# Covers JetBrains IDEs: IntelliJ, RubyMine, PhpStorm, AppCode, PyCharm, CLion, Android Studio and WebStorm
|
||||||
# Reference: https://intellij-support.jetbrains.com/hc/en-us/articles/206544839
|
# Reference: https://intellij-support.jetbrains.com/hc/en-us/articles/206544839
|
||||||
|
|
||||||
# User-specific stuff
|
# User-specific stuff
|
||||||
@@ -98,9 +98,6 @@ flycheck_*.el
|
|||||||
.idea/**/dictionaries
|
.idea/**/dictionaries
|
||||||
.idea/**/shelf
|
.idea/**/shelf
|
||||||
|
|
||||||
# AWS User-specific
|
|
||||||
.idea/**/aws.xml
|
|
||||||
|
|
||||||
# Generated files
|
# Generated files
|
||||||
.idea/**/contentModel.xml
|
.idea/**/contentModel.xml
|
||||||
|
|
||||||
@@ -121,9 +118,6 @@ flycheck_*.el
|
|||||||
# When using Gradle or Maven with auto-import, you should exclude module files,
|
# When using Gradle or Maven with auto-import, you should exclude module files,
|
||||||
# since they will be recreated, and may cause churn. Uncomment if using
|
# since they will be recreated, and may cause churn. Uncomment if using
|
||||||
# auto-import.
|
# auto-import.
|
||||||
# .idea/artifacts
|
|
||||||
# .idea/compiler.xml
|
|
||||||
# .idea/jarRepositories.xml
|
|
||||||
# .idea/modules.xml
|
# .idea/modules.xml
|
||||||
# .idea/*.iml
|
# .idea/*.iml
|
||||||
# .idea/modules
|
# .idea/modules
|
||||||
@@ -204,6 +198,7 @@ parts/
|
|||||||
sdist/
|
sdist/
|
||||||
var/
|
var/
|
||||||
wheels/
|
wheels/
|
||||||
|
pip-wheel-metadata/
|
||||||
share/python-wheels/
|
share/python-wheels/
|
||||||
*.egg-info/
|
*.egg-info/
|
||||||
.installed.cfg
|
.installed.cfg
|
||||||
@@ -230,25 +225,13 @@ htmlcov/
|
|||||||
nosetests.xml
|
nosetests.xml
|
||||||
coverage.xml
|
coverage.xml
|
||||||
*.cover
|
*.cover
|
||||||
*.py,cover
|
|
||||||
.hypothesis/
|
.hypothesis/
|
||||||
.pytest_cache/
|
.pytest_cache/
|
||||||
cover/
|
|
||||||
|
|
||||||
# Translations
|
# Translations
|
||||||
*.mo
|
*.mo
|
||||||
*.pot
|
*.pot
|
||||||
|
|
||||||
# Django stuff:
|
|
||||||
*.log
|
|
||||||
local_settings.py
|
|
||||||
db.sqlite3
|
|
||||||
db.sqlite3-journal
|
|
||||||
|
|
||||||
# Flask stuff:
|
|
||||||
instance/
|
|
||||||
.webassets-cache
|
|
||||||
|
|
||||||
# Scrapy stuff:
|
# Scrapy stuff:
|
||||||
.scrapy
|
.scrapy
|
||||||
|
|
||||||
@@ -256,19 +239,9 @@ instance/
|
|||||||
docs/_build/
|
docs/_build/
|
||||||
|
|
||||||
# PyBuilder
|
# PyBuilder
|
||||||
.pybuilder/
|
|
||||||
target/
|
target/
|
||||||
|
|
||||||
# Jupyter Notebook
|
|
||||||
.ipynb_checkpoints
|
|
||||||
|
|
||||||
# IPython
|
|
||||||
profile_default/
|
|
||||||
ipython_config.py
|
|
||||||
|
|
||||||
# pyenv
|
# pyenv
|
||||||
# For a library or package, you might want to ignore these files since the code is
|
|
||||||
# intended to run in multiple environments; otherwise, check them in:
|
|
||||||
.python-version
|
.python-version
|
||||||
|
|
||||||
# pipenv
|
# pipenv
|
||||||
@@ -278,24 +251,12 @@ ipython_config.py
|
|||||||
# install all needed dependencies.
|
# install all needed dependencies.
|
||||||
#Pipfile.lock
|
#Pipfile.lock
|
||||||
|
|
||||||
# PEP 582; used by e.g. github.com/David-OConnor/pyflow
|
# celery beat schedule file
|
||||||
__pypackages__/
|
|
||||||
|
|
||||||
# Celery stuff
|
|
||||||
celerybeat-schedule
|
celerybeat-schedule
|
||||||
celerybeat.pid
|
|
||||||
|
|
||||||
# SageMath parsed files
|
# SageMath parsed files
|
||||||
*.sage.py
|
*.sage.py
|
||||||
|
|
||||||
# Environments
|
|
||||||
.venv
|
|
||||||
env/
|
|
||||||
venv/
|
|
||||||
ENV/
|
|
||||||
env.bak/
|
|
||||||
venv.bak/
|
|
||||||
|
|
||||||
# Spyder project settings
|
# Spyder project settings
|
||||||
.spyderproject
|
.spyderproject
|
||||||
.spyproject
|
.spyproject
|
||||||
@@ -303,6 +264,10 @@ venv.bak/
|
|||||||
# Rope project settings
|
# Rope project settings
|
||||||
.ropeproject
|
.ropeproject
|
||||||
|
|
||||||
|
# Mr Developer
|
||||||
|
.mr.developer.cfg
|
||||||
|
.project
|
||||||
|
|
||||||
# mkdocs documentation
|
# mkdocs documentation
|
||||||
/site
|
/site
|
||||||
|
|
||||||
@@ -314,16 +279,9 @@ dmypy.json
|
|||||||
# Pyre type checker
|
# Pyre type checker
|
||||||
.pyre/
|
.pyre/
|
||||||
|
|
||||||
# pytype static type analyzer
|
|
||||||
.pytype/
|
|
||||||
|
|
||||||
# Cython debug symbols
|
|
||||||
cython_debug/
|
|
||||||
|
|
||||||
### Vim ###
|
### Vim ###
|
||||||
# Swap
|
# Swap
|
||||||
[._]*.s[a-v][a-z]
|
[._]*.s[a-v][a-z]
|
||||||
!*.svg # comment out if you don't need vector files
|
|
||||||
[._]*.sw[a-p]
|
[._]*.sw[a-p]
|
||||||
[._]s[a-rt-v][a-z]
|
[._]s[a-rt-v][a-z]
|
||||||
[._]ss[a-gi-z]
|
[._]ss[a-gi-z]
|
||||||
@@ -341,13 +299,11 @@ tags
|
|||||||
[._]*.un~
|
[._]*.un~
|
||||||
|
|
||||||
### WebStorm ###
|
### WebStorm ###
|
||||||
# Covers JetBrains IDEs: IntelliJ, RubyMine, PhpStorm, AppCode, PyCharm, CLion, Android Studio, WebStorm and Rider
|
# Covers JetBrains IDEs: IntelliJ, RubyMine, PhpStorm, AppCode, PyCharm, CLion, Android Studio and WebStorm
|
||||||
# Reference: https://intellij-support.jetbrains.com/hc/en-us/articles/206544839
|
# Reference: https://intellij-support.jetbrains.com/hc/en-us/articles/206544839
|
||||||
|
|
||||||
# User-specific stuff
|
# User-specific stuff
|
||||||
|
|
||||||
# AWS User-specific
|
|
||||||
|
|
||||||
# Generated files
|
# Generated files
|
||||||
|
|
||||||
# Sensitive or high-churn files
|
# Sensitive or high-churn files
|
||||||
@@ -358,9 +314,6 @@ tags
|
|||||||
# When using Gradle or Maven with auto-import, you should exclude module files,
|
# When using Gradle or Maven with auto-import, you should exclude module files,
|
||||||
# since they will be recreated, and may cause churn. Uncomment if using
|
# since they will be recreated, and may cause churn. Uncomment if using
|
||||||
# auto-import.
|
# auto-import.
|
||||||
# .idea/artifacts
|
|
||||||
# .idea/compiler.xml
|
|
||||||
# .idea/jarRepositories.xml
|
|
||||||
# .idea/modules.xml
|
# .idea/modules.xml
|
||||||
# .idea/*.iml
|
# .idea/*.iml
|
||||||
# .idea/modules
|
# .idea/modules
|
||||||
@@ -396,27 +349,15 @@ tags
|
|||||||
# *.ipr
|
# *.ipr
|
||||||
|
|
||||||
# Sonarlint plugin
|
# Sonarlint plugin
|
||||||
# https://plugins.jetbrains.com/plugin/7973-sonarlint
|
|
||||||
.idea/**/sonarlint/
|
.idea/**/sonarlint/
|
||||||
|
|
||||||
# SonarQube Plugin
|
# SonarQube Plugin
|
||||||
# https://plugins.jetbrains.com/plugin/7238-sonarqube-community-plugin
|
|
||||||
.idea/**/sonarIssues.xml
|
.idea/**/sonarIssues.xml
|
||||||
|
|
||||||
# Markdown Navigator plugin
|
# Markdown Navigator plugin
|
||||||
# https://plugins.jetbrains.com/plugin/7896-markdown-navigator-enhanced
|
|
||||||
.idea/**/markdown-navigator.xml
|
.idea/**/markdown-navigator.xml
|
||||||
.idea/**/markdown-navigator-enh.xml
|
|
||||||
.idea/**/markdown-navigator/
|
.idea/**/markdown-navigator/
|
||||||
|
|
||||||
# Cache file creation bug
|
|
||||||
# See https://youtrack.jetbrains.com/issue/JBR-2257
|
|
||||||
.idea/$CACHE_FILE$
|
|
||||||
|
|
||||||
# CodeStream plugin
|
|
||||||
# https://plugins.jetbrains.com/plugin/12206-codestream
|
|
||||||
.idea/codestream.xml
|
|
||||||
|
|
||||||
### Windows ###
|
### Windows ###
|
||||||
# Windows thumbnail cache files
|
# Windows thumbnail cache files
|
||||||
Thumbs.db
|
Thumbs.db
|
||||||
@@ -443,4 +384,4 @@ $RECYCLE.BIN/
|
|||||||
# Windows shortcuts
|
# Windows shortcuts
|
||||||
*.lnk
|
*.lnk
|
||||||
|
|
||||||
# End of https://www.toptal.com/developers/gitignore/api/git,linux,pydev,python,windows,pycharm+all,jupyternotebook,vim,webstorm,emacs,dotenv
|
# End of https://www.gitignore.io/api/git,linux,pydev,python,windows,pycharm+all,jupyternotebook,vim,webstorm,emacs,dotenv
|
||||||
|
|||||||
1626
CHANGELOG.rst
1626
CHANGELOG.rst
File diff suppressed because it is too large
Load Diff
@@ -23,9 +23,9 @@ Note that reviewing does not only mean code review, but also offering comments o
|
|||||||
Also, consider taking up a valuable, reviewed, but abandoned pull request which you could politely ask the original authors to complete yourself.
|
Also, consider taking up a valuable, reviewed, but abandoned pull request which you could politely ask the original authors to complete yourself.
|
||||||
|
|
||||||
* Try committing your changes with an informative but short commit message.
|
* Try committing your changes with an informative but short commit message.
|
||||||
* Do not squash your commits and force-push to your branch if not needed. Reviews of your pull request are much easier with individual commits to comprehend the pull request history. All commits of your pull request branch will be squashed into one commit by GitHub upon merge.
|
* All commits of a pull request branch will be squashed into one commit at last. That does not mean you must have only one commit on your pull request, though!
|
||||||
|
* Please try not to force-push if it is not needed, so reviewers and other users looking at your pull request later can see the pull request commit history.
|
||||||
* Do not add merge commits to your PR. The bot will complain and you will have to rebase ([instructions for rebasing](https://docs.ansible.com/ansible/latest/dev_guide/developing_rebasing.html)) to remove them before your PR can be merged. To avoid that git automatically does merges during pulls, you can configure it to do rebases instead by running `git config pull.rebase true` inside the respository checkout.
|
* Do not add merge commits to your PR. The bot will complain and you will have to rebase ([instructions for rebasing](https://docs.ansible.com/ansible/latest/dev_guide/developing_rebasing.html)) to remove them before your PR can be merged. To avoid that git automatically does merges during pulls, you can configure it to do rebases instead by running `git config pull.rebase true` inside the respository checkout.
|
||||||
* Make sure your PR includes a [changelog fragment](https://docs.ansible.com/ansible/devel/community/development_process.html#changelogs-how-to). (You must not include a fragment for new modules or new plugins, except for test and filter plugins. Also you shouldn't include one for docs-only changes. If you're not sure, simply don't include one, we'll tell you whether one is needed or not :) )
|
|
||||||
|
|
||||||
You can also read [our Quick-start development guide](https://github.com/ansible/community-docs/blob/main/create_pr_quick_start_guide.rst).
|
You can also read [our Quick-start development guide](https://github.com/ansible/community-docs/blob/main/create_pr_quick_start_guide.rst).
|
||||||
|
|
||||||
|
|||||||
@@ -1,6 +1,6 @@
|
|||||||
# Community General Collection
|
# Community General Collection
|
||||||
|
|
||||||
[](https://dev.azure.com/ansible/community.general/_build?definitionId=31)
|
[](https://dev.azure.com/ansible/community.general/_build?definitionId=31)
|
||||||
[](https://codecov.io/gh/ansible-collections/community.general)
|
[](https://codecov.io/gh/ansible-collections/community.general)
|
||||||
|
|
||||||
This repository contains the `community.general` Ansible Collection. The collection is a part of the Ansible package and includes many modules and plugins supported by Ansible community which are not part of more specialized community collections.
|
This repository contains the `community.general` Ansible Collection. The collection is a part of the Ansible package and includes many modules and plugins supported by Ansible community which are not part of more specialized community collections.
|
||||||
@@ -58,9 +58,7 @@ See [Ansible Using collections](https://docs.ansible.com/ansible/latest/user_gui
|
|||||||
|
|
||||||
## Contributing to this collection
|
## Contributing to this collection
|
||||||
|
|
||||||
The content of this collection is made by good people just like you, a community of individuals collaborating on making the world better through developing automation software.
|
The content of this collection is made by good people like you, a community of individuals collaborating on making the world better through developing automation software.
|
||||||
|
|
||||||
We are actively accepting new contributors.
|
|
||||||
|
|
||||||
All types of contributions are very welcome.
|
All types of contributions are very welcome.
|
||||||
|
|
||||||
@@ -99,7 +97,7 @@ Basic instructions without release branches:
|
|||||||
|
|
||||||
## Release notes
|
## Release notes
|
||||||
|
|
||||||
See the [changelog](https://github.com/ansible-collections/community.general/blob/stable-2/CHANGELOG.rst).
|
See the [changelog](https://github.com/ansible-collections/community.general/blob/stable-3/CHANGELOG.rst).
|
||||||
|
|
||||||
## Roadmap
|
## Roadmap
|
||||||
|
|
||||||
|
|||||||
File diff suppressed because it is too large
Load Diff
@@ -68,6 +68,7 @@ Individuals who have been asked to become a part of this group have generally be
|
|||||||
| Name | GitHub ID | IRC Nick | Other |
|
| Name | GitHub ID | IRC Nick | Other |
|
||||||
| ------------------- | -------------------- | ------------------ | -------------------- |
|
| ------------------- | -------------------- | ------------------ | -------------------- |
|
||||||
| Alexei Znamensky | russoz | russoz | |
|
| Alexei Znamensky | russoz | russoz | |
|
||||||
|
| Amin Vakil | aminvakil | aminvakil | |
|
||||||
| Andrew Klychkov | andersson007 | andersson007_ | |
|
| Andrew Klychkov | andersson007 | andersson007_ | |
|
||||||
| Felix Fontein | felixfontein | felixfontein | |
|
| Felix Fontein | felixfontein | felixfontein | |
|
||||||
| John R Barker | gundalow | gundalow | |
|
| John R Barker | gundalow | gundalow | |
|
||||||
|
|||||||
5
docs/docsite/extra-docs.yml
Normal file
5
docs/docsite/extra-docs.yml
Normal file
@@ -0,0 +1,5 @@
|
|||||||
|
---
|
||||||
|
sections:
|
||||||
|
- title: Guides
|
||||||
|
toctree:
|
||||||
|
- filter_guide
|
||||||
753
docs/docsite/rst/filter_guide.rst
Normal file
753
docs/docsite/rst/filter_guide.rst
Normal file
@@ -0,0 +1,753 @@
|
|||||||
|
.. _ansible_collections.community.general.docsite.filter_guide:
|
||||||
|
|
||||||
|
community.general Filter Guide
|
||||||
|
==============================
|
||||||
|
|
||||||
|
The :ref:`community.general collection <plugins_in_community.general>` offers several useful filter plugins.
|
||||||
|
|
||||||
|
.. contents:: Topics
|
||||||
|
|
||||||
|
Paths
|
||||||
|
-----
|
||||||
|
|
||||||
|
The ``path_join`` filter has been added in ansible-base 2.10. If you want to use this filter, but also need to support Ansible 2.9, you can use ``community.general``'s ``path_join`` shim, ``community.general.path_join``. This filter redirects to ``path_join`` for ansible-base 2.10 and ansible-core 2.11 or newer, and re-implements the filter for Ansible 2.9.
|
||||||
|
|
||||||
|
.. code-block:: yaml+jinja
|
||||||
|
|
||||||
|
# ansible-base 2.10 or newer:
|
||||||
|
path: {{ ('/etc', path, 'subdir', file) | path_join }}
|
||||||
|
|
||||||
|
# Also works with Ansible 2.9:
|
||||||
|
path: {{ ('/etc', path, 'subdir', file) | community.general.path_join }}
|
||||||
|
|
||||||
|
.. versionadded:: 3.0.0
|
||||||
|
|
||||||
|
Abstract transformations
|
||||||
|
------------------------
|
||||||
|
|
||||||
|
Dictionaries
|
||||||
|
^^^^^^^^^^^^
|
||||||
|
|
||||||
|
You can use the ``dict_kv`` filter to create a single-entry dictionary with ``value | community.general.dict_kv(key)``:
|
||||||
|
|
||||||
|
.. code-block:: yaml+jinja
|
||||||
|
|
||||||
|
- name: Create a single-entry dictionary
|
||||||
|
debug:
|
||||||
|
msg: "{{ myvar | community.general.dict_kv('thatsmyvar') }}"
|
||||||
|
vars:
|
||||||
|
myvar: myvalue
|
||||||
|
|
||||||
|
- name: Create a list of dictionaries where the 'server' field is taken from a list
|
||||||
|
debug:
|
||||||
|
msg: >-
|
||||||
|
{{ myservers | map('community.general.dict_kv', 'server')
|
||||||
|
| map('combine', common_config) }}
|
||||||
|
vars:
|
||||||
|
common_config:
|
||||||
|
type: host
|
||||||
|
database: all
|
||||||
|
myservers:
|
||||||
|
- server1
|
||||||
|
- server2
|
||||||
|
|
||||||
|
This produces:
|
||||||
|
|
||||||
|
.. code-block:: ansible-output
|
||||||
|
|
||||||
|
TASK [Create a single-entry dictionary] **************************************************
|
||||||
|
ok: [localhost] => {
|
||||||
|
"msg": {
|
||||||
|
"thatsmyvar": "myvalue"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
TASK [Create a list of dictionaries where the 'server' field is taken from a list] *******
|
||||||
|
ok: [localhost] => {
|
||||||
|
"msg": [
|
||||||
|
{
|
||||||
|
"database": "all",
|
||||||
|
"server": "server1",
|
||||||
|
"type": "host"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"database": "all",
|
||||||
|
"server": "server2",
|
||||||
|
"type": "host"
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
||||||
|
|
||||||
|
.. versionadded:: 2.0.0
|
||||||
|
|
||||||
|
If you need to convert a list of key-value pairs to a dictionary, you can use the ``dict`` function. Unfortunately, this function cannot be used with ``map``. For this, the ``community.general.dict`` filter can be used:
|
||||||
|
|
||||||
|
.. code-block:: yaml+jinja
|
||||||
|
|
||||||
|
- name: Create a dictionary with the dict function
|
||||||
|
debug:
|
||||||
|
msg: "{{ dict([[1, 2], ['a', 'b']]) }}"
|
||||||
|
|
||||||
|
- name: Create a dictionary with the community.general.dict filter
|
||||||
|
debug:
|
||||||
|
msg: "{{ [[1, 2], ['a', 'b']] | community.general.dict }}"
|
||||||
|
|
||||||
|
- name: Create a list of dictionaries with map and the community.general.dict filter
|
||||||
|
debug:
|
||||||
|
msg: >-
|
||||||
|
{{ values | map('zip', ['k1', 'k2', 'k3'])
|
||||||
|
| map('map', 'reverse')
|
||||||
|
| map('community.general.dict') }}
|
||||||
|
vars:
|
||||||
|
values:
|
||||||
|
- - foo
|
||||||
|
- 23
|
||||||
|
- a
|
||||||
|
- - bar
|
||||||
|
- 42
|
||||||
|
- b
|
||||||
|
|
||||||
|
This produces:
|
||||||
|
|
||||||
|
.. code-block:: ansible-output
|
||||||
|
|
||||||
|
TASK [Create a dictionary with the dict function] ****************************************
|
||||||
|
ok: [localhost] => {
|
||||||
|
"msg": {
|
||||||
|
"1": 2,
|
||||||
|
"a": "b"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
TASK [Create a dictionary with the community.general.dict filter] ************************
|
||||||
|
ok: [localhost] => {
|
||||||
|
"msg": {
|
||||||
|
"1": 2,
|
||||||
|
"a": "b"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
TASK [Create a list of dictionaries with map and the community.general.dict filter] ******
|
||||||
|
ok: [localhost] => {
|
||||||
|
"msg": [
|
||||||
|
{
|
||||||
|
"k1": "foo",
|
||||||
|
"k2": 23,
|
||||||
|
"k3": "a"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"k1": "bar",
|
||||||
|
"k2": 42,
|
||||||
|
"k3": "b"
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
||||||
|
|
||||||
|
.. versionadded:: 3.0.0
|
||||||
|
|
||||||
|
Grouping
|
||||||
|
^^^^^^^^
|
||||||
|
|
||||||
|
If you have a list of dictionaries, the Jinja2 ``groupby`` filter allows to group the list by an attribute. This results in a list of ``(grouper, list)`` namedtuples, where ``list`` contains all dictionaries where the selected attribute equals ``grouper``. If you know that for every ``grouper``, there will be a most one entry in that list, you can use the ``community.general.groupby_as_dict`` filter to convert the original list into a dictionary which maps ``grouper`` to the corresponding dictionary.
|
||||||
|
|
||||||
|
One example is ``ansible_facts.mounts``, which is a list of dictionaries where each has one ``device`` element to indicate the device which is mounted. Therefore, ``ansible_facts.mounts | community.general.groupby_as_dict('device')`` is a dictionary mapping a device to the mount information:
|
||||||
|
|
||||||
|
.. code-block:: yaml+jinja
|
||||||
|
|
||||||
|
- name: Output mount facts grouped by device name
|
||||||
|
debug:
|
||||||
|
var: ansible_facts.mounts | community.general.groupby_as_dict('device')
|
||||||
|
|
||||||
|
- name: Output mount facts grouped by mount point
|
||||||
|
debug:
|
||||||
|
var: ansible_facts.mounts | community.general.groupby_as_dict('mount')
|
||||||
|
|
||||||
|
This produces:
|
||||||
|
|
||||||
|
.. code-block:: ansible-output
|
||||||
|
|
||||||
|
TASK [Output mount facts grouped by device name] ******************************************
|
||||||
|
ok: [localhost] => {
|
||||||
|
"ansible_facts.mounts | community.general.groupby_as_dict('device')": {
|
||||||
|
"/dev/sda1": {
|
||||||
|
"block_available": 2000,
|
||||||
|
"block_size": 4096,
|
||||||
|
"block_total": 2345,
|
||||||
|
"block_used": 345,
|
||||||
|
"device": "/dev/sda1",
|
||||||
|
"fstype": "ext4",
|
||||||
|
"inode_available": 500,
|
||||||
|
"inode_total": 512,
|
||||||
|
"inode_used": 12,
|
||||||
|
"mount": "/boot",
|
||||||
|
"options": "rw,relatime,data=ordered",
|
||||||
|
"size_available": 56821,
|
||||||
|
"size_total": 543210,
|
||||||
|
"uuid": "ab31cade-d9c1-484d-8482-8a4cbee5241a"
|
||||||
|
},
|
||||||
|
"/dev/sda2": {
|
||||||
|
"block_available": 1234,
|
||||||
|
"block_size": 4096,
|
||||||
|
"block_total": 12345,
|
||||||
|
"block_used": 11111,
|
||||||
|
"device": "/dev/sda2",
|
||||||
|
"fstype": "ext4",
|
||||||
|
"inode_available": 1111,
|
||||||
|
"inode_total": 1234,
|
||||||
|
"inode_used": 123,
|
||||||
|
"mount": "/",
|
||||||
|
"options": "rw,relatime",
|
||||||
|
"size_available": 42143,
|
||||||
|
"size_total": 543210,
|
||||||
|
"uuid": "abcdef01-2345-6789-0abc-def012345678"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
TASK [Output mount facts grouped by mount point] ******************************************
|
||||||
|
ok: [localhost] => {
|
||||||
|
"ansible_facts.mounts | community.general.groupby_as_dict('mount')": {
|
||||||
|
"/": {
|
||||||
|
"block_available": 1234,
|
||||||
|
"block_size": 4096,
|
||||||
|
"block_total": 12345,
|
||||||
|
"block_used": 11111,
|
||||||
|
"device": "/dev/sda2",
|
||||||
|
"fstype": "ext4",
|
||||||
|
"inode_available": 1111,
|
||||||
|
"inode_total": 1234,
|
||||||
|
"inode_used": 123,
|
||||||
|
"mount": "/",
|
||||||
|
"options": "rw,relatime",
|
||||||
|
"size_available": 42143,
|
||||||
|
"size_total": 543210,
|
||||||
|
"uuid": "bdf50b7d-4859-40af-8665-c637ee7a7808"
|
||||||
|
},
|
||||||
|
"/boot": {
|
||||||
|
"block_available": 2000,
|
||||||
|
"block_size": 4096,
|
||||||
|
"block_total": 2345,
|
||||||
|
"block_used": 345,
|
||||||
|
"device": "/dev/sda1",
|
||||||
|
"fstype": "ext4",
|
||||||
|
"inode_available": 500,
|
||||||
|
"inode_total": 512,
|
||||||
|
"inode_used": 12,
|
||||||
|
"mount": "/boot",
|
||||||
|
"options": "rw,relatime,data=ordered",
|
||||||
|
"size_available": 56821,
|
||||||
|
"size_total": 543210,
|
||||||
|
"uuid": "ab31cade-d9c1-484d-8482-8a4cbee5241a"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
.. versionadded: 3.0.0
|
||||||
|
|
||||||
|
Merging lists of dictionaries
|
||||||
|
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||||
|
|
||||||
|
If you have two lists of dictionaries and want to combine them into a list of merged dictionaries, where two dictionaries are merged if they coincide in one attribute, you can use the ``lists_mergeby`` filter.
|
||||||
|
|
||||||
|
.. code-block:: yaml+jinja
|
||||||
|
|
||||||
|
- name: Merge two lists by common attribute 'name'
|
||||||
|
debug:
|
||||||
|
var: list1 | community.general.lists_mergeby(list2, 'name')
|
||||||
|
vars:
|
||||||
|
list1:
|
||||||
|
- name: foo
|
||||||
|
extra: true
|
||||||
|
- name: bar
|
||||||
|
extra: false
|
||||||
|
- name: meh
|
||||||
|
extra: true
|
||||||
|
list2:
|
||||||
|
- name: foo
|
||||||
|
path: /foo
|
||||||
|
- name: baz
|
||||||
|
path: /bazzz
|
||||||
|
|
||||||
|
This produces:
|
||||||
|
|
||||||
|
.. code-block:: ansible-output
|
||||||
|
|
||||||
|
TASK [Merge two lists by common attribute 'name'] ****************************************
|
||||||
|
ok: [localhost] => {
|
||||||
|
"list1 | community.general.lists_mergeby(list2, 'name')": [
|
||||||
|
{
|
||||||
|
"extra": false,
|
||||||
|
"name": "bar"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "baz",
|
||||||
|
"path": "/bazzz"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"extra": true,
|
||||||
|
"name": "foo",
|
||||||
|
"path": "/foo"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"extra": true,
|
||||||
|
"name": "meh"
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
||||||
|
|
||||||
|
.. versionadded: 2.0.0
|
||||||
|
|
||||||
|
Working with times
|
||||||
|
------------------
|
||||||
|
|
||||||
|
The ``to_time_unit`` filter allows to convert times from a human-readable string to a unit. For example, ``'4h 30min 12second' | community.general.to_time_unit('hour')`` gives the number of hours that correspond to 4 hours, 30 minutes and 12 seconds.
|
||||||
|
|
||||||
|
There are shorthands to directly convert to various units, like ``to_hours``, ``to_minutes``, ``to_seconds``, and so on. The following table lists all units that can be used:
|
||||||
|
|
||||||
|
.. list-table:: Units
|
||||||
|
:widths: 25 25 25 25
|
||||||
|
:header-rows: 1
|
||||||
|
|
||||||
|
* - Unit name
|
||||||
|
- Unit value in seconds
|
||||||
|
- Unit strings for filter
|
||||||
|
- Shorthand filter
|
||||||
|
* - Millisecond
|
||||||
|
- 1/1000 second
|
||||||
|
- ``ms``, ``millisecond``, ``milliseconds``, ``msec``, ``msecs``, ``msecond``, ``mseconds``
|
||||||
|
- ``to_milliseconds``
|
||||||
|
* - Second
|
||||||
|
- 1 second
|
||||||
|
- ``s``, ``sec``, ``secs``, ``second``, ``seconds``
|
||||||
|
- ``to_seconds``
|
||||||
|
* - Minute
|
||||||
|
- 60 seconds
|
||||||
|
- ``m``, ``min``, ``mins``, ``minute``, ``minutes``
|
||||||
|
- ``to_minutes``
|
||||||
|
* - Hour
|
||||||
|
- 60*60 seconds
|
||||||
|
- ``h``, ``hour``, ``hours``
|
||||||
|
- ``to_hours``
|
||||||
|
* - Day
|
||||||
|
- 24*60*60 seconds
|
||||||
|
- ``d``, ``day``, ``days``
|
||||||
|
- ``to_days``
|
||||||
|
* - Week
|
||||||
|
- 7*24*60*60 seconds
|
||||||
|
- ``w``, ``week``, ``weeks``
|
||||||
|
- ``to_weeks``
|
||||||
|
* - Month
|
||||||
|
- 30*24*60*60 seconds
|
||||||
|
- ``mo``, ``month``, ``months``
|
||||||
|
- ``to_months``
|
||||||
|
* - Year
|
||||||
|
- 365*24*60*60 seconds
|
||||||
|
- ``y``, ``year``, ``years``
|
||||||
|
- ``to_years``
|
||||||
|
|
||||||
|
Note that months and years are using a simplified representation: a month is 30 days, and a year is 365 days. If you need different definitions of months or years, you can pass them as keyword arguments. For example, if you want a year to be 365.25 days, and a month to be 30.5 days, you can write ``'11months 4' | community.general.to_years(year=365.25, month=30.5)``. These keyword arguments can be specified to ``to_time_unit`` and to all shorthand filters.
|
||||||
|
|
||||||
|
.. code-block:: yaml+jinja
|
||||||
|
|
||||||
|
- name: Convert string to seconds
|
||||||
|
debug:
|
||||||
|
msg: "{{ '30h 20m 10s 123ms' | community.general.to_time_unit('seconds') }}"
|
||||||
|
|
||||||
|
- name: Convert string to hours
|
||||||
|
debug:
|
||||||
|
msg: "{{ '30h 20m 10s 123ms' | community.general.to_hours }}"
|
||||||
|
|
||||||
|
- name: Convert string to years (using 365.25 days == 1 year)
|
||||||
|
debug:
|
||||||
|
msg: "{{ '400d 15h' | community.general.to_years(year=365.25) }}"
|
||||||
|
|
||||||
|
This produces:
|
||||||
|
|
||||||
|
.. code-block:: ansible-output
|
||||||
|
|
||||||
|
TASK [Convert string to seconds] **********************************************************
|
||||||
|
ok: [localhost] => {
|
||||||
|
"msg": "109210.123"
|
||||||
|
}
|
||||||
|
|
||||||
|
TASK [Convert string to hours] ************************************************************
|
||||||
|
ok: [localhost] => {
|
||||||
|
"msg": "30.336145277778"
|
||||||
|
}
|
||||||
|
|
||||||
|
TASK [Convert string to years (using 365.25 days == 1 year)] ******************************
|
||||||
|
ok: [localhost] => {
|
||||||
|
"msg": "1.096851471595"
|
||||||
|
}
|
||||||
|
|
||||||
|
.. versionadded: 0.2.0
|
||||||
|
|
||||||
|
Working with versions
|
||||||
|
---------------------
|
||||||
|
|
||||||
|
If you need to sort a list of version numbers, the Jinja ``sort`` filter is problematic. Since it sorts lexicographically, ``2.10`` will come before ``2.9``. To treat version numbers correctly, you can use the ``version_sort`` filter:
|
||||||
|
|
||||||
|
.. code-block:: yaml+jinja
|
||||||
|
|
||||||
|
- name: Sort list by version number
|
||||||
|
debug:
|
||||||
|
var: ansible_versions | community.general.version_sort
|
||||||
|
vars:
|
||||||
|
ansible_versions:
|
||||||
|
- '2.8.0'
|
||||||
|
- '2.11.0'
|
||||||
|
- '2.7.0'
|
||||||
|
- '2.10.0'
|
||||||
|
- '2.9.0'
|
||||||
|
|
||||||
|
This produces:
|
||||||
|
|
||||||
|
.. code-block:: ansible-output
|
||||||
|
|
||||||
|
TASK [Sort list by version number] ********************************************************
|
||||||
|
ok: [localhost] => {
|
||||||
|
"ansible_versions | community.general.version_sort": [
|
||||||
|
"2.7.0",
|
||||||
|
"2.8.0",
|
||||||
|
"2.9.0",
|
||||||
|
"2.10.0",
|
||||||
|
"2.11.0"
|
||||||
|
]
|
||||||
|
}
|
||||||
|
|
||||||
|
.. versionadded:: 2.2.0
|
||||||
|
|
||||||
|
Creating identifiers
|
||||||
|
--------------------
|
||||||
|
|
||||||
|
The following filters allow you to create identifiers.
|
||||||
|
|
||||||
|
Hashids
|
||||||
|
^^^^^^^
|
||||||
|
|
||||||
|
`Hashids <https://hashids.org/>`_ allow you to convert sequences of integers to short, unique string identifiers. This filter needs the `hashids Python library <https://pypi.org/project/hashids/>`_ installed on the controller.
|
||||||
|
|
||||||
|
.. code-block:: yaml+jinja
|
||||||
|
|
||||||
|
- name: "Create hashid"
|
||||||
|
debug:
|
||||||
|
msg: "{{ [1234, 5, 6] | community.general.hashids_encode }}"
|
||||||
|
|
||||||
|
- name: "Decode hashid"
|
||||||
|
debug:
|
||||||
|
msg: "{{ 'jm2Cytn' | community.general.hashids_decode }}"
|
||||||
|
|
||||||
|
This produces:
|
||||||
|
|
||||||
|
.. code-block:: ansible-output
|
||||||
|
|
||||||
|
TASK [Create hashid] **********************************************************************
|
||||||
|
ok: [localhost] => {
|
||||||
|
"msg": "jm2Cytn"
|
||||||
|
}
|
||||||
|
|
||||||
|
TASK [Decode hashid] **********************************************************************
|
||||||
|
ok: [localhost] => {
|
||||||
|
"msg": [
|
||||||
|
1234,
|
||||||
|
5,
|
||||||
|
6
|
||||||
|
]
|
||||||
|
}
|
||||||
|
|
||||||
|
The hashids filters accept keyword arguments to allow fine-tuning the hashids generated:
|
||||||
|
|
||||||
|
:salt: String to use as salt when hashing.
|
||||||
|
:alphabet: String of 16 or more unique characters to produce a hash.
|
||||||
|
:min_length: Minimum length of hash produced.
|
||||||
|
|
||||||
|
.. versionadded:: 3.0.0
|
||||||
|
|
||||||
|
Random MACs
|
||||||
|
^^^^^^^^^^^
|
||||||
|
|
||||||
|
You can use the ``random_mac`` filter to complete a partial `MAC address <https://en.wikipedia.org/wiki/MAC_address>`_ to a random 6-byte MAC address.
|
||||||
|
|
||||||
|
.. code-block:: yaml+jinja
|
||||||
|
|
||||||
|
- name: "Create a random MAC starting with ff:"
|
||||||
|
debug:
|
||||||
|
msg: "{{ 'FF' | community.general.random_mac }}"
|
||||||
|
|
||||||
|
- name: "Create a random MAC starting with 00:11:22:"
|
||||||
|
debug:
|
||||||
|
msg: "{{ '00:11:22' | community.general.random_mac }}"
|
||||||
|
|
||||||
|
This produces:
|
||||||
|
|
||||||
|
.. code-block:: ansible-output
|
||||||
|
|
||||||
|
TASK [Create a random MAC starting with ff:] **********************************************
|
||||||
|
ok: [localhost] => {
|
||||||
|
"msg": "ff:69:d3:78:7f:b4"
|
||||||
|
}
|
||||||
|
|
||||||
|
TASK [Create a random MAC starting with 00:11:22:] ****************************************
|
||||||
|
ok: [localhost] => {
|
||||||
|
"msg": "00:11:22:71:5d:3b"
|
||||||
|
}
|
||||||
|
|
||||||
|
You can also initialize the random number generator from a seed to create random-but-idempotent MAC addresses:
|
||||||
|
|
||||||
|
.. code-block:: yaml+jinja
|
||||||
|
|
||||||
|
"{{ '52:54:00' | community.general.random_mac(seed=inventory_hostname) }}"
|
||||||
|
|
||||||
|
Conversions
|
||||||
|
-----------
|
||||||
|
|
||||||
|
Parsing CSV files
|
||||||
|
^^^^^^^^^^^^^^^^^
|
||||||
|
|
||||||
|
Ansible offers the :ref:`community.general.read_csv module <ansible_collections.community.general.read_csv_module>` to read CSV files. Sometimes you need to parse CSV data from a string instead. For this, the ``from_csv`` filter exists.
|
||||||
|
|
||||||
|
.. code-block:: yaml+jinja
|
||||||
|
|
||||||
|
- name: "Parse CSV from string"
|
||||||
|
debug:
|
||||||
|
msg: "{{ csv_string | community.general.from_csv }}"
|
||||||
|
vars:
|
||||||
|
csv_string: |
|
||||||
|
foo,bar,baz
|
||||||
|
1,2,3
|
||||||
|
you,this,then
|
||||||
|
|
||||||
|
This produces:
|
||||||
|
|
||||||
|
.. code-block:: ansible-output
|
||||||
|
|
||||||
|
TASK [Parse CSV from string] **************************************************************
|
||||||
|
ok: [localhost] => {
|
||||||
|
"msg": [
|
||||||
|
{
|
||||||
|
"bar": "2",
|
||||||
|
"baz": "3",
|
||||||
|
"foo": "1"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"bar": "this",
|
||||||
|
"baz": "then",
|
||||||
|
"foo": "you"
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
||||||
|
|
||||||
|
The ``from_csv`` filter has several keyword arguments to control its behavior:
|
||||||
|
|
||||||
|
:dialect: Dialect of the CSV file. Default is ``excel``. Other possible choices are ``excel-tab`` and ``unix``. If one of ``delimiter``, ``skipinitialspace`` or ``strict`` is specified, ``dialect`` is ignored.
|
||||||
|
:fieldnames: A set of column names to use. If not provided, the first line of the CSV is assumed to contain the column names.
|
||||||
|
:delimiter: Sets the delimiter to use. Default depends on the dialect used.
|
||||||
|
:skipinitialspace: Set to ``true`` to ignore space directly after the delimiter. Default depends on the dialect used (usually ``false``).
|
||||||
|
:strict: Set to ``true`` to error out on invalid CSV input.
|
||||||
|
|
||||||
|
.. versionadded:: 3.0.0
|
||||||
|
|
||||||
|
Converting to JSON
|
||||||
|
^^^^^^^^^^^^^^^^^^
|
||||||
|
|
||||||
|
`JC <https://pypi.org/project/jc/>`_ is a CLI tool and Python library which allows interpreting the output of various CLI programs as JSON. It is also available as a filter in community.general. This filter needs the `jc Python library <https://pypi.org/project/jc/>`_ installed on the controller.
|
||||||
|
|
||||||
|
.. code-block:: yaml+jinja
|
||||||
|
|
||||||
|
- name: Run 'ls' to list files in /
|
||||||
|
command: ls /
|
||||||
|
register: result
|
||||||
|
|
||||||
|
- name: Parse the ls output
|
||||||
|
debug:
|
||||||
|
msg: "{{ result.stdout | community.general.jc('ls') }}"
|
||||||
|
|
||||||
|
This produces:
|
||||||
|
|
||||||
|
.. code-block:: ansible-output
|
||||||
|
|
||||||
|
TASK [Run 'ls' to list files in /] ********************************************************
|
||||||
|
changed: [localhost]
|
||||||
|
|
||||||
|
TASK [Parse the ls output] ****************************************************************
|
||||||
|
ok: [localhost] => {
|
||||||
|
"msg": [
|
||||||
|
{
|
||||||
|
"filename": "bin"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"filename": "boot"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"filename": "dev"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"filename": "etc"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"filename": "home"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"filename": "lib"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"filename": "proc"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"filename": "root"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"filename": "run"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"filename": "tmp"
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
||||||
|
|
||||||
|
.. versionadded:: 2.0.0
|
||||||
|
|
||||||
|
.. _ansible_collections.community.general.docsite.json_query_filter:
|
||||||
|
|
||||||
|
Selecting JSON data: JSON queries
|
||||||
|
---------------------------------
|
||||||
|
|
||||||
|
To select a single element or a data subset from a complex data structure in JSON format (for example, Ansible facts), use the ``json_query`` filter. The ``json_query`` filter lets you query a complex JSON structure and iterate over it using a loop structure.
|
||||||
|
|
||||||
|
.. note:: You must manually install the **jmespath** dependency on the Ansible controller before using this filter. This filter is built upon **jmespath**, and you can use the same syntax. For examples, see `jmespath examples <http://jmespath.org/examples.html>`_.
|
||||||
|
|
||||||
|
Consider this data structure:
|
||||||
|
|
||||||
|
.. code-block:: yaml+jinja
|
||||||
|
|
||||||
|
{
|
||||||
|
"domain_definition": {
|
||||||
|
"domain": {
|
||||||
|
"cluster": [
|
||||||
|
{
|
||||||
|
"name": "cluster1"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "cluster2"
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"server": [
|
||||||
|
{
|
||||||
|
"name": "server11",
|
||||||
|
"cluster": "cluster1",
|
||||||
|
"port": "8080"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "server12",
|
||||||
|
"cluster": "cluster1",
|
||||||
|
"port": "8090"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "server21",
|
||||||
|
"cluster": "cluster2",
|
||||||
|
"port": "9080"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "server22",
|
||||||
|
"cluster": "cluster2",
|
||||||
|
"port": "9090"
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"library": [
|
||||||
|
{
|
||||||
|
"name": "lib1",
|
||||||
|
"target": "cluster1"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "lib2",
|
||||||
|
"target": "cluster2"
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
To extract all clusters from this structure, you can use the following query:
|
||||||
|
|
||||||
|
.. code-block:: yaml+jinja
|
||||||
|
|
||||||
|
- name: Display all cluster names
|
||||||
|
ansible.builtin.debug:
|
||||||
|
var: item
|
||||||
|
loop: "{{ domain_definition | community.general.json_query('domain.cluster[*].name') }}"
|
||||||
|
|
||||||
|
To extract all server names:
|
||||||
|
|
||||||
|
.. code-block:: yaml+jinja
|
||||||
|
|
||||||
|
- name: Display all server names
|
||||||
|
ansible.builtin.debug:
|
||||||
|
var: item
|
||||||
|
loop: "{{ domain_definition | community.general.json_query('domain.server[*].name') }}"
|
||||||
|
|
||||||
|
To extract ports from cluster1:
|
||||||
|
|
||||||
|
.. code-block:: yaml+jinja
|
||||||
|
|
||||||
|
- name: Display all ports from cluster1
|
||||||
|
ansible.builtin.debug:
|
||||||
|
var: item
|
||||||
|
loop: "{{ domain_definition | community.general.json_query(server_name_cluster1_query) }}"
|
||||||
|
vars:
|
||||||
|
server_name_cluster1_query: "domain.server[?cluster=='cluster1'].port"
|
||||||
|
|
||||||
|
.. note:: You can use a variable to make the query more readable.
|
||||||
|
|
||||||
|
To print out the ports from cluster1 in a comma separated string:
|
||||||
|
|
||||||
|
.. code-block:: yaml+jinja
|
||||||
|
|
||||||
|
- name: Display all ports from cluster1 as a string
|
||||||
|
ansible.builtin.debug:
|
||||||
|
msg: "{{ domain_definition | community.general.json_query('domain.server[?cluster==`cluster1`].port') | join(', ') }}"
|
||||||
|
|
||||||
|
.. note:: In the example above, quoting literals using backticks avoids escaping quotes and maintains readability.
|
||||||
|
|
||||||
|
You can use YAML `single quote escaping <https://yaml.org/spec/current.html#id2534365>`_:
|
||||||
|
|
||||||
|
.. code-block:: yaml+jinja
|
||||||
|
|
||||||
|
- name: Display all ports from cluster1
|
||||||
|
ansible.builtin.debug:
|
||||||
|
var: item
|
||||||
|
loop: "{{ domain_definition | community.general.json_query('domain.server[?cluster==''cluster1''].port') }}"
|
||||||
|
|
||||||
|
.. note:: Escaping single quotes within single quotes in YAML is done by doubling the single quote.
|
||||||
|
|
||||||
|
To get a hash map with all ports and names of a cluster:
|
||||||
|
|
||||||
|
.. code-block:: yaml+jinja
|
||||||
|
|
||||||
|
- name: Display all server ports and names from cluster1
|
||||||
|
ansible.builtin.debug:
|
||||||
|
var: item
|
||||||
|
loop: "{{ domain_definition | community.general.json_query(server_name_cluster1_query) }}"
|
||||||
|
vars:
|
||||||
|
        server_name_cluster1_query: "domain.server[?cluster=='cluster1'].{name: name, port: port}"
|
||||||
|
|
||||||
|
To extract ports from all clusters with name starting with 'server1':
|
||||||
|
|
||||||
|
.. code-block:: yaml+jinja
|
||||||
|
|
||||||
|
- name: Display all ports from cluster1
|
||||||
|
ansible.builtin.debug:
|
||||||
|
msg: "{{ domain_definition | to_json | from_json | community.general.json_query(server_name_query) }}"
|
||||||
|
vars:
|
||||||
|
server_name_query: "domain.server[?starts_with(name,'server1')].port"
|
||||||
|
|
||||||
|
To extract ports from all clusters with name containing 'server1':
|
||||||
|
|
||||||
|
.. code-block:: yaml+jinja
|
||||||
|
|
||||||
|
- name: Display all ports from cluster1
|
||||||
|
ansible.builtin.debug:
|
||||||
|
msg: "{{ domain_definition | to_json | from_json | community.general.json_query(server_name_query) }}"
|
||||||
|
vars:
|
||||||
|
server_name_query: "domain.server[?contains(name,'server1')].port"
|
||||||
|
|
||||||
|
.. note:: While using ``starts_with`` and ``contains``, you have to use the ``to_json | from_json`` filters for correct parsing of the data structure.
|
||||||
@@ -1,6 +1,6 @@
|
|||||||
namespace: community
|
namespace: community
|
||||||
name: general
|
name: general
|
||||||
version: 2.5.5
|
version: 3.3.1
|
||||||
readme: README.md
|
readme: README.md
|
||||||
authors:
|
authors:
|
||||||
- Ansible (https://github.com/ansible)
|
- Ansible (https://github.com/ansible)
|
||||||
|
|||||||
318
meta/runtime.yml
318
meta/runtime.yml
@@ -1,31 +1,5 @@
|
|||||||
---
|
---
|
||||||
requires_ansible: '>=2.9.10'
|
requires_ansible: '>=2.9.10'
|
||||||
action_groups:
|
|
||||||
ovirt:
|
|
||||||
- ovirt_affinity_label_facts
|
|
||||||
- ovirt_api_facts
|
|
||||||
- ovirt_cluster_facts
|
|
||||||
- ovirt_datacenter_facts
|
|
||||||
- ovirt_disk_facts
|
|
||||||
- ovirt_event_facts
|
|
||||||
- ovirt_external_provider_facts
|
|
||||||
- ovirt_group_facts
|
|
||||||
- ovirt_host_facts
|
|
||||||
- ovirt_host_storage_facts
|
|
||||||
- ovirt_network_facts
|
|
||||||
- ovirt_nic_facts
|
|
||||||
- ovirt_permission_facts
|
|
||||||
- ovirt_quota_facts
|
|
||||||
- ovirt_scheduling_policy_facts
|
|
||||||
- ovirt_snapshot_facts
|
|
||||||
- ovirt_storage_domain_facts
|
|
||||||
- ovirt_storage_template_facts
|
|
||||||
- ovirt_storage_vm_facts
|
|
||||||
- ovirt_tag_facts
|
|
||||||
- ovirt_template_facts
|
|
||||||
- ovirt_user_facts
|
|
||||||
- ovirt_vm_facts
|
|
||||||
- ovirt_vmpool_facts
|
|
||||||
plugin_routing:
|
plugin_routing:
|
||||||
connection:
|
connection:
|
||||||
docker:
|
docker:
|
||||||
@@ -37,9 +11,24 @@ plugin_routing:
|
|||||||
redirect: community.google.gcp_storage_file
|
redirect: community.google.gcp_storage_file
|
||||||
hashi_vault:
|
hashi_vault:
|
||||||
redirect: community.hashi_vault.hashi_vault
|
redirect: community.hashi_vault.hashi_vault
|
||||||
|
nios:
|
||||||
|
deprecation:
|
||||||
|
removal_version: 5.0.0
|
||||||
|
warning_text: The community.general.nios lookup plugin has been deprecated.
|
||||||
|
Please use infoblox.nios_modules.nios_lookup instead.
|
||||||
|
nios_next_ip:
|
||||||
|
deprecation:
|
||||||
|
removal_version: 5.0.0
|
||||||
|
warning_text: The community.general.nios_next_ip lookup plugin has been deprecated.
|
||||||
|
Please use infoblox.nios_modules.nios_next_ip instead.
|
||||||
|
nios_next_network:
|
||||||
|
deprecation:
|
||||||
|
removal_version: 5.0.0
|
||||||
|
warning_text: The community.general.nios_next_network lookup plugin has been
|
||||||
|
deprecated. Please use infoblox.nios_modules.nios_next_network instead.
|
||||||
modules:
|
modules:
|
||||||
ali_instance_facts:
|
ali_instance_facts:
|
||||||
deprecation:
|
tombstone:
|
||||||
removal_version: 3.0.0
|
removal_version: 3.0.0
|
||||||
warning_text: Use community.general.ali_instance_info instead.
|
warning_text: Use community.general.ali_instance_info instead.
|
||||||
docker_compose:
|
docker_compose:
|
||||||
@@ -141,11 +130,13 @@ plugin_routing:
|
|||||||
gcp_forwarding_rule:
|
gcp_forwarding_rule:
|
||||||
tombstone:
|
tombstone:
|
||||||
removal_version: 2.0.0
|
removal_version: 2.0.0
|
||||||
warning_text: Use google.cloud.gcp_compute_forwarding_rule or google.cloud.gcp_compute_global_forwarding_rule instead.
|
warning_text: Use google.cloud.gcp_compute_forwarding_rule or google.cloud.gcp_compute_global_forwarding_rule
|
||||||
|
instead.
|
||||||
gcp_healthcheck:
|
gcp_healthcheck:
|
||||||
tombstone:
|
tombstone:
|
||||||
removal_version: 2.0.0
|
removal_version: 2.0.0
|
||||||
warning_text: Use google.cloud.gcp_compute_health_check, google.cloud.gcp_compute_http_health_check or google.cloud.gcp_compute_https_health_check instead.
|
warning_text: Use google.cloud.gcp_compute_health_check, google.cloud.gcp_compute_http_health_check
|
||||||
|
or google.cloud.gcp_compute_https_health_check instead.
|
||||||
gcp_target_proxy:
|
gcp_target_proxy:
|
||||||
tombstone:
|
tombstone:
|
||||||
removal_version: 2.0.0
|
removal_version: 2.0.0
|
||||||
@@ -156,37 +147,22 @@ plugin_routing:
|
|||||||
warning_text: Use google.cloud.gcp_compute_url_map instead.
|
warning_text: Use google.cloud.gcp_compute_url_map instead.
|
||||||
gcpubsub:
|
gcpubsub:
|
||||||
redirect: community.google.gcpubsub
|
redirect: community.google.gcpubsub
|
||||||
gcpubsub_info:
|
|
||||||
redirect: community.google.gcpubsub_info
|
|
||||||
gcpubsub_facts:
|
gcpubsub_facts:
|
||||||
redirect: community.google.gcpubsub_info
|
tombstone:
|
||||||
deprecation:
|
|
||||||
removal_version: 3.0.0
|
removal_version: 3.0.0
|
||||||
warning_text: Use community.google.gcpubsub_info instead.
|
warning_text: Use community.google.gcpubsub_info instead.
|
||||||
|
gcpubsub_info:
|
||||||
|
redirect: community.google.gcpubsub_info
|
||||||
gcspanner:
|
gcspanner:
|
||||||
tombstone:
|
tombstone:
|
||||||
removal_version: 2.0.0
|
removal_version: 2.0.0
|
||||||
warning_text: Use google.cloud.gcp_spanner_database and/or google.cloud.gcp_spanner_instance instead.
|
warning_text: Use google.cloud.gcp_spanner_database and/or google.cloud.gcp_spanner_instance
|
||||||
|
instead.
|
||||||
github_hooks:
|
github_hooks:
|
||||||
tombstone:
|
tombstone:
|
||||||
removal_version: 2.0.0
|
removal_version: 2.0.0
|
||||||
warning_text: Use community.general.github_webhook and community.general.github_webhook_info instead.
|
warning_text: Use community.general.github_webhook and community.general.github_webhook_info
|
||||||
gluster_heal_info:
|
instead.
|
||||||
deprecation:
|
|
||||||
removal_version: 3.0.0
|
|
||||||
warning_text: The gluster modules have migrated to the gluster.gluster collection. Use gluster.gluster.gluster_heal_info instead.
|
|
||||||
gluster_peer:
|
|
||||||
deprecation:
|
|
||||||
removal_version: 3.0.0
|
|
||||||
warning_text: The gluster modules have migrated to the gluster.gluster collection. Use gluster.gluster.gluster_peer instead.
|
|
||||||
gluster_volume:
|
|
||||||
deprecation:
|
|
||||||
removal_version: 3.0.0
|
|
||||||
warning_text: The gluster modules have migrated to the gluster.gluster collection. Use gluster.gluster.gluster_volume instead.
|
|
||||||
helm:
|
|
||||||
deprecation:
|
|
||||||
removal_version: 3.0.0
|
|
||||||
warning_text: The helm module in community.general has been deprecated. Use community.kubernetes.helm instead.
|
|
||||||
hetzner_failover_ip:
|
hetzner_failover_ip:
|
||||||
redirect: community.hrobot.failover_ip
|
redirect: community.hrobot.failover_ip
|
||||||
hetzner_failover_ip_info:
|
hetzner_failover_ip_info:
|
||||||
@@ -196,15 +172,19 @@ plugin_routing:
|
|||||||
hetzner_firewall_info:
|
hetzner_firewall_info:
|
||||||
redirect: community.hrobot.firewall_info
|
redirect: community.hrobot.firewall_info
|
||||||
hpilo_facts:
|
hpilo_facts:
|
||||||
deprecation:
|
tombstone:
|
||||||
removal_version: 3.0.0
|
removal_version: 3.0.0
|
||||||
warning_text: Use community.general.hpilo_info instead.
|
warning_text: Use community.general.hpilo_info instead.
|
||||||
|
idrac_firmware:
|
||||||
|
redirect: dellemc.openmanage.idrac_firmware
|
||||||
idrac_redfish_facts:
|
idrac_redfish_facts:
|
||||||
deprecation:
|
tombstone:
|
||||||
removal_version: 3.0.0
|
removal_version: 3.0.0
|
||||||
warning_text: Use community.general.idrac_redfish_info instead.
|
warning_text: Use community.general.idrac_redfish_info instead.
|
||||||
|
idrac_server_config_profile:
|
||||||
|
redirect: dellemc.openmanage.idrac_server_config_profile
|
||||||
jenkins_job_facts:
|
jenkins_job_facts:
|
||||||
deprecation:
|
tombstone:
|
||||||
removal_version: 3.0.0
|
removal_version: 3.0.0
|
||||||
warning_text: Use community.general.jenkins_job_info instead.
|
warning_text: Use community.general.jenkins_job_info instead.
|
||||||
katello:
|
katello:
|
||||||
@@ -224,23 +204,25 @@ plugin_routing:
|
|||||||
kubevirt_vm:
|
kubevirt_vm:
|
||||||
redirect: community.kubevirt.kubevirt_vm
|
redirect: community.kubevirt.kubevirt_vm
|
||||||
ldap_attr:
|
ldap_attr:
|
||||||
deprecation:
|
tombstone:
|
||||||
removal_version: 3.0.0
|
removal_version: 3.0.0
|
||||||
warning_text: Use community.general.ldap_attrs instead.
|
warning_text: Use community.general.ldap_attrs instead.
|
||||||
logicmonitor:
|
logicmonitor:
|
||||||
tombstone:
|
tombstone:
|
||||||
removal_version: 1.0.0
|
removal_version: 1.0.0
|
||||||
warning_text: The logicmonitor_facts module is no longer maintained and the API used has been disabled in 2017.
|
warning_text: The logicmonitor_facts module is no longer maintained and the
|
||||||
|
API used has been disabled in 2017.
|
||||||
logicmonitor_facts:
|
logicmonitor_facts:
|
||||||
tombstone:
|
tombstone:
|
||||||
removal_version: 1.0.0
|
removal_version: 1.0.0
|
||||||
warning_text: The logicmonitor_facts module is no longer maintained and the API used has been disabled in 2017.
|
warning_text: The logicmonitor_facts module is no longer maintained and the
|
||||||
|
API used has been disabled in 2017.
|
||||||
memset_memstore_facts:
|
memset_memstore_facts:
|
||||||
deprecation:
|
tombstone:
|
||||||
removal_version: 3.0.0
|
removal_version: 3.0.0
|
||||||
warning_text: Use community.general.memset_memstore_info instead.
|
warning_text: Use community.general.memset_memstore_info instead.
|
||||||
memset_server_facts:
|
memset_server_facts:
|
||||||
deprecation:
|
tombstone:
|
||||||
removal_version: 3.0.0
|
removal_version: 3.0.0
|
||||||
warning_text: Use community.general.memset_server_info instead.
|
warning_text: Use community.general.memset_server_info instead.
|
||||||
na_cdot_aggregate:
|
na_cdot_aggregate:
|
||||||
@@ -276,159 +258,242 @@ plugin_routing:
|
|||||||
removal_version: 2.0.0
|
removal_version: 2.0.0
|
||||||
warning_text: Use netapp.ontap.na_ontap_volume instead.
|
warning_text: Use netapp.ontap.na_ontap_volume instead.
|
||||||
na_ontap_gather_facts:
|
na_ontap_gather_facts:
|
||||||
deprecation:
|
tombstone:
|
||||||
removal_version: 3.0.0
|
removal_version: 3.0.0
|
||||||
warning_text: Use netapp.ontap.na_ontap_info instead.
|
warning_text: Use netapp.ontap.na_ontap_info instead.
|
||||||
nginx_status_facts:
|
nginx_status_facts:
|
||||||
deprecation:
|
tombstone:
|
||||||
removal_version: 3.0.0
|
removal_version: 3.0.0
|
||||||
warning_text: Use community.general.nginx_status_info instead.
|
warning_text: Use community.general.nginx_status_info instead.
|
||||||
one_image_facts:
|
nios_a_record:
|
||||||
deprecation:
|
deprecation:
|
||||||
|
removal_version: 5.0.0
|
||||||
|
warning_text: The community.general.nios_a_record module has been deprecated.
|
||||||
|
Please use infoblox.nios_modules.nios_a_record instead.
|
||||||
|
nios_aaaa_record:
|
||||||
|
deprecation:
|
||||||
|
removal_version: 5.0.0
|
||||||
|
warning_text: The community.general.nios_aaaa_record module has been deprecated.
|
||||||
|
Please use infoblox.nios_modules.nios_aaaa_record instead.
|
||||||
|
nios_cname_record:
|
||||||
|
deprecation:
|
||||||
|
removal_version: 5.0.0
|
||||||
|
warning_text: The community.general.nios_cname_record module has been deprecated.
|
||||||
|
Please use infoblox.nios_modules.nios_cname_record instead.
|
||||||
|
nios_dns_view:
|
||||||
|
deprecation:
|
||||||
|
removal_version: 5.0.0
|
||||||
|
warning_text: The community.general.nios_dns_view module has been deprecated.
|
||||||
|
Please use infoblox.nios_modules.nios_dns_view instead.
|
||||||
|
nios_fixed_address:
|
||||||
|
deprecation:
|
||||||
|
removal_version: 5.0.0
|
||||||
|
warning_text: The community.general.nios_fixed_address module has been deprecated.
|
||||||
|
Please use infoblox.nios_modules.nios_fixed_address instead.
|
||||||
|
nios_host_record:
|
||||||
|
deprecation:
|
||||||
|
removal_version: 5.0.0
|
||||||
|
warning_text: The community.general.nios_host_record module has been deprecated.
|
||||||
|
Please use infoblox.nios_modules.nios_host_record instead.
|
||||||
|
nios_member:
|
||||||
|
deprecation:
|
||||||
|
removal_version: 5.0.0
|
||||||
|
warning_text: The community.general.nios_member module has been deprecated.
|
||||||
|
Please use infoblox.nios_modules.nios_member instead.
|
||||||
|
nios_mx_record:
|
||||||
|
deprecation:
|
||||||
|
removal_version: 5.0.0
|
||||||
|
warning_text: The community.general.nios_mx_record module has been deprecated.
|
||||||
|
Please use infoblox.nios_modules.nios_mx_record instead.
|
||||||
|
nios_naptr_record:
|
||||||
|
deprecation:
|
||||||
|
removal_version: 5.0.0
|
||||||
|
warning_text: The community.general.nios_naptr_record module has been deprecated.
|
||||||
|
Please use infoblox.nios_modules.nios_naptr_record instead.
|
||||||
|
nios_network:
|
||||||
|
deprecation:
|
||||||
|
removal_version: 5.0.0
|
||||||
|
warning_text: The community.general.nios_network module has been deprecated.
|
||||||
|
Please use infoblox.nios_modules.nios_network instead.
|
||||||
|
nios_network_view:
|
||||||
|
deprecation:
|
||||||
|
removal_version: 5.0.0
|
||||||
|
warning_text: The community.general.nios_network_view module has been deprecated.
|
||||||
|
Please use infoblox.nios_modules.nios_network_view instead.
|
||||||
|
nios_nsgroup:
|
||||||
|
deprecation:
|
||||||
|
removal_version: 5.0.0
|
||||||
|
warning_text: The community.general.nios_nsgroup module has been deprecated.
|
||||||
|
Please use infoblox.nios_modules.nios_nsgroup instead.
|
||||||
|
nios_ptr_record:
|
||||||
|
deprecation:
|
||||||
|
removal_version: 5.0.0
|
||||||
|
warning_text: The community.general.nios_ptr_record module has been deprecated.
|
||||||
|
Please use infoblox.nios_modules.nios_ptr_record instead.
|
||||||
|
nios_srv_record:
|
||||||
|
deprecation:
|
||||||
|
removal_version: 5.0.0
|
||||||
|
warning_text: The community.general.nios_srv_record module has been deprecated.
|
||||||
|
Please use infoblox.nios_modules.nios_srv_record instead.
|
||||||
|
nios_txt_record:
|
||||||
|
deprecation:
|
||||||
|
removal_version: 5.0.0
|
||||||
|
warning_text: The community.general.nios_txt_record module has been deprecated.
|
||||||
|
Please use infoblox.nios_modules.nios_txt_record instead.
|
||||||
|
nios_zone:
|
||||||
|
deprecation:
|
||||||
|
removal_version: 5.0.0
|
||||||
|
warning_text: The community.general.nios_zone module has been deprecated.
|
||||||
|
Please use infoblox.nios_modules.nios_zone instead.
|
||||||
|
ome_device_info:
|
||||||
|
redirect: dellemc.openmanage.ome_device_info
|
||||||
|
one_image_facts:
|
||||||
|
tombstone:
|
||||||
removal_version: 3.0.0
|
removal_version: 3.0.0
|
||||||
warning_text: Use community.general.one_image_info instead.
|
warning_text: Use community.general.one_image_info instead.
|
||||||
onepassword_facts:
|
onepassword_facts:
|
||||||
deprecation:
|
tombstone:
|
||||||
removal_version: 3.0.0
|
removal_version: 3.0.0
|
||||||
warning_text: Use community.general.onepassword_info instead.
|
warning_text: Use community.general.onepassword_info instead.
|
||||||
oneview_datacenter_facts:
|
oneview_datacenter_facts:
|
||||||
deprecation:
|
tombstone:
|
||||||
removal_version: 3.0.0
|
removal_version: 3.0.0
|
||||||
warning_text: Use community.general.oneview_datacenter_info instead.
|
warning_text: Use community.general.oneview_datacenter_info instead.
|
||||||
oneview_enclosure_facts:
|
oneview_enclosure_facts:
|
||||||
deprecation:
|
tombstone:
|
||||||
removal_version: 3.0.0
|
removal_version: 3.0.0
|
||||||
warning_text: Use community.general.oneview_enclosure_info instead.
|
warning_text: Use community.general.oneview_enclosure_info instead.
|
||||||
oneview_ethernet_network_facts:
|
oneview_ethernet_network_facts:
|
||||||
deprecation:
|
tombstone:
|
||||||
removal_version: 3.0.0
|
removal_version: 3.0.0
|
||||||
warning_text: Use community.general.oneview_ethernet_network_info instead.
|
warning_text: Use community.general.oneview_ethernet_network_info instead.
|
||||||
oneview_fc_network_facts:
|
oneview_fc_network_facts:
|
||||||
deprecation:
|
tombstone:
|
||||||
removal_version: 3.0.0
|
removal_version: 3.0.0
|
||||||
warning_text: Use community.general.oneview_fc_network_info instead.
|
warning_text: Use community.general.oneview_fc_network_info instead.
|
||||||
oneview_fcoe_network_facts:
|
oneview_fcoe_network_facts:
|
||||||
deprecation:
|
tombstone:
|
||||||
removal_version: 3.0.0
|
removal_version: 3.0.0
|
||||||
warning_text: Use community.general.oneview_fcoe_network_info instead.
|
warning_text: Use community.general.oneview_fcoe_network_info instead.
|
||||||
oneview_logical_interconnect_group_facts:
|
oneview_logical_interconnect_group_facts:
|
||||||
deprecation:
|
tombstone:
|
||||||
removal_version: 3.0.0
|
removal_version: 3.0.0
|
||||||
warning_text: Use community.general.oneview_logical_interconnect_group_info instead.
|
warning_text: Use community.general.oneview_logical_interconnect_group_info
|
||||||
|
instead.
|
||||||
oneview_network_set_facts:
|
oneview_network_set_facts:
|
||||||
deprecation:
|
tombstone:
|
||||||
removal_version: 3.0.0
|
removal_version: 3.0.0
|
||||||
warning_text: Use community.general.oneview_network_set_info instead.
|
warning_text: Use community.general.oneview_network_set_info instead.
|
||||||
oneview_san_manager_facts:
|
oneview_san_manager_facts:
|
||||||
deprecation:
|
tombstone:
|
||||||
removal_version: 3.0.0
|
removal_version: 3.0.0
|
||||||
warning_text: Use community.general.oneview_san_manager_info instead.
|
warning_text: Use community.general.oneview_san_manager_info instead.
|
||||||
online_server_facts:
|
online_server_facts:
|
||||||
deprecation:
|
tombstone:
|
||||||
removal_version: 3.0.0
|
removal_version: 3.0.0
|
||||||
warning_text: Use community.general.online_server_info instead.
|
warning_text: Use community.general.online_server_info instead.
|
||||||
online_user_facts:
|
online_user_facts:
|
||||||
deprecation:
|
tombstone:
|
||||||
removal_version: 3.0.0
|
removal_version: 3.0.0
|
||||||
warning_text: Use community.general.online_user_info instead.
|
warning_text: Use community.general.online_user_info instead.
|
||||||
ovirt:
|
ovirt:
|
||||||
deprecation:
|
tombstone:
|
||||||
removal_version: 3.0.0
|
removal_version: 3.0.0
|
||||||
warning_text: Use ovirt.ovirt.ovirt_vm instead.
|
warning_text: Use ovirt.ovirt.ovirt_vm instead.
|
||||||
ovirt_affinity_label_facts:
|
ovirt_affinity_label_facts:
|
||||||
deprecation:
|
tombstone:
|
||||||
removal_version: 3.0.0
|
removal_version: 3.0.0
|
||||||
warning_text: Use ovirt.ovirt.ovirt_affinity_label_info instead.
|
warning_text: Use ovirt.ovirt.ovirt_affinity_label_info instead.
|
||||||
ovirt_api_facts:
|
ovirt_api_facts:
|
||||||
deprecation:
|
tombstone:
|
||||||
removal_version: 3.0.0
|
removal_version: 3.0.0
|
||||||
warning_text: Use ovirt.ovirt.ovirt_api_info instead.
|
warning_text: Use ovirt.ovirt.ovirt_api_info instead.
|
||||||
ovirt_cluster_facts:
|
ovirt_cluster_facts:
|
||||||
deprecation:
|
tombstone:
|
||||||
removal_version: 3.0.0
|
removal_version: 3.0.0
|
||||||
warning_text: Use ovirt.ovirt.ovirt_cluster_info instead.
|
warning_text: Use ovirt.ovirt.ovirt_cluster_info instead.
|
||||||
ovirt_datacenter_facts:
|
ovirt_datacenter_facts:
|
||||||
deprecation:
|
tombstone:
|
||||||
removal_version: 3.0.0
|
removal_version: 3.0.0
|
||||||
warning_text: Use ovirt.ovirt.ovirt_datacenter_info instead.
|
warning_text: Use ovirt.ovirt.ovirt_datacenter_info instead.
|
||||||
ovirt_disk_facts:
|
ovirt_disk_facts:
|
||||||
deprecation:
|
tombstone:
|
||||||
removal_version: 3.0.0
|
removal_version: 3.0.0
|
||||||
warning_text: Use ovirt.ovirt.ovirt_disk_info instead.
|
warning_text: Use ovirt.ovirt.ovirt_disk_info instead.
|
||||||
ovirt_event_facts:
|
ovirt_event_facts:
|
||||||
deprecation:
|
tombstone:
|
||||||
removal_version: 3.0.0
|
removal_version: 3.0.0
|
||||||
warning_text: Use ovirt.ovirt.ovirt_event_info instead.
|
warning_text: Use ovirt.ovirt.ovirt_event_info instead.
|
||||||
ovirt_external_provider_facts:
|
ovirt_external_provider_facts:
|
||||||
deprecation:
|
tombstone:
|
||||||
removal_version: 3.0.0
|
removal_version: 3.0.0
|
||||||
warning_text: Use ovirt.ovirt.ovirt_external_provider_info instead.
|
warning_text: Use ovirt.ovirt.ovirt_external_provider_info instead.
|
||||||
ovirt_group_facts:
|
ovirt_group_facts:
|
||||||
deprecation:
|
tombstone:
|
||||||
removal_version: 3.0.0
|
removal_version: 3.0.0
|
||||||
warning_text: Use ovirt.ovirt.ovirt_group_info instead.
|
warning_text: Use ovirt.ovirt.ovirt_group_info instead.
|
||||||
ovirt_host_facts:
|
ovirt_host_facts:
|
||||||
deprecation:
|
tombstone:
|
||||||
removal_version: 3.0.0
|
removal_version: 3.0.0
|
||||||
warning_text: Use ovirt.ovirt.ovirt_host_info instead.
|
warning_text: Use ovirt.ovirt.ovirt_host_info instead.
|
||||||
ovirt_host_storage_facts:
|
ovirt_host_storage_facts:
|
||||||
deprecation:
|
tombstone:
|
||||||
removal_version: 3.0.0
|
removal_version: 3.0.0
|
||||||
warning_text: Use ovirt.ovirt.ovirt_host_storage_info instead.
|
warning_text: Use ovirt.ovirt.ovirt_host_storage_info instead.
|
||||||
ovirt_network_facts:
|
ovirt_network_facts:
|
||||||
deprecation:
|
tombstone:
|
||||||
removal_version: 3.0.0
|
removal_version: 3.0.0
|
||||||
warning_text: Use ovirt.ovirt.ovirt_network_info instead.
|
warning_text: Use ovirt.ovirt.ovirt_network_info instead.
|
||||||
ovirt_nic_facts:
|
ovirt_nic_facts:
|
||||||
deprecation:
|
tombstone:
|
||||||
removal_version: 3.0.0
|
removal_version: 3.0.0
|
||||||
warning_text: Use ovirt.ovirt.ovirt_nic_info instead.
|
warning_text: Use ovirt.ovirt.ovirt_nic_info instead.
|
||||||
ovirt_permission_facts:
|
ovirt_permission_facts:
|
||||||
deprecation:
|
tombstone:
|
||||||
removal_version: 3.0.0
|
removal_version: 3.0.0
|
||||||
warning_text: Use ovirt.ovirt.ovirt_permission_info instead.
|
warning_text: Use ovirt.ovirt.ovirt_permission_info instead.
|
||||||
ovirt_quota_facts:
|
ovirt_quota_facts:
|
||||||
deprecation:
|
tombstone:
|
||||||
removal_version: 3.0.0
|
removal_version: 3.0.0
|
||||||
warning_text: Use ovirt.ovirt.ovirt_quota_info instead.
|
warning_text: Use ovirt.ovirt.ovirt_quota_info instead.
|
||||||
ovirt_scheduling_policy_facts:
|
ovirt_scheduling_policy_facts:
|
||||||
deprecation:
|
tombstone:
|
||||||
removal_version: 3.0.0
|
removal_version: 3.0.0
|
||||||
warning_text: Use ovirt.ovirt.ovirt_scheduling_policy_info instead.
|
warning_text: Use ovirt.ovirt.ovirt_scheduling_policy_info instead.
|
||||||
ovirt_snapshot_facts:
|
ovirt_snapshot_facts:
|
||||||
deprecation:
|
tombstone:
|
||||||
removal_version: 3.0.0
|
removal_version: 3.0.0
|
||||||
warning_text: Use ovirt.ovirt.ovirt_snapshot_info instead.
|
warning_text: Use ovirt.ovirt.ovirt_snapshot_info instead.
|
||||||
ovirt_storage_domain_facts:
|
ovirt_storage_domain_facts:
|
||||||
deprecation:
|
tombstone:
|
||||||
removal_version: 3.0.0
|
removal_version: 3.0.0
|
||||||
warning_text: Use ovirt.ovirt.ovirt_storage_domain_info instead.
|
warning_text: Use ovirt.ovirt.ovirt_storage_domain_info instead.
|
||||||
ovirt_storage_template_facts:
|
ovirt_storage_template_facts:
|
||||||
deprecation:
|
tombstone:
|
||||||
removal_version: 3.0.0
|
removal_version: 3.0.0
|
||||||
warning_text: Use ovirt.ovirt.ovirt_storage_template_info instead.
|
warning_text: Use ovirt.ovirt.ovirt_storage_template_info instead.
|
||||||
ovirt_storage_vm_facts:
|
ovirt_storage_vm_facts:
|
||||||
deprecation:
|
tombstone:
|
||||||
removal_version: 3.0.0
|
removal_version: 3.0.0
|
||||||
warning_text: Use ovirt.ovirt.ovirt_storage_vm_info instead.
|
warning_text: Use ovirt.ovirt.ovirt_storage_vm_info instead.
|
||||||
ovirt_tag_facts:
|
ovirt_tag_facts:
|
||||||
deprecation:
|
tombstone:
|
||||||
removal_version: 3.0.0
|
removal_version: 3.0.0
|
||||||
warning_text: Use ovirt.ovirt.ovirt_tag_info instead.
|
warning_text: Use ovirt.ovirt.ovirt_tag_info instead.
|
||||||
ovirt_template_facts:
|
ovirt_template_facts:
|
||||||
deprecation:
|
tombstone:
|
||||||
removal_version: 3.0.0
|
removal_version: 3.0.0
|
||||||
warning_text: Use ovirt.ovirt.ovirt_template_info instead.
|
warning_text: Use ovirt.ovirt.ovirt_template_info instead.
|
||||||
ovirt_user_facts:
|
ovirt_user_facts:
|
||||||
deprecation:
|
tombstone:
|
||||||
removal_version: 3.0.0
|
removal_version: 3.0.0
|
||||||
warning_text: Use ovirt.ovirt.ovirt_user_info instead.
|
warning_text: Use ovirt.ovirt.ovirt_user_info instead.
|
||||||
ovirt_vm_facts:
|
ovirt_vm_facts:
|
||||||
deprecation:
|
tombstone:
|
||||||
removal_version: 3.0.0
|
removal_version: 3.0.0
|
||||||
warning_text: Use ovirt.ovirt.ovirt_vm_info instead.
|
warning_text: Use ovirt.ovirt.ovirt_vm_info instead.
|
||||||
ovirt_vmpool_facts:
|
ovirt_vmpool_facts:
|
||||||
deprecation:
|
tombstone:
|
||||||
removal_version: 3.0.0
|
removal_version: 3.0.0
|
||||||
warning_text: Use ovirt.ovirt.ovirt_vmpool_info instead.
|
warning_text: Use ovirt.ovirt.ovirt_vmpool_info instead.
|
||||||
postgresql_copy:
|
postgresql_copy:
|
||||||
@@ -471,52 +536,52 @@ plugin_routing:
|
|||||||
redirect: community.postgresql.postgresql_table
|
redirect: community.postgresql.postgresql_table
|
||||||
postgresql_tablespace:
|
postgresql_tablespace:
|
||||||
redirect: community.postgresql.postgresql_tablespace
|
redirect: community.postgresql.postgresql_tablespace
|
||||||
postgresql_user_obj_stat_info:
|
|
||||||
redirect: community.postgresql.postgresql_user_obj_stat_info
|
|
||||||
postgresql_user:
|
postgresql_user:
|
||||||
redirect: community.postgresql.postgresql_user
|
redirect: community.postgresql.postgresql_user
|
||||||
|
postgresql_user_obj_stat_info:
|
||||||
|
redirect: community.postgresql.postgresql_user_obj_stat_info
|
||||||
purefa_facts:
|
purefa_facts:
|
||||||
deprecation:
|
tombstone:
|
||||||
removal_version: 3.0.0
|
removal_version: 3.0.0
|
||||||
warning_text: Use purestorage.flasharray.purefa_info instead.
|
warning_text: Use purestorage.flasharray.purefa_info instead.
|
||||||
purefb_facts:
|
purefb_facts:
|
||||||
deprecation:
|
tombstone:
|
||||||
removal_version: 3.0.0
|
removal_version: 3.0.0
|
||||||
warning_text: Use purestorage.flashblade.purefb_info instead.
|
warning_text: Use purestorage.flashblade.purefb_info instead.
|
||||||
python_requirements_facts:
|
python_requirements_facts:
|
||||||
deprecation:
|
tombstone:
|
||||||
removal_version: 3.0.0
|
removal_version: 3.0.0
|
||||||
warning_text: Use community.general.python_requirements_info instead.
|
warning_text: Use community.general.python_requirements_info instead.
|
||||||
redfish_facts:
|
redfish_facts:
|
||||||
deprecation:
|
tombstone:
|
||||||
removal_version: 3.0.0
|
removal_version: 3.0.0
|
||||||
warning_text: Use community.general.redfish_info instead.
|
warning_text: Use community.general.redfish_info instead.
|
||||||
scaleway_image_facts:
|
scaleway_image_facts:
|
||||||
deprecation:
|
tombstone:
|
||||||
removal_version: 3.0.0
|
removal_version: 3.0.0
|
||||||
warning_text: Use community.general.scaleway_image_info instead.
|
warning_text: Use community.general.scaleway_image_info instead.
|
||||||
scaleway_ip_facts:
|
scaleway_ip_facts:
|
||||||
deprecation:
|
tombstone:
|
||||||
removal_version: 3.0.0
|
removal_version: 3.0.0
|
||||||
warning_text: Use community.general.scaleway_ip_info instead.
|
warning_text: Use community.general.scaleway_ip_info instead.
|
||||||
scaleway_organization_facts:
|
scaleway_organization_facts:
|
||||||
deprecation:
|
tombstone:
|
||||||
removal_version: 3.0.0
|
removal_version: 3.0.0
|
||||||
warning_text: Use community.general.scaleway_organization_info instead.
|
warning_text: Use community.general.scaleway_organization_info instead.
|
||||||
scaleway_security_group_facts:
|
scaleway_security_group_facts:
|
||||||
deprecation:
|
tombstone:
|
||||||
removal_version: 3.0.0
|
removal_version: 3.0.0
|
||||||
warning_text: Use community.general.scaleway_security_group_info instead.
|
warning_text: Use community.general.scaleway_security_group_info instead.
|
||||||
scaleway_server_facts:
|
scaleway_server_facts:
|
||||||
deprecation:
|
tombstone:
|
||||||
removal_version: 3.0.0
|
removal_version: 3.0.0
|
||||||
warning_text: Use community.general.scaleway_server_info instead.
|
warning_text: Use community.general.scaleway_server_info instead.
|
||||||
scaleway_snapshot_facts:
|
scaleway_snapshot_facts:
|
||||||
deprecation:
|
tombstone:
|
||||||
removal_version: 3.0.0
|
removal_version: 3.0.0
|
||||||
warning_text: Use community.general.scaleway_snapshot_info instead.
|
warning_text: Use community.general.scaleway_snapshot_info instead.
|
||||||
scaleway_volume_facts:
|
scaleway_volume_facts:
|
||||||
deprecation:
|
tombstone:
|
||||||
removal_version: 3.0.0
|
removal_version: 3.0.0
|
||||||
warning_text: Use community.general.scaleway_volume_info instead.
|
warning_text: Use community.general.scaleway_volume_info instead.
|
||||||
sf_account_manager:
|
sf_account_manager:
|
||||||
@@ -540,15 +605,15 @@ plugin_routing:
|
|||||||
removal_version: 2.0.0
|
removal_version: 2.0.0
|
||||||
warning_text: Use netapp.elementsw.na_elementsw_volume instead.
|
warning_text: Use netapp.elementsw.na_elementsw_volume instead.
|
||||||
smartos_image_facts:
|
smartos_image_facts:
|
||||||
deprecation:
|
tombstone:
|
||||||
removal_version: 3.0.0
|
removal_version: 3.0.0
|
||||||
warning_text: Use community.general.smartos_image_info instead.
|
warning_text: Use community.general.smartos_image_info instead.
|
||||||
vertica_facts:
|
vertica_facts:
|
||||||
deprecation:
|
tombstone:
|
||||||
removal_version: 3.0.0
|
removal_version: 3.0.0
|
||||||
warning_text: Use community.general.vertica_info instead.
|
warning_text: Use community.general.vertica_info instead.
|
||||||
xenserver_guest_facts:
|
xenserver_guest_facts:
|
||||||
deprecation:
|
tombstone:
|
||||||
removal_version: 3.0.0
|
removal_version: 3.0.0
|
||||||
warning_text: Use community.general.xenserver_guest_info instead.
|
warning_text: Use community.general.xenserver_guest_info instead.
|
||||||
doc_fragments:
|
doc_fragments:
|
||||||
@@ -562,6 +627,11 @@ plugin_routing:
|
|||||||
redirect: community.kubevirt.kubevirt_common_options
|
redirect: community.kubevirt.kubevirt_common_options
|
||||||
kubevirt_vm_options:
|
kubevirt_vm_options:
|
||||||
redirect: community.kubevirt.kubevirt_vm_options
|
redirect: community.kubevirt.kubevirt_vm_options
|
||||||
|
nios:
|
||||||
|
deprecation:
|
||||||
|
removal_version: 5.0.0
|
||||||
|
warning_text: The community.general.nios document fragment has been deprecated.
|
||||||
|
Please use infoblox.nios_modules.nios instead.
|
||||||
postgresql:
|
postgresql:
|
||||||
redirect: community.postgresql.postgresql
|
redirect: community.postgresql.postgresql
|
||||||
module_utils:
|
module_utils:
|
||||||
@@ -579,21 +649,33 @@ plugin_routing:
|
|||||||
redirect: community.hrobot.robot
|
redirect: community.hrobot.robot
|
||||||
kubevirt:
|
kubevirt:
|
||||||
redirect: community.kubevirt.kubevirt
|
redirect: community.kubevirt.kubevirt
|
||||||
|
net_tools.nios.api:
|
||||||
|
deprecation:
|
||||||
|
removal_version: 5.0.0
|
||||||
|
warning_text: The community.general.net_tools.nios.api module_utils has been
|
||||||
|
deprecated. Please use infoblox.nios_modules.api instead.
|
||||||
postgresql:
|
postgresql:
|
||||||
redirect: community.postgresql.postgresql
|
redirect: community.postgresql.postgresql
|
||||||
|
remote_management.dellemc.dellemc_idrac:
|
||||||
|
redirect: dellemc.openmanage.dellemc_idrac
|
||||||
|
remote_management.dellemc.ome:
|
||||||
|
redirect: dellemc.openmanage.ome
|
||||||
callback:
|
callback:
|
||||||
actionable:
|
actionable:
|
||||||
tombstone:
|
tombstone:
|
||||||
removal_version: 2.0.0
|
removal_version: 2.0.0
|
||||||
warning_text: Use the 'default' callback plugin with 'display_skipped_hosts = no' and 'display_ok_hosts = no' options.
|
warning_text: Use the 'default' callback plugin with 'display_skipped_hosts
|
||||||
|
= no' and 'display_ok_hosts = no' options.
|
||||||
full_skip:
|
full_skip:
|
||||||
tombstone:
|
tombstone:
|
||||||
removal_version: 2.0.0
|
removal_version: 2.0.0
|
||||||
warning_text: Use the 'default' callback plugin with 'display_skipped_hosts = no' option.
|
warning_text: Use the 'default' callback plugin with 'display_skipped_hosts
|
||||||
|
= no' option.
|
||||||
stderr:
|
stderr:
|
||||||
tombstone:
|
tombstone:
|
||||||
removal_version: 2.0.0
|
removal_version: 2.0.0
|
||||||
warning_text: Use the 'default' callback plugin with 'display_failed_stderr = yes' option.
|
warning_text: Use the 'default' callback plugin with 'display_failed_stderr
|
||||||
|
= yes' option.
|
||||||
inventory:
|
inventory:
|
||||||
docker_machine:
|
docker_machine:
|
||||||
redirect: community.docker.docker_machine
|
redirect: community.docker.docker_machine
|
||||||
|
|||||||
@@ -1,4 +1,3 @@
|
|||||||
# coding: utf-8 -*-
|
|
||||||
# Copyright: (c) 2020, quidame <quidame@poivron.org>
|
# Copyright: (c) 2020, quidame <quidame@poivron.org>
|
||||||
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
|
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||||
|
|
||||||
|
|||||||
@@ -1,4 +1,3 @@
|
|||||||
# coding: utf-8 -*-
|
|
||||||
# Copyright: (c) 2020, Amin Vakil <info@aminvakil.com>
|
# Copyright: (c) 2020, Amin Vakil <info@aminvakil.com>
|
||||||
# Copyright: (c) 2016-2018, Matt Davis <mdavis@ansible.com>
|
# Copyright: (c) 2016-2018, Matt Davis <mdavis@ansible.com>
|
||||||
# Copyright: (c) 2018, Sam Doran <sdoran@redhat.com>
|
# Copyright: (c) 2018, Sam Doran <sdoran@redhat.com>
|
||||||
@@ -8,7 +7,7 @@ from __future__ import (absolute_import, division, print_function)
|
|||||||
__metaclass__ = type
|
__metaclass__ = type
|
||||||
|
|
||||||
from ansible.errors import AnsibleError, AnsibleConnectionFailure
|
from ansible.errors import AnsibleError, AnsibleConnectionFailure
|
||||||
from ansible.module_utils._text import to_native, to_text
|
from ansible.module_utils.common.text.converters import to_native, to_text
|
||||||
from ansible.module_utils.common.collections import is_string
|
from ansible.module_utils.common.collections import is_string
|
||||||
from ansible.plugins.action import ActionBase
|
from ansible.plugins.action import ActionBase
|
||||||
from ansible.utils.display import Display
|
from ansible.utils.display import Display
|
||||||
|
|||||||
@@ -81,7 +81,7 @@ DOCUMENTATION = '''
|
|||||||
|
|
||||||
import re
|
import re
|
||||||
|
|
||||||
from ansible.module_utils._text import to_bytes
|
from ansible.module_utils.common.text.converters import to_bytes
|
||||||
from ansible.plugins.become import BecomeBase
|
from ansible.plugins.become import BecomeBase
|
||||||
|
|
||||||
|
|
||||||
|
|||||||
@@ -82,7 +82,7 @@ DOCUMENTATION = '''
|
|||||||
|
|
||||||
import re
|
import re
|
||||||
|
|
||||||
from ansible.module_utils._text import to_bytes
|
from ansible.module_utils.common.text.converters import to_bytes
|
||||||
from ansible.plugins.become import BecomeBase
|
from ansible.plugins.become import BecomeBase
|
||||||
|
|
||||||
|
|
||||||
|
|||||||
1
plugins/cache/memcached.py
vendored
1
plugins/cache/memcached.py
vendored
@@ -1,4 +1,3 @@
|
|||||||
# coding: utf-8 -*-
|
|
||||||
# (c) 2014, Brian Coca, Josh Drake, et al
|
# (c) 2014, Brian Coca, Josh Drake, et al
|
||||||
# (c) 2017 Ansible Project
|
# (c) 2017 Ansible Project
|
||||||
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
|
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||||
|
|||||||
1
plugins/cache/pickle.py
vendored
1
plugins/cache/pickle.py
vendored
@@ -1,4 +1,3 @@
|
|||||||
# coding: utf-8 -*-
|
|
||||||
# (c) 2017, Brian Coca
|
# (c) 2017, Brian Coca
|
||||||
# (c) 2017 Ansible Project
|
# (c) 2017 Ansible Project
|
||||||
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
|
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||||
|
|||||||
3
plugins/cache/redis.py
vendored
3
plugins/cache/redis.py
vendored
@@ -1,4 +1,3 @@
|
|||||||
# coding: utf-8 -*-
|
|
||||||
# (c) 2014, Brian Coca, Josh Drake, et al
|
# (c) 2014, Brian Coca, Josh Drake, et al
|
||||||
# (c) 2017 Ansible Project
|
# (c) 2017 Ansible Project
|
||||||
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
|
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||||
@@ -68,7 +67,7 @@ import json
|
|||||||
|
|
||||||
from ansible import constants as C
|
from ansible import constants as C
|
||||||
from ansible.errors import AnsibleError
|
from ansible.errors import AnsibleError
|
||||||
from ansible.module_utils._text import to_native
|
from ansible.module_utils.common.text.converters import to_native
|
||||||
from ansible.parsing.ajson import AnsibleJSONEncoder, AnsibleJSONDecoder
|
from ansible.parsing.ajson import AnsibleJSONEncoder, AnsibleJSONDecoder
|
||||||
from ansible.plugins.cache import BaseCacheModule
|
from ansible.plugins.cache import BaseCacheModule
|
||||||
from ansible.release import __version__ as ansible_base_version
|
from ansible.release import __version__ as ansible_base_version
|
||||||
|
|||||||
1
plugins/cache/yaml.py
vendored
1
plugins/cache/yaml.py
vendored
@@ -1,4 +1,3 @@
|
|||||||
# coding: utf-8 -*-
|
|
||||||
# (c) 2017, Brian Coca
|
# (c) 2017, Brian Coca
|
||||||
# (c) 2017 Ansible Project
|
# (c) 2017 Ansible Project
|
||||||
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
|
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||||
|
|||||||
@@ -1,4 +1,3 @@
|
|||||||
# coding: utf-8 -*-
|
|
||||||
# (C) 2012, Michael DeHaan, <michael.dehaan@gmail.com>
|
# (C) 2012, Michael DeHaan, <michael.dehaan@gmail.com>
|
||||||
# (c) 2017 Ansible Project
|
# (c) 2017 Ansible Project
|
||||||
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
|
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||||
|
|||||||
@@ -1,4 +1,3 @@
|
|||||||
# coding: utf-8 -*-
|
|
||||||
# (c) 2018, Ivan Aragones Muniesa <ivan.aragones.muniesa@gmail.com>
|
# (c) 2018, Ivan Aragones Muniesa <ivan.aragones.muniesa@gmail.com>
|
||||||
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
|
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||||
'''
|
'''
|
||||||
|
|||||||
@@ -1,4 +1,3 @@
|
|||||||
# coding: utf-8 -*-
|
|
||||||
# (c) 2016, Dag Wieers <dag@wieers.com>
|
# (c) 2016, Dag Wieers <dag@wieers.com>
|
||||||
# (c) 2017 Ansible Project
|
# (c) 2017 Ansible Project
|
||||||
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
|
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||||
|
|||||||
@@ -792,7 +792,7 @@ from ansible.utils.color import colorize, hostcolor
|
|||||||
from ansible.template import Templar
|
from ansible.template import Templar
|
||||||
from ansible.vars.manager import VariableManager
|
from ansible.vars.manager import VariableManager
|
||||||
from ansible.plugins.callback.default import CallbackModule as Default
|
from ansible.plugins.callback.default import CallbackModule as Default
|
||||||
from ansible.module_utils._text import to_text
|
from ansible.module_utils.common.text.converters import to_text
|
||||||
|
|
||||||
|
|
||||||
class DummyStdout(object):
|
class DummyStdout(object):
|
||||||
|
|||||||
@@ -1,4 +1,3 @@
|
|||||||
# coding: utf-8 -*-
|
|
||||||
# (C) 2014, Matt Martz <matt@sivel.net>
|
# (C) 2014, Matt Martz <matt@sivel.net>
|
||||||
# (c) 2017 Ansible Project
|
# (c) 2017 Ansible Project
|
||||||
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
|
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||||
|
|||||||
@@ -1,4 +1,3 @@
|
|||||||
# coding: utf-8 -*-
|
|
||||||
# Copyright (C) 2016 maxn nikolaev.makc@gmail.com
|
# Copyright (C) 2016 maxn nikolaev.makc@gmail.com
|
||||||
# Copyright (c) 2017 Ansible Project
|
# Copyright (c) 2017 Ansible Project
|
||||||
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
|
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||||
|
|||||||
@@ -1,4 +1,3 @@
|
|||||||
# coding: utf-8 -*-
|
|
||||||
# (C) 2012, Michael DeHaan, <michael.dehaan@gmail.com>
|
# (C) 2012, Michael DeHaan, <michael.dehaan@gmail.com>
|
||||||
# (c) 2017 Ansible Project
|
# (c) 2017 Ansible Project
|
||||||
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
|
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||||
@@ -32,7 +31,7 @@ import time
|
|||||||
import json
|
import json
|
||||||
|
|
||||||
from ansible.utils.path import makedirs_safe
|
from ansible.utils.path import makedirs_safe
|
||||||
from ansible.module_utils._text import to_bytes
|
from ansible.module_utils.common.text.converters import to_bytes
|
||||||
from ansible.module_utils.common._collections_compat import MutableMapping
|
from ansible.module_utils.common._collections_compat import MutableMapping
|
||||||
from ansible.parsing.ajson import AnsibleJSONEncoder
|
from ansible.parsing.ajson import AnsibleJSONEncoder
|
||||||
from ansible.plugins.callback import CallbackBase
|
from ansible.plugins.callback import CallbackBase
|
||||||
|
|||||||
@@ -1,4 +1,3 @@
|
|||||||
# coding: utf-8 -*-
|
|
||||||
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
|
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||||
|
|
||||||
from __future__ import (absolute_import, division, print_function)
|
from __future__ import (absolute_import, division, print_function)
|
||||||
|
|||||||
@@ -1,4 +1,3 @@
|
|||||||
# coding: utf-8 -*-
|
|
||||||
# (c) 2018, Samir Musali <samir.musali@logdna.com>
|
# (c) 2018, Samir Musali <samir.musali@logdna.com>
|
||||||
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
|
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||||
|
|
||||||
|
|||||||
@@ -1,4 +1,3 @@
|
|||||||
# coding: utf-8 -*-
|
|
||||||
# (c) 2015, Logentries.com, Jimmy Tang <jimmy.tang@logentries.com>
|
# (c) 2015, Logentries.com, Jimmy Tang <jimmy.tang@logentries.com>
|
||||||
# (c) 2017 Ansible Project
|
# (c) 2017 Ansible Project
|
||||||
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
|
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||||
@@ -112,7 +111,7 @@ try:
|
|||||||
except ImportError:
|
except ImportError:
|
||||||
HAS_FLATDICT = False
|
HAS_FLATDICT = False
|
||||||
|
|
||||||
from ansible.module_utils._text import to_bytes, to_text
|
from ansible.module_utils.common.text.converters import to_bytes, to_text
|
||||||
from ansible.plugins.callback import CallbackBase
|
from ansible.plugins.callback import CallbackBase
|
||||||
|
|
||||||
# Todo:
|
# Todo:
|
||||||
|
|||||||
@@ -1,4 +1,3 @@
|
|||||||
# coding: utf-8 -*-
|
|
||||||
# (C) 2020, Yevhen Khmelenko <ujenmr@gmail.com>
|
# (C) 2020, Yevhen Khmelenko <ujenmr@gmail.com>
|
||||||
# (C) 2017 Ansible Project
|
# (C) 2017 Ansible Project
|
||||||
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
|
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||||
|
|||||||
@@ -62,7 +62,7 @@ import re
|
|||||||
import smtplib
|
import smtplib
|
||||||
|
|
||||||
from ansible.module_utils.six import string_types
|
from ansible.module_utils.six import string_types
|
||||||
from ansible.module_utils._text import to_bytes
|
from ansible.module_utils.common.text.converters import to_bytes
|
||||||
from ansible.parsing.ajson import AnsibleJSONEncoder
|
from ansible.parsing.ajson import AnsibleJSONEncoder
|
||||||
from ansible.plugins.callback import CallbackBase
|
from ansible.plugins.callback import CallbackBase
|
||||||
|
|
||||||
|
|||||||
@@ -10,22 +10,23 @@ DOCUMENTATION = '''
|
|||||||
name: nrdp
|
name: nrdp
|
||||||
type: notification
|
type: notification
|
||||||
author: "Remi VERCHERE (@rverchere)"
|
author: "Remi VERCHERE (@rverchere)"
|
||||||
short_description: post task result to a nagios server through nrdp
|
short_description: Post task results to a Nagios server through nrdp
|
||||||
description:
|
description:
|
||||||
- this callback send playbook result to nagios
|
- This callback send playbook result to Nagios.
|
||||||
- nagios shall use NRDP to recive passive events
|
- Nagios shall use NRDP to recive passive events.
|
||||||
- the passive check is sent to a dedicated host/service for ansible
|
- The passive check is sent to a dedicated host/service for Ansible.
|
||||||
options:
|
options:
|
||||||
url:
|
url:
|
||||||
description: url of the nrdp server
|
description: URL of the nrdp server.
|
||||||
required: True
|
required: true
|
||||||
env:
|
env:
|
||||||
- name : NRDP_URL
|
- name : NRDP_URL
|
||||||
ini:
|
ini:
|
||||||
- section: callback_nrdp
|
- section: callback_nrdp
|
||||||
key: url
|
key: url
|
||||||
|
type: string
|
||||||
validate_certs:
|
validate_certs:
|
||||||
description: (bool) validate the SSL certificate of the nrdp server. (For HTTPS url)
|
description: Validate the SSL certificate of the nrdp server. (Used for HTTPS URLs.)
|
||||||
env:
|
env:
|
||||||
- name: NRDP_VALIDATE_CERTS
|
- name: NRDP_VALIDATE_CERTS
|
||||||
ini:
|
ini:
|
||||||
@@ -33,32 +34,36 @@ DOCUMENTATION = '''
|
|||||||
key: validate_nrdp_certs
|
key: validate_nrdp_certs
|
||||||
- section: callback_nrdp
|
- section: callback_nrdp
|
||||||
key: validate_certs
|
key: validate_certs
|
||||||
default: False
|
type: boolean
|
||||||
|
default: false
|
||||||
aliases: [ validate_nrdp_certs ]
|
aliases: [ validate_nrdp_certs ]
|
||||||
token:
|
token:
|
||||||
description: token to be allowed to push nrdp events
|
description: Token to be allowed to push nrdp events.
|
||||||
required: True
|
required: true
|
||||||
env:
|
env:
|
||||||
- name: NRDP_TOKEN
|
- name: NRDP_TOKEN
|
||||||
ini:
|
ini:
|
||||||
- section: callback_nrdp
|
- section: callback_nrdp
|
||||||
key: token
|
key: token
|
||||||
|
type: string
|
||||||
hostname:
|
hostname:
|
||||||
description: hostname where the passive check is linked to
|
description: Hostname where the passive check is linked to.
|
||||||
required: True
|
required: true
|
||||||
env:
|
env:
|
||||||
- name : NRDP_HOSTNAME
|
- name : NRDP_HOSTNAME
|
||||||
ini:
|
ini:
|
||||||
- section: callback_nrdp
|
- section: callback_nrdp
|
||||||
key: hostname
|
key: hostname
|
||||||
|
type: string
|
||||||
servicename:
|
servicename:
|
||||||
description: service where the passive check is linked to
|
description: Service where the passive check is linked to.
|
||||||
required: True
|
required: true
|
||||||
env:
|
env:
|
||||||
- name : NRDP_SERVICENAME
|
- name : NRDP_SERVICENAME
|
||||||
ini:
|
ini:
|
||||||
- section: callback_nrdp
|
- section: callback_nrdp
|
||||||
key: servicename
|
key: servicename
|
||||||
|
type: string
|
||||||
'''
|
'''
|
||||||
|
|
||||||
import os
|
import os
|
||||||
|
|||||||
@@ -1,4 +1,3 @@
|
|||||||
# coding: utf-8 -*-
|
|
||||||
# (c) 2017 Ansible Project
|
# (c) 2017 Ansible Project
|
||||||
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
|
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||||
|
|
||||||
|
|||||||
@@ -1,4 +1,3 @@
|
|||||||
# coding: utf-8 -*-
|
|
||||||
# (c) 2012, Michael DeHaan, <michael.dehaan@gmail.com>
|
# (c) 2012, Michael DeHaan, <michael.dehaan@gmail.com>
|
||||||
# (c) 2017 Ansible Project
|
# (c) 2017 Ansible Project
|
||||||
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
|
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||||
|
|||||||
@@ -1,4 +1,3 @@
|
|||||||
# coding: utf-8 -*-
|
|
||||||
# (c) Fastly, inc 2016
|
# (c) Fastly, inc 2016
|
||||||
# (c) 2017 Ansible Project
|
# (c) 2017 Ansible Project
|
||||||
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
|
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||||
@@ -41,7 +40,7 @@ import difflib
|
|||||||
|
|
||||||
from ansible import constants as C
|
from ansible import constants as C
|
||||||
from ansible.plugins.callback import CallbackBase
|
from ansible.plugins.callback import CallbackBase
|
||||||
from ansible.module_utils._text import to_text
|
from ansible.module_utils.common.text.converters import to_text
|
||||||
|
|
||||||
try:
|
try:
|
||||||
codeCodes = C.COLOR_CODES
|
codeCodes = C.COLOR_CODES
|
||||||
|
|||||||
@@ -1,4 +1,3 @@
|
|||||||
# coding: utf-8 -*-
|
|
||||||
# (C) 2014-2015, Matt Martz <matt@sivel.net>
|
# (C) 2014-2015, Matt Martz <matt@sivel.net>
|
||||||
# (C) 2017 Ansible Project
|
# (C) 2017 Ansible Project
|
||||||
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
|
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||||
@@ -59,7 +58,7 @@ import os
|
|||||||
import uuid
|
import uuid
|
||||||
|
|
||||||
from ansible import context
|
from ansible import context
|
||||||
from ansible.module_utils._text import to_text
|
from ansible.module_utils.common.text.converters import to_text
|
||||||
from ansible.module_utils.urls import open_url
|
from ansible.module_utils.urls import open_url
|
||||||
from ansible.plugins.callback import CallbackBase
|
from ansible.plugins.callback import CallbackBase
|
||||||
|
|
||||||
|
|||||||
@@ -68,6 +68,16 @@ DOCUMENTATION = '''
|
|||||||
type: bool
|
type: bool
|
||||||
default: false
|
default: false
|
||||||
version_added: 2.0.0
|
version_added: 2.0.0
|
||||||
|
batch:
|
||||||
|
description:
|
||||||
|
- Correlation ID which can be set across multiple playbook executions.
|
||||||
|
env:
|
||||||
|
- name: SPLUNK_BATCH
|
||||||
|
ini:
|
||||||
|
- section: callback_splunk
|
||||||
|
key: batch
|
||||||
|
type: str
|
||||||
|
version_added: 3.3.0
|
||||||
'''
|
'''
|
||||||
|
|
||||||
EXAMPLES = '''
|
EXAMPLES = '''
|
||||||
@@ -107,7 +117,7 @@ class SplunkHTTPCollectorSource(object):
|
|||||||
self.ip_address = socket.gethostbyname(socket.gethostname())
|
self.ip_address = socket.gethostbyname(socket.gethostname())
|
||||||
self.user = getpass.getuser()
|
self.user = getpass.getuser()
|
||||||
|
|
||||||
def send_event(self, url, authtoken, validate_certs, include_milliseconds, state, result, runtime):
|
def send_event(self, url, authtoken, validate_certs, include_milliseconds, batch, state, result, runtime):
|
||||||
if result._task_fields['args'].get('_ansible_check_mode') is True:
|
if result._task_fields['args'].get('_ansible_check_mode') is True:
|
||||||
self.ansible_check_mode = True
|
self.ansible_check_mode = True
|
||||||
|
|
||||||
@@ -126,6 +136,8 @@ class SplunkHTTPCollectorSource(object):
|
|||||||
data = {}
|
data = {}
|
||||||
data['uuid'] = result._task._uuid
|
data['uuid'] = result._task._uuid
|
||||||
data['session'] = self.session
|
data['session'] = self.session
|
||||||
|
if batch is not None:
|
||||||
|
data['batch'] = batch
|
||||||
data['status'] = state
|
data['status'] = state
|
||||||
|
|
||||||
if include_milliseconds:
|
if include_milliseconds:
|
||||||
@@ -175,6 +187,7 @@ class CallbackModule(CallbackBase):
|
|||||||
self.authtoken = None
|
self.authtoken = None
|
||||||
self.validate_certs = None
|
self.validate_certs = None
|
||||||
self.include_milliseconds = None
|
self.include_milliseconds = None
|
||||||
|
self.batch = None
|
||||||
self.splunk = SplunkHTTPCollectorSource()
|
self.splunk = SplunkHTTPCollectorSource()
|
||||||
|
|
||||||
def _runtime(self, result):
|
def _runtime(self, result):
|
||||||
@@ -212,6 +225,8 @@ class CallbackModule(CallbackBase):
|
|||||||
|
|
||||||
self.include_milliseconds = self.get_option('include_milliseconds')
|
self.include_milliseconds = self.get_option('include_milliseconds')
|
||||||
|
|
||||||
|
self.batch = self.get_option('batch')
|
||||||
|
|
||||||
def v2_playbook_on_start(self, playbook):
|
def v2_playbook_on_start(self, playbook):
|
||||||
self.splunk.ansible_playbook = basename(playbook._file_name)
|
self.splunk.ansible_playbook = basename(playbook._file_name)
|
||||||
|
|
||||||
@@ -227,6 +242,7 @@ class CallbackModule(CallbackBase):
|
|||||||
self.authtoken,
|
self.authtoken,
|
||||||
self.validate_certs,
|
self.validate_certs,
|
||||||
self.include_milliseconds,
|
self.include_milliseconds,
|
||||||
|
self.batch,
|
||||||
'OK',
|
'OK',
|
||||||
result,
|
result,
|
||||||
self._runtime(result)
|
self._runtime(result)
|
||||||
@@ -238,6 +254,7 @@ class CallbackModule(CallbackBase):
|
|||||||
self.authtoken,
|
self.authtoken,
|
||||||
self.validate_certs,
|
self.validate_certs,
|
||||||
self.include_milliseconds,
|
self.include_milliseconds,
|
||||||
|
self.batch,
|
||||||
'SKIPPED',
|
'SKIPPED',
|
||||||
result,
|
result,
|
||||||
self._runtime(result)
|
self._runtime(result)
|
||||||
@@ -249,6 +266,7 @@ class CallbackModule(CallbackBase):
|
|||||||
self.authtoken,
|
self.authtoken,
|
||||||
self.validate_certs,
|
self.validate_certs,
|
||||||
self.include_milliseconds,
|
self.include_milliseconds,
|
||||||
|
self.batch,
|
||||||
'FAILED',
|
'FAILED',
|
||||||
result,
|
result,
|
||||||
self._runtime(result)
|
self._runtime(result)
|
||||||
@@ -260,6 +278,7 @@ class CallbackModule(CallbackBase):
|
|||||||
self.authtoken,
|
self.authtoken,
|
||||||
self.validate_certs,
|
self.validate_certs,
|
||||||
self.include_milliseconds,
|
self.include_milliseconds,
|
||||||
|
self.batch,
|
||||||
'FAILED',
|
'FAILED',
|
||||||
result,
|
result,
|
||||||
self._runtime(result)
|
self._runtime(result)
|
||||||
@@ -271,6 +290,7 @@ class CallbackModule(CallbackBase):
|
|||||||
self.authtoken,
|
self.authtoken,
|
||||||
self.validate_certs,
|
self.validate_certs,
|
||||||
self.include_milliseconds,
|
self.include_milliseconds,
|
||||||
|
self.batch,
|
||||||
'UNREACHABLE',
|
'UNREACHABLE',
|
||||||
result,
|
result,
|
||||||
self._runtime(result)
|
self._runtime(result)
|
||||||
|
|||||||
@@ -1,4 +1,3 @@
|
|||||||
# coding: utf-8 -*-
|
|
||||||
# (c) 2017 Ansible Project
|
# (c) 2017 Ansible Project
|
||||||
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
|
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||||
|
|
||||||
|
|||||||
@@ -1,4 +1,3 @@
|
|||||||
# coding: utf-8 -*-
|
|
||||||
# Copyright: (c) 2017, Allyson Bowles <@akatch>
|
# Copyright: (c) 2017, Allyson Bowles <@akatch>
|
||||||
# Copyright: (c) 2012-2014, Michael DeHaan <michael.dehaan@gmail.com>
|
# Copyright: (c) 2012-2014, Michael DeHaan <michael.dehaan@gmail.com>
|
||||||
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
|
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||||
@@ -23,7 +22,7 @@ DOCUMENTATION = '''
|
|||||||
from os.path import basename
|
from os.path import basename
|
||||||
from ansible import constants as C
|
from ansible import constants as C
|
||||||
from ansible import context
|
from ansible import context
|
||||||
from ansible.module_utils._text import to_text
|
from ansible.module_utils.common.text.converters import to_text
|
||||||
from ansible.utils.color import colorize, hostcolor
|
from ansible.utils.color import colorize, hostcolor
|
||||||
from ansible.plugins.callback.default import CallbackModule as CallbackModule_default
|
from ansible.plugins.callback.default import CallbackModule as CallbackModule_default
|
||||||
|
|
||||||
|
|||||||
@@ -1,4 +1,3 @@
|
|||||||
# coding: utf-8 -*-
|
|
||||||
# (c) 2017 Ansible Project
|
# (c) 2017 Ansible Project
|
||||||
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
|
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||||
|
|
||||||
@@ -26,7 +25,7 @@ import re
|
|||||||
import string
|
import string
|
||||||
import sys
|
import sys
|
||||||
|
|
||||||
from ansible.module_utils._text import to_bytes, to_text
|
from ansible.module_utils.common.text.converters import to_bytes, to_text
|
||||||
from ansible.module_utils.six import string_types
|
from ansible.module_utils.six import string_types
|
||||||
from ansible.parsing.yaml.dumper import AnsibleDumper
|
from ansible.parsing.yaml.dumper import AnsibleDumper
|
||||||
from ansible.plugins.callback import CallbackBase, strip_internal_keys, module_response_deepcopy
|
from ansible.plugins.callback import CallbackBase, strip_internal_keys, module_response_deepcopy
|
||||||
|
|||||||
@@ -1,4 +1,3 @@
|
|||||||
# coding: utf-8 -*-
|
|
||||||
# Based on local.py (c) 2012, Michael DeHaan <michael.dehaan@gmail.com>
|
# Based on local.py (c) 2012, Michael DeHaan <michael.dehaan@gmail.com>
|
||||||
#
|
#
|
||||||
# (c) 2013, Maykel Moya <mmoya@speedyrails.com>
|
# (c) 2013, Maykel Moya <mmoya@speedyrails.com>
|
||||||
@@ -55,7 +54,7 @@ from ansible.errors import AnsibleError
|
|||||||
from ansible.module_utils.basic import is_executable
|
from ansible.module_utils.basic import is_executable
|
||||||
from ansible.module_utils.common.process import get_bin_path
|
from ansible.module_utils.common.process import get_bin_path
|
||||||
from ansible.module_utils.six.moves import shlex_quote
|
from ansible.module_utils.six.moves import shlex_quote
|
||||||
from ansible.module_utils._text import to_bytes, to_native
|
from ansible.module_utils.common.text.converters import to_bytes, to_native
|
||||||
from ansible.plugins.connection import ConnectionBase, BUFSIZE
|
from ansible.plugins.connection import ConnectionBase, BUFSIZE
|
||||||
from ansible.utils.display import Display
|
from ansible.utils.display import Display
|
||||||
|
|
||||||
@@ -63,7 +62,7 @@ display = Display()
|
|||||||
|
|
||||||
|
|
||||||
class Connection(ConnectionBase):
|
class Connection(ConnectionBase):
|
||||||
''' Local chroot based connections '''
|
""" Local chroot based connections """
|
||||||
|
|
||||||
transport = 'community.general.chroot'
|
transport = 'community.general.chroot'
|
||||||
has_pipelining = True
|
has_pipelining = True
|
||||||
@@ -96,7 +95,7 @@ class Connection(ConnectionBase):
|
|||||||
raise AnsibleError("%s does not look like a chrootable dir (/bin/sh missing)" % self.chroot)
|
raise AnsibleError("%s does not look like a chrootable dir (/bin/sh missing)" % self.chroot)
|
||||||
|
|
||||||
def _connect(self):
|
def _connect(self):
|
||||||
''' connect to the chroot '''
|
""" connect to the chroot """
|
||||||
if os.path.isabs(self.get_option('chroot_exe')):
|
if os.path.isabs(self.get_option('chroot_exe')):
|
||||||
self.chroot_cmd = self.get_option('chroot_exe')
|
self.chroot_cmd = self.get_option('chroot_exe')
|
||||||
else:
|
else:
|
||||||
@@ -111,17 +110,17 @@ class Connection(ConnectionBase):
|
|||||||
self._connected = True
|
self._connected = True
|
||||||
|
|
||||||
def _buffered_exec_command(self, cmd, stdin=subprocess.PIPE):
|
def _buffered_exec_command(self, cmd, stdin=subprocess.PIPE):
|
||||||
''' run a command on the chroot. This is only needed for implementing
|
""" run a command on the chroot. This is only needed for implementing
|
||||||
put_file() get_file() so that we don't have to read the whole file
|
put_file() get_file() so that we don't have to read the whole file
|
||||||
into memory.
|
into memory.
|
||||||
|
|
||||||
compared to exec_command() it looses some niceties like being able to
|
compared to exec_command() it looses some niceties like being able to
|
||||||
return the process's exit code immediately.
|
return the process's exit code immediately.
|
||||||
'''
|
"""
|
||||||
executable = self.get_option('executable')
|
executable = self.get_option('executable')
|
||||||
local_cmd = [self.chroot_cmd, self.chroot, executable, '-c', cmd]
|
local_cmd = [self.chroot_cmd, self.chroot, executable, '-c', cmd]
|
||||||
|
|
||||||
display.vvv("EXEC %s" % (local_cmd), host=self.chroot)
|
display.vvv("EXEC %s" % local_cmd, host=self.chroot)
|
||||||
local_cmd = [to_bytes(i, errors='surrogate_or_strict') for i in local_cmd]
|
local_cmd = [to_bytes(i, errors='surrogate_or_strict') for i in local_cmd]
|
||||||
p = subprocess.Popen(local_cmd, shell=False, stdin=stdin,
|
p = subprocess.Popen(local_cmd, shell=False, stdin=stdin,
|
||||||
stdout=subprocess.PIPE, stderr=subprocess.PIPE)
|
stdout=subprocess.PIPE, stderr=subprocess.PIPE)
|
||||||
@@ -129,16 +128,17 @@ class Connection(ConnectionBase):
|
|||||||
return p
|
return p
|
||||||
|
|
||||||
def exec_command(self, cmd, in_data=None, sudoable=False):
|
def exec_command(self, cmd, in_data=None, sudoable=False):
|
||||||
''' run a command on the chroot '''
|
""" run a command on the chroot """
|
||||||
super(Connection, self).exec_command(cmd, in_data=in_data, sudoable=sudoable)
|
super(Connection, self).exec_command(cmd, in_data=in_data, sudoable=sudoable)
|
||||||
|
|
||||||
p = self._buffered_exec_command(cmd)
|
p = self._buffered_exec_command(cmd)
|
||||||
|
|
||||||
stdout, stderr = p.communicate(in_data)
|
stdout, stderr = p.communicate(in_data)
|
||||||
return (p.returncode, stdout, stderr)
|
return p.returncode, stdout, stderr
|
||||||
|
|
||||||
def _prefix_login_path(self, remote_path):
|
@staticmethod
|
||||||
''' Make sure that we put files into a standard path
|
def _prefix_login_path(remote_path):
|
||||||
|
""" Make sure that we put files into a standard path
|
||||||
|
|
||||||
If a path is relative, then we need to choose where to put it.
|
If a path is relative, then we need to choose where to put it.
|
||||||
ssh chooses $HOME but we aren't guaranteed that a home dir will
|
ssh chooses $HOME but we aren't guaranteed that a home dir will
|
||||||
@@ -146,13 +146,13 @@ class Connection(ConnectionBase):
|
|||||||
This also happens to be the former default.
|
This also happens to be the former default.
|
||||||
|
|
||||||
Can revisit using $HOME instead if it's a problem
|
Can revisit using $HOME instead if it's a problem
|
||||||
'''
|
"""
|
||||||
if not remote_path.startswith(os.path.sep):
|
if not remote_path.startswith(os.path.sep):
|
||||||
remote_path = os.path.join(os.path.sep, remote_path)
|
remote_path = os.path.join(os.path.sep, remote_path)
|
||||||
return os.path.normpath(remote_path)
|
return os.path.normpath(remote_path)
|
||||||
|
|
||||||
def put_file(self, in_path, out_path):
|
def put_file(self, in_path, out_path):
|
||||||
''' transfer a file from local to chroot '''
|
""" transfer a file from local to chroot """
|
||||||
super(Connection, self).put_file(in_path, out_path)
|
super(Connection, self).put_file(in_path, out_path)
|
||||||
display.vvv("PUT %s TO %s" % (in_path, out_path), host=self.chroot)
|
display.vvv("PUT %s TO %s" % (in_path, out_path), host=self.chroot)
|
||||||
|
|
||||||
@@ -178,7 +178,7 @@ class Connection(ConnectionBase):
|
|||||||
raise AnsibleError("file or module does not exist at: %s" % in_path)
|
raise AnsibleError("file or module does not exist at: %s" % in_path)
|
||||||
|
|
||||||
def fetch_file(self, in_path, out_path):
|
def fetch_file(self, in_path, out_path):
|
||||||
''' fetch a file from chroot to local '''
|
""" fetch a file from chroot to local """
|
||||||
super(Connection, self).fetch_file(in_path, out_path)
|
super(Connection, self).fetch_file(in_path, out_path)
|
||||||
display.vvv("FETCH %s TO %s" % (in_path, out_path), host=self.chroot)
|
display.vvv("FETCH %s TO %s" % (in_path, out_path), host=self.chroot)
|
||||||
|
|
||||||
@@ -202,6 +202,6 @@ class Connection(ConnectionBase):
|
|||||||
raise AnsibleError("failed to transfer file %s to %s:\n%s\n%s" % (in_path, out_path, stdout, stderr))
|
raise AnsibleError("failed to transfer file %s to %s:\n%s\n%s" % (in_path, out_path, stdout, stderr))
|
||||||
|
|
||||||
def close(self):
|
def close(self):
|
||||||
''' terminate the connection; nothing to do here '''
|
""" terminate the connection; nothing to do here """
|
||||||
super(Connection, self).close()
|
super(Connection, self).close()
|
||||||
self._connected = False
|
self._connected = False
|
||||||
|
|||||||
@@ -1,4 +1,3 @@
|
|||||||
# coding: utf-8 -*-
|
|
||||||
# Based on local.py (c) 2012, Michael DeHaan <michael.dehaan@gmail.com>
|
# Based on local.py (c) 2012, Michael DeHaan <michael.dehaan@gmail.com>
|
||||||
# Based on chroot.py (c) 2013, Maykel Moya <mmoya@speedyrails.com>
|
# Based on chroot.py (c) 2013, Maykel Moya <mmoya@speedyrails.com>
|
||||||
# Copyright (c) 2013, Michael Scherer <misc@zarb.org>
|
# Copyright (c) 2013, Michael Scherer <misc@zarb.org>
|
||||||
@@ -45,7 +44,7 @@ display = Display()
|
|||||||
|
|
||||||
|
|
||||||
class Connection(ConnectionBase):
|
class Connection(ConnectionBase):
|
||||||
''' Func-based connections '''
|
""" Func-based connections """
|
||||||
|
|
||||||
has_pipelining = False
|
has_pipelining = False
|
||||||
|
|
||||||
@@ -54,6 +53,7 @@ class Connection(ConnectionBase):
|
|||||||
self.host = host
|
self.host = host
|
||||||
# port is unused, this go on func
|
# port is unused, this go on func
|
||||||
self.port = port
|
self.port = port
|
||||||
|
self.client = None
|
||||||
|
|
||||||
def connect(self, port=None):
|
def connect(self, port=None):
|
||||||
if not HAVE_FUNC:
|
if not HAVE_FUNC:
|
||||||
@@ -63,31 +63,32 @@ class Connection(ConnectionBase):
|
|||||||
return self
|
return self
|
||||||
|
|
||||||
def exec_command(self, cmd, become_user=None, sudoable=False, executable='/bin/sh', in_data=None):
|
def exec_command(self, cmd, become_user=None, sudoable=False, executable='/bin/sh', in_data=None):
|
||||||
''' run a command on the remote minion '''
|
""" run a command on the remote minion """
|
||||||
|
|
||||||
if in_data:
|
if in_data:
|
||||||
raise AnsibleError("Internal Error: this module does not support optimized module pipelining")
|
raise AnsibleError("Internal Error: this module does not support optimized module pipelining")
|
||||||
|
|
||||||
# totally ignores privlege escalation
|
# totally ignores privlege escalation
|
||||||
display.vvv("EXEC %s" % (cmd), host=self.host)
|
display.vvv("EXEC %s" % cmd, host=self.host)
|
||||||
p = self.client.command.run(cmd)[self.host]
|
p = self.client.command.run(cmd)[self.host]
|
||||||
return (p[0], p[1], p[2])
|
return p[0], p[1], p[2]
|
||||||
|
|
||||||
def _normalize_path(self, path, prefix):
|
@staticmethod
|
||||||
|
def _normalize_path(path, prefix):
|
||||||
if not path.startswith(os.path.sep):
|
if not path.startswith(os.path.sep):
|
||||||
path = os.path.join(os.path.sep, path)
|
path = os.path.join(os.path.sep, path)
|
||||||
normpath = os.path.normpath(path)
|
normpath = os.path.normpath(path)
|
||||||
return os.path.join(prefix, normpath[1:])
|
return os.path.join(prefix, normpath[1:])
|
||||||
|
|
||||||
def put_file(self, in_path, out_path):
|
def put_file(self, in_path, out_path):
|
||||||
''' transfer a file from local to remote '''
|
""" transfer a file from local to remote """
|
||||||
|
|
||||||
out_path = self._normalize_path(out_path, '/')
|
out_path = self._normalize_path(out_path, '/')
|
||||||
display.vvv("PUT %s TO %s" % (in_path, out_path), host=self.host)
|
display.vvv("PUT %s TO %s" % (in_path, out_path), host=self.host)
|
||||||
self.client.local.copyfile.send(in_path, out_path)
|
self.client.local.copyfile.send(in_path, out_path)
|
||||||
|
|
||||||
def fetch_file(self, in_path, out_path):
|
def fetch_file(self, in_path, out_path):
|
||||||
''' fetch a file from remote to local '''
|
""" fetch a file from remote to local """
|
||||||
|
|
||||||
in_path = self._normalize_path(in_path, '/')
|
in_path = self._normalize_path(in_path, '/')
|
||||||
display.vvv("FETCH %s TO %s" % (in_path, out_path), host=self.host)
|
display.vvv("FETCH %s TO %s" % (in_path, out_path), host=self.host)
|
||||||
@@ -100,5 +101,5 @@ class Connection(ConnectionBase):
|
|||||||
shutil.rmtree(tmpdir)
|
shutil.rmtree(tmpdir)
|
||||||
|
|
||||||
def close(self):
|
def close(self):
|
||||||
''' terminate the connection; nothing to do here '''
|
""" terminate the connection; nothing to do here """
|
||||||
pass
|
pass
|
||||||
|
|||||||
@@ -1,4 +1,3 @@
|
|||||||
# coding: utf-8 -*-
|
|
||||||
# Based on jail.py
|
# Based on jail.py
|
||||||
# (c) 2013, Michael Scherer <misc@zarb.org>
|
# (c) 2013, Michael Scherer <misc@zarb.org>
|
||||||
# (c) 2015, Toshio Kuratomi <tkuratomi@ansible.com>
|
# (c) 2015, Toshio Kuratomi <tkuratomi@ansible.com>
|
||||||
@@ -33,7 +32,7 @@ DOCUMENTATION = '''
|
|||||||
import subprocess
|
import subprocess
|
||||||
|
|
||||||
from ansible_collections.community.general.plugins.connection.jail import Connection as Jail
|
from ansible_collections.community.general.plugins.connection.jail import Connection as Jail
|
||||||
from ansible.module_utils._text import to_native
|
from ansible.module_utils.common.text.converters import to_native
|
||||||
from ansible.errors import AnsibleError
|
from ansible.errors import AnsibleError
|
||||||
from ansible.utils.display import Display
|
from ansible.utils.display import Display
|
||||||
|
|
||||||
@@ -41,7 +40,7 @@ display = Display()
|
|||||||
|
|
||||||
|
|
||||||
class Connection(Jail):
|
class Connection(Jail):
|
||||||
''' Local iocage based connections '''
|
""" Local iocage based connections """
|
||||||
|
|
||||||
transport = 'community.general.iocage'
|
transport = 'community.general.iocage'
|
||||||
|
|
||||||
|
|||||||
@@ -1,4 +1,3 @@
|
|||||||
# coding: utf-8 -*-
|
|
||||||
# Based on local.py by Michael DeHaan <michael.dehaan@gmail.com>
|
# Based on local.py by Michael DeHaan <michael.dehaan@gmail.com>
|
||||||
# and chroot.py by Maykel Moya <mmoya@speedyrails.com>
|
# and chroot.py by Maykel Moya <mmoya@speedyrails.com>
|
||||||
# Copyright (c) 2013, Michael Scherer <misc@zarb.org>
|
# Copyright (c) 2013, Michael Scherer <misc@zarb.org>
|
||||||
@@ -36,11 +35,10 @@ import os
|
|||||||
import os.path
|
import os.path
|
||||||
import subprocess
|
import subprocess
|
||||||
import traceback
|
import traceback
|
||||||
import ansible.constants as C
|
|
||||||
|
|
||||||
from ansible.errors import AnsibleError
|
from ansible.errors import AnsibleError
|
||||||
from ansible.module_utils.six.moves import shlex_quote
|
from ansible.module_utils.six.moves import shlex_quote
|
||||||
from ansible.module_utils._text import to_bytes, to_native, to_text
|
from ansible.module_utils.common.text.converters import to_bytes, to_native, to_text
|
||||||
from ansible.plugins.connection import ConnectionBase, BUFSIZE
|
from ansible.plugins.connection import ConnectionBase, BUFSIZE
|
||||||
from ansible.utils.display import Display
|
from ansible.utils.display import Display
|
||||||
|
|
||||||
@@ -48,7 +46,7 @@ display = Display()
|
|||||||
|
|
||||||
|
|
||||||
class Connection(ConnectionBase):
|
class Connection(ConnectionBase):
|
||||||
''' Local BSD Jail based connections '''
|
""" Local BSD Jail based connections """
|
||||||
|
|
||||||
modified_jailname_key = 'conn_jail_name'
|
modified_jailname_key = 'conn_jail_name'
|
||||||
|
|
||||||
@@ -91,20 +89,20 @@ class Connection(ConnectionBase):
|
|||||||
return to_text(stdout, errors='surrogate_or_strict').split()
|
return to_text(stdout, errors='surrogate_or_strict').split()
|
||||||
|
|
||||||
def _connect(self):
|
def _connect(self):
|
||||||
''' connect to the jail; nothing to do here '''
|
""" connect to the jail; nothing to do here """
|
||||||
super(Connection, self)._connect()
|
super(Connection, self)._connect()
|
||||||
if not self._connected:
|
if not self._connected:
|
||||||
display.vvv(u"ESTABLISH JAIL CONNECTION FOR USER: {0}".format(self._play_context.remote_user), host=self.jail)
|
display.vvv(u"ESTABLISH JAIL CONNECTION FOR USER: {0}".format(self._play_context.remote_user), host=self.jail)
|
||||||
self._connected = True
|
self._connected = True
|
||||||
|
|
||||||
def _buffered_exec_command(self, cmd, stdin=subprocess.PIPE):
|
def _buffered_exec_command(self, cmd, stdin=subprocess.PIPE):
|
||||||
''' run a command on the jail. This is only needed for implementing
|
""" run a command on the jail. This is only needed for implementing
|
||||||
put_file() get_file() so that we don't have to read the whole file
|
put_file() get_file() so that we don't have to read the whole file
|
||||||
into memory.
|
into memory.
|
||||||
|
|
||||||
compared to exec_command() it looses some niceties like being able to
|
compared to exec_command() it looses some niceties like being able to
|
||||||
return the process's exit code immediately.
|
return the process's exit code immediately.
|
||||||
'''
|
"""
|
||||||
|
|
||||||
local_cmd = [self.jexec_cmd]
|
local_cmd = [self.jexec_cmd]
|
||||||
set_env = ''
|
set_env = ''
|
||||||
@@ -124,16 +122,17 @@ class Connection(ConnectionBase):
|
|||||||
return p
|
return p
|
||||||
|
|
||||||
def exec_command(self, cmd, in_data=None, sudoable=False):
|
def exec_command(self, cmd, in_data=None, sudoable=False):
|
||||||
''' run a command on the jail '''
|
""" run a command on the jail """
|
||||||
super(Connection, self).exec_command(cmd, in_data=in_data, sudoable=sudoable)
|
super(Connection, self).exec_command(cmd, in_data=in_data, sudoable=sudoable)
|
||||||
|
|
||||||
p = self._buffered_exec_command(cmd)
|
p = self._buffered_exec_command(cmd)
|
||||||
|
|
||||||
stdout, stderr = p.communicate(in_data)
|
stdout, stderr = p.communicate(in_data)
|
||||||
return (p.returncode, stdout, stderr)
|
return p.returncode, stdout, stderr
|
||||||
|
|
||||||
def _prefix_login_path(self, remote_path):
|
@staticmethod
|
||||||
''' Make sure that we put files into a standard path
|
def _prefix_login_path(remote_path):
|
||||||
|
""" Make sure that we put files into a standard path
|
||||||
|
|
||||||
If a path is relative, then we need to choose where to put it.
|
If a path is relative, then we need to choose where to put it.
|
||||||
ssh chooses $HOME but we aren't guaranteed that a home dir will
|
ssh chooses $HOME but we aren't guaranteed that a home dir will
|
||||||
@@ -141,13 +140,13 @@ class Connection(ConnectionBase):
|
|||||||
This also happens to be the former default.
|
This also happens to be the former default.
|
||||||
|
|
||||||
Can revisit using $HOME instead if it's a problem
|
Can revisit using $HOME instead if it's a problem
|
||||||
'''
|
"""
|
||||||
if not remote_path.startswith(os.path.sep):
|
if not remote_path.startswith(os.path.sep):
|
||||||
remote_path = os.path.join(os.path.sep, remote_path)
|
remote_path = os.path.join(os.path.sep, remote_path)
|
||||||
return os.path.normpath(remote_path)
|
return os.path.normpath(remote_path)
|
||||||
|
|
||||||
def put_file(self, in_path, out_path):
|
def put_file(self, in_path, out_path):
|
||||||
''' transfer a file from local to jail '''
|
""" transfer a file from local to jail """
|
||||||
super(Connection, self).put_file(in_path, out_path)
|
super(Connection, self).put_file(in_path, out_path)
|
||||||
display.vvv("PUT %s TO %s" % (in_path, out_path), host=self.jail)
|
display.vvv("PUT %s TO %s" % (in_path, out_path), host=self.jail)
|
||||||
|
|
||||||
@@ -173,7 +172,7 @@ class Connection(ConnectionBase):
|
|||||||
raise AnsibleError("file or module does not exist at: %s" % in_path)
|
raise AnsibleError("file or module does not exist at: %s" % in_path)
|
||||||
|
|
||||||
def fetch_file(self, in_path, out_path):
|
def fetch_file(self, in_path, out_path):
|
||||||
''' fetch a file from jail to local '''
|
""" fetch a file from jail to local """
|
||||||
super(Connection, self).fetch_file(in_path, out_path)
|
super(Connection, self).fetch_file(in_path, out_path)
|
||||||
display.vvv("FETCH %s TO %s" % (in_path, out_path), host=self.jail)
|
display.vvv("FETCH %s TO %s" % (in_path, out_path), host=self.jail)
|
||||||
|
|
||||||
@@ -197,6 +196,6 @@ class Connection(ConnectionBase):
|
|||||||
raise AnsibleError("failed to transfer file %s to %s:\n%s\n%s" % (in_path, out_path, to_native(stdout), to_native(stderr)))
|
raise AnsibleError("failed to transfer file %s to %s:\n%s\n%s" % (in_path, out_path, to_native(stdout), to_native(stderr)))
|
||||||
|
|
||||||
def close(self):
|
def close(self):
|
||||||
''' terminate the connection; nothing to do here '''
|
""" terminate the connection; nothing to do here """
|
||||||
super(Connection, self).close()
|
super(Connection, self).close()
|
||||||
self._connected = False
|
self._connected = False
|
||||||
|
|||||||
@@ -1,4 +1,3 @@
|
|||||||
# coding: utf-8 -*-
|
|
||||||
# (c) 2015, Joerg Thalheim <joerg@higgsboson.tk>
|
# (c) 2015, Joerg Thalheim <joerg@higgsboson.tk>
|
||||||
# Copyright (c) 2017 Ansible Project
|
# Copyright (c) 2017 Ansible Project
|
||||||
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
|
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||||
@@ -43,14 +42,13 @@ try:
|
|||||||
except ImportError:
|
except ImportError:
|
||||||
pass
|
pass
|
||||||
|
|
||||||
from ansible import constants as C
|
|
||||||
from ansible import errors
|
from ansible import errors
|
||||||
from ansible.module_utils._text import to_bytes, to_native
|
from ansible.module_utils.common.text.converters import to_bytes, to_native
|
||||||
from ansible.plugins.connection import ConnectionBase
|
from ansible.plugins.connection import ConnectionBase
|
||||||
|
|
||||||
|
|
||||||
class Connection(ConnectionBase):
|
class Connection(ConnectionBase):
|
||||||
''' Local lxc based connections '''
|
""" Local lxc based connections """
|
||||||
|
|
||||||
transport = 'community.general.lxc'
|
transport = 'community.general.lxc'
|
||||||
has_pipelining = True
|
has_pipelining = True
|
||||||
@@ -63,7 +61,7 @@ class Connection(ConnectionBase):
|
|||||||
self.container = None
|
self.container = None
|
||||||
|
|
||||||
def _connect(self):
|
def _connect(self):
|
||||||
''' connect to the lxc; nothing to do here '''
|
""" connect to the lxc; nothing to do here """
|
||||||
super(Connection, self)._connect()
|
super(Connection, self)._connect()
|
||||||
|
|
||||||
if not HAS_LIBLXC:
|
if not HAS_LIBLXC:
|
||||||
@@ -78,7 +76,8 @@ class Connection(ConnectionBase):
|
|||||||
if self.container.state == "STOPPED":
|
if self.container.state == "STOPPED":
|
||||||
raise errors.AnsibleError("%s is not running" % self.container_name)
|
raise errors.AnsibleError("%s is not running" % self.container_name)
|
||||||
|
|
||||||
def _communicate(self, pid, in_data, stdin, stdout, stderr):
|
@staticmethod
|
||||||
|
def _communicate(pid, in_data, stdin, stdout, stderr):
|
||||||
buf = {stdout: [], stderr: []}
|
buf = {stdout: [], stderr: []}
|
||||||
read_fds = [stdout, stderr]
|
read_fds = [stdout, stderr]
|
||||||
if in_data:
|
if in_data:
|
||||||
@@ -112,7 +111,7 @@ class Connection(ConnectionBase):
|
|||||||
return fd
|
return fd
|
||||||
|
|
||||||
def exec_command(self, cmd, in_data=None, sudoable=False):
|
def exec_command(self, cmd, in_data=None, sudoable=False):
|
||||||
''' run a command on the chroot '''
|
""" run a command on the chroot """
|
||||||
super(Connection, self).exec_command(cmd, in_data=in_data, sudoable=sudoable)
|
super(Connection, self).exec_command(cmd, in_data=in_data, sudoable=sudoable)
|
||||||
|
|
||||||
# python2-lxc needs bytes. python3-lxc needs text.
|
# python2-lxc needs bytes. python3-lxc needs text.
|
||||||
|
|||||||
@@ -1,4 +1,3 @@
|
|||||||
# coding: utf-8 -*-
|
|
||||||
# (c) 2016 Matt Clay <matt@mystile.com>
|
# (c) 2016 Matt Clay <matt@mystile.com>
|
||||||
# (c) 2017 Ansible Project
|
# (c) 2017 Ansible Project
|
||||||
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
|
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||||
@@ -47,7 +46,7 @@ from distutils.spawn import find_executable
|
|||||||
from subprocess import Popen, PIPE
|
from subprocess import Popen, PIPE
|
||||||
|
|
||||||
from ansible.errors import AnsibleError, AnsibleConnectionFailure, AnsibleFileNotFound
|
from ansible.errors import AnsibleError, AnsibleConnectionFailure, AnsibleFileNotFound
|
||||||
from ansible.module_utils._text import to_bytes, to_text
|
from ansible.module_utils.common.text.converters import to_bytes, to_text
|
||||||
from ansible.plugins.connection import ConnectionBase
|
from ansible.plugins.connection import ConnectionBase
|
||||||
|
|
||||||
|
|
||||||
|
|||||||
@@ -1,4 +1,3 @@
|
|||||||
# coding: utf-8 -*-
|
|
||||||
# Based on the buildah connection plugin
|
# Based on the buildah connection plugin
|
||||||
# Copyright (c) 2017 Ansible Project
|
# Copyright (c) 2017 Ansible Project
|
||||||
# 2018 Kushal Das
|
# 2018 Kushal Das
|
||||||
@@ -38,15 +37,9 @@ DOCUMENTATION = '''
|
|||||||
# - name: hosts
|
# - name: hosts
|
||||||
'''
|
'''
|
||||||
|
|
||||||
import shlex
|
|
||||||
import shutil
|
|
||||||
|
|
||||||
import os
|
|
||||||
import base64
|
|
||||||
import subprocess
|
import subprocess
|
||||||
|
|
||||||
import ansible.constants as C
|
from ansible.module_utils.common.text.converters import to_bytes
|
||||||
from ansible.module_utils._text import to_bytes, to_native
|
|
||||||
from ansible.plugins.connection import ConnectionBase, ensure_connect
|
from ansible.plugins.connection import ConnectionBase, ensure_connect
|
||||||
from ansible.errors import AnsibleConnectionFailure
|
from ansible.errors import AnsibleConnectionFailure
|
||||||
from ansible.utils.display import Display
|
from ansible.utils.display import Display
|
||||||
|
|||||||
@@ -1,4 +1,3 @@
|
|||||||
# coding: utf-8 -*-
|
|
||||||
# Based on local.py (c) 2012, Michael DeHaan <michael.dehaan@gmail.com>
|
# Based on local.py (c) 2012, Michael DeHaan <michael.dehaan@gmail.com>
|
||||||
# Based on chroot.py (c) 2013, Maykel Moya <mmoya@speedyrails.com>
|
# Based on chroot.py (c) 2013, Maykel Moya <mmoya@speedyrails.com>
|
||||||
# Based on func.py
|
# Based on func.py
|
||||||
@@ -17,14 +16,11 @@ DOCUMENTATION = '''
|
|||||||
- This allows you to use existing Saltstack infrastructure to connect to targets.
|
- This allows you to use existing Saltstack infrastructure to connect to targets.
|
||||||
'''
|
'''
|
||||||
|
|
||||||
import re
|
|
||||||
import os
|
import os
|
||||||
import pty
|
import base64
|
||||||
import codecs
|
|
||||||
import subprocess
|
|
||||||
|
|
||||||
from ansible.module_utils._text import to_bytes, to_text
|
from ansible import errors
|
||||||
from ansible.module_utils.six.moves import cPickle
|
from ansible.plugins.connection import ConnectionBase
|
||||||
|
|
||||||
HAVE_SALTSTACK = False
|
HAVE_SALTSTACK = False
|
||||||
try:
|
try:
|
||||||
@@ -33,13 +29,9 @@ try:
|
|||||||
except ImportError:
|
except ImportError:
|
||||||
pass
|
pass
|
||||||
|
|
||||||
import os
|
|
||||||
from ansible import errors
|
|
||||||
from ansible.plugins.connection import ConnectionBase
|
|
||||||
|
|
||||||
|
|
||||||
class Connection(ConnectionBase):
|
class Connection(ConnectionBase):
|
||||||
''' Salt-based connections '''
|
""" Salt-based connections """
|
||||||
|
|
||||||
has_pipelining = False
|
has_pipelining = False
|
||||||
# while the name of the product is salt, naming that module salt cause
|
# while the name of the product is salt, naming that module salt cause
|
||||||
@@ -59,29 +51,30 @@ class Connection(ConnectionBase):
|
|||||||
return self
|
return self
|
||||||
|
|
||||||
def exec_command(self, cmd, sudoable=False, in_data=None):
|
def exec_command(self, cmd, sudoable=False, in_data=None):
|
||||||
''' run a command on the remote minion '''
|
""" run a command on the remote minion """
|
||||||
super(Connection, self).exec_command(cmd, in_data=in_data, sudoable=sudoable)
|
super(Connection, self).exec_command(cmd, in_data=in_data, sudoable=sudoable)
|
||||||
|
|
||||||
if in_data:
|
if in_data:
|
||||||
raise errors.AnsibleError("Internal Error: this module does not support optimized module pipelining")
|
raise errors.AnsibleError("Internal Error: this module does not support optimized module pipelining")
|
||||||
|
|
||||||
self._display.vvv("EXEC %s" % (cmd), host=self.host)
|
self._display.vvv("EXEC %s" % cmd, host=self.host)
|
||||||
# need to add 'true;' to work around https://github.com/saltstack/salt/issues/28077
|
# need to add 'true;' to work around https://github.com/saltstack/salt/issues/28077
|
||||||
res = self.client.cmd(self.host, 'cmd.exec_code_all', ['bash', 'true;' + cmd])
|
res = self.client.cmd(self.host, 'cmd.exec_code_all', ['bash', 'true;' + cmd])
|
||||||
if self.host not in res:
|
if self.host not in res:
|
||||||
raise errors.AnsibleError("Minion %s didn't answer, check if salt-minion is running and the name is correct" % self.host)
|
raise errors.AnsibleError("Minion %s didn't answer, check if salt-minion is running and the name is correct" % self.host)
|
||||||
|
|
||||||
p = res[self.host]
|
p = res[self.host]
|
||||||
return (p['retcode'], p['stdout'], p['stderr'])
|
return p['retcode'], p['stdout'], p['stderr']
|
||||||
|
|
||||||
def _normalize_path(self, path, prefix):
|
@staticmethod
|
||||||
|
def _normalize_path(path, prefix):
|
||||||
if not path.startswith(os.path.sep):
|
if not path.startswith(os.path.sep):
|
||||||
path = os.path.join(os.path.sep, path)
|
path = os.path.join(os.path.sep, path)
|
||||||
normpath = os.path.normpath(path)
|
normpath = os.path.normpath(path)
|
||||||
return os.path.join(prefix, normpath[1:])
|
return os.path.join(prefix, normpath[1:])
|
||||||
|
|
||||||
def put_file(self, in_path, out_path):
|
def put_file(self, in_path, out_path):
|
||||||
''' transfer a file from local to remote '''
|
""" transfer a file from local to remote """
|
||||||
|
|
||||||
super(Connection, self).put_file(in_path, out_path)
|
super(Connection, self).put_file(in_path, out_path)
|
||||||
|
|
||||||
@@ -89,11 +82,11 @@ class Connection(ConnectionBase):
|
|||||||
self._display.vvv("PUT %s TO %s" % (in_path, out_path), host=self.host)
|
self._display.vvv("PUT %s TO %s" % (in_path, out_path), host=self.host)
|
||||||
with open(in_path, 'rb') as in_fh:
|
with open(in_path, 'rb') as in_fh:
|
||||||
content = in_fh.read()
|
content = in_fh.read()
|
||||||
self.client.cmd(self.host, 'hashutil.base64_decodefile', [codecs.encode(content, 'base64'), out_path])
|
self.client.cmd(self.host, 'hashutil.base64_decodefile', [base64.b64encode(content), out_path])
|
||||||
|
|
||||||
# TODO test it
|
# TODO test it
|
||||||
def fetch_file(self, in_path, out_path):
|
def fetch_file(self, in_path, out_path):
|
||||||
''' fetch a file from remote to local '''
|
""" fetch a file from remote to local """
|
||||||
|
|
||||||
super(Connection, self).fetch_file(in_path, out_path)
|
super(Connection, self).fetch_file(in_path, out_path)
|
||||||
|
|
||||||
@@ -103,5 +96,5 @@ class Connection(ConnectionBase):
|
|||||||
open(out_path, 'wb').write(content)
|
open(out_path, 'wb').write(content)
|
||||||
|
|
||||||
def close(self):
|
def close(self):
|
||||||
''' terminate the connection; nothing to do here '''
|
""" terminate the connection; nothing to do here """
|
||||||
pass
|
pass
|
||||||
|
|||||||
@@ -1,4 +1,3 @@
|
|||||||
# coding: utf-8 -*-
|
|
||||||
# Based on local.py (c) 2012, Michael DeHaan <michael.dehaan@gmail.com>
|
# Based on local.py (c) 2012, Michael DeHaan <michael.dehaan@gmail.com>
|
||||||
# and chroot.py (c) 2013, Maykel Moya <mmoya@speedyrails.com>
|
# and chroot.py (c) 2013, Maykel Moya <mmoya@speedyrails.com>
|
||||||
# and jail.py (c) 2013, Michael Scherer <misc@zarb.org>
|
# and jail.py (c) 2013, Michael Scherer <misc@zarb.org>
|
||||||
@@ -32,10 +31,9 @@ import os.path
|
|||||||
import subprocess
|
import subprocess
|
||||||
import traceback
|
import traceback
|
||||||
|
|
||||||
from ansible import constants as C
|
|
||||||
from ansible.errors import AnsibleError
|
from ansible.errors import AnsibleError
|
||||||
from ansible.module_utils.six.moves import shlex_quote
|
from ansible.module_utils.six.moves import shlex_quote
|
||||||
from ansible.module_utils._text import to_bytes
|
from ansible.module_utils.common.text.converters import to_bytes
|
||||||
from ansible.plugins.connection import ConnectionBase, BUFSIZE
|
from ansible.plugins.connection import ConnectionBase, BUFSIZE
|
||||||
from ansible.utils.display import Display
|
from ansible.utils.display import Display
|
||||||
|
|
||||||
@@ -43,7 +41,7 @@ display = Display()
|
|||||||
|
|
||||||
|
|
||||||
class Connection(ConnectionBase):
|
class Connection(ConnectionBase):
|
||||||
''' Local zone based connections '''
|
""" Local zone based connections """
|
||||||
|
|
||||||
transport = 'community.general.zone'
|
transport = 'community.general.zone'
|
||||||
has_pipelining = True
|
has_pipelining = True
|
||||||
@@ -76,9 +74,9 @@ class Connection(ConnectionBase):
|
|||||||
stdout=subprocess.PIPE, stderr=subprocess.PIPE)
|
stdout=subprocess.PIPE, stderr=subprocess.PIPE)
|
||||||
|
|
||||||
zones = []
|
zones = []
|
||||||
for l in process.stdout.readlines():
|
for line in process.stdout.readlines():
|
||||||
# 1:work:running:/zones/work:3126dc59-9a07-4829-cde9-a816e4c5040e:native:shared
|
# 1:work:running:/zones/work:3126dc59-9a07-4829-cde9-a816e4c5040e:native:shared
|
||||||
s = l.split(':')
|
s = line.split(':')
|
||||||
if s[1] != 'global':
|
if s[1] != 'global':
|
||||||
zones.append(s[1])
|
zones.append(s[1])
|
||||||
|
|
||||||
@@ -96,20 +94,20 @@ class Connection(ConnectionBase):
|
|||||||
return path + '/root'
|
return path + '/root'
|
||||||
|
|
||||||
def _connect(self):
|
def _connect(self):
|
||||||
''' connect to the zone; nothing to do here '''
|
""" connect to the zone; nothing to do here """
|
||||||
super(Connection, self)._connect()
|
super(Connection, self)._connect()
|
||||||
if not self._connected:
|
if not self._connected:
|
||||||
display.vvv("THIS IS A LOCAL ZONE DIR", host=self.zone)
|
display.vvv("THIS IS A LOCAL ZONE DIR", host=self.zone)
|
||||||
self._connected = True
|
self._connected = True
|
||||||
|
|
||||||
def _buffered_exec_command(self, cmd, stdin=subprocess.PIPE):
|
def _buffered_exec_command(self, cmd, stdin=subprocess.PIPE):
|
||||||
''' run a command on the zone. This is only needed for implementing
|
""" run a command on the zone. This is only needed for implementing
|
||||||
put_file() get_file() so that we don't have to read the whole file
|
put_file() get_file() so that we don't have to read the whole file
|
||||||
into memory.
|
into memory.
|
||||||
|
|
||||||
compared to exec_command() it looses some niceties like being able to
|
compared to exec_command() it looses some niceties like being able to
|
||||||
return the process's exit code immediately.
|
return the process's exit code immediately.
|
||||||
'''
|
"""
|
||||||
# NOTE: zlogin invokes a shell (just like ssh does) so we do not pass
|
# NOTE: zlogin invokes a shell (just like ssh does) so we do not pass
|
||||||
# this through /bin/sh -c here. Instead it goes through the shell
|
# this through /bin/sh -c here. Instead it goes through the shell
|
||||||
# that zlogin selects.
|
# that zlogin selects.
|
||||||
@@ -123,16 +121,16 @@ class Connection(ConnectionBase):
|
|||||||
return p
|
return p
|
||||||
|
|
||||||
def exec_command(self, cmd, in_data=None, sudoable=False):
|
def exec_command(self, cmd, in_data=None, sudoable=False):
|
||||||
''' run a command on the zone '''
|
""" run a command on the zone """
|
||||||
super(Connection, self).exec_command(cmd, in_data=in_data, sudoable=sudoable)
|
super(Connection, self).exec_command(cmd, in_data=in_data, sudoable=sudoable)
|
||||||
|
|
||||||
p = self._buffered_exec_command(cmd)
|
p = self._buffered_exec_command(cmd)
|
||||||
|
|
||||||
stdout, stderr = p.communicate(in_data)
|
stdout, stderr = p.communicate(in_data)
|
||||||
return (p.returncode, stdout, stderr)
|
return p.returncode, stdout, stderr
|
||||||
|
|
||||||
def _prefix_login_path(self, remote_path):
|
def _prefix_login_path(self, remote_path):
|
||||||
''' Make sure that we put files into a standard path
|
""" Make sure that we put files into a standard path
|
||||||
|
|
||||||
If a path is relative, then we need to choose where to put it.
|
If a path is relative, then we need to choose where to put it.
|
||||||
ssh chooses $HOME but we aren't guaranteed that a home dir will
|
ssh chooses $HOME but we aren't guaranteed that a home dir will
|
||||||
@@ -140,13 +138,13 @@ class Connection(ConnectionBase):
|
|||||||
This also happens to be the former default.
|
This also happens to be the former default.
|
||||||
|
|
||||||
Can revisit using $HOME instead if it's a problem
|
Can revisit using $HOME instead if it's a problem
|
||||||
'''
|
"""
|
||||||
if not remote_path.startswith(os.path.sep):
|
if not remote_path.startswith(os.path.sep):
|
||||||
remote_path = os.path.join(os.path.sep, remote_path)
|
remote_path = os.path.join(os.path.sep, remote_path)
|
||||||
return os.path.normpath(remote_path)
|
return os.path.normpath(remote_path)
|
||||||
|
|
||||||
def put_file(self, in_path, out_path):
|
def put_file(self, in_path, out_path):
|
||||||
''' transfer a file from local to zone '''
|
""" transfer a file from local to zone """
|
||||||
super(Connection, self).put_file(in_path, out_path)
|
super(Connection, self).put_file(in_path, out_path)
|
||||||
display.vvv("PUT %s TO %s" % (in_path, out_path), host=self.zone)
|
display.vvv("PUT %s TO %s" % (in_path, out_path), host=self.zone)
|
||||||
|
|
||||||
@@ -172,7 +170,7 @@ class Connection(ConnectionBase):
|
|||||||
raise AnsibleError("file or module does not exist at: %s" % in_path)
|
raise AnsibleError("file or module does not exist at: %s" % in_path)
|
||||||
|
|
||||||
def fetch_file(self, in_path, out_path):
|
def fetch_file(self, in_path, out_path):
|
||||||
''' fetch a file from zone to local '''
|
""" fetch a file from zone to local """
|
||||||
super(Connection, self).fetch_file(in_path, out_path)
|
super(Connection, self).fetch_file(in_path, out_path)
|
||||||
display.vvv("FETCH %s TO %s" % (in_path, out_path), host=self.zone)
|
display.vvv("FETCH %s TO %s" % (in_path, out_path), host=self.zone)
|
||||||
|
|
||||||
@@ -196,6 +194,6 @@ class Connection(ConnectionBase):
|
|||||||
raise AnsibleError("failed to transfer file %s to %s:\n%s\n%s" % (in_path, out_path, stdout, stderr))
|
raise AnsibleError("failed to transfer file %s to %s:\n%s\n%s" % (in_path, out_path, stdout, stderr))
|
||||||
|
|
||||||
def close(self):
|
def close(self):
|
||||||
''' terminate the connection; nothing to do here '''
|
""" terminate the connection; nothing to do here """
|
||||||
super(Connection, self).close()
|
super(Connection, self).close()
|
||||||
self._connected = False
|
self._connected = False
|
||||||
|
|||||||
@@ -1,4 +1,3 @@
|
|||||||
# coding: utf-8 -*-
|
|
||||||
# Copyright: (c) 2018, Hewlett Packard Enterprise Development LP
|
# Copyright: (c) 2018, Hewlett Packard Enterprise Development LP
|
||||||
# GNU General Public License v3.0+
|
# GNU General Public License v3.0+
|
||||||
# (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
|
# (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||||
|
|||||||
@@ -1,4 +1,3 @@
|
|||||||
# coding: utf-8 -*-
|
|
||||||
# Copyright: (c) 2018, Huawei Inc.
|
# Copyright: (c) 2018, Huawei Inc.
|
||||||
# GNU General Public License v3.0+
|
# GNU General Public License v3.0+
|
||||||
# (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
|
# (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||||
|
|||||||
@@ -30,7 +30,6 @@ options:
|
|||||||
description:
|
description:
|
||||||
- Keycloak realm name to authenticate to for API access.
|
- Keycloak realm name to authenticate to for API access.
|
||||||
type: str
|
type: str
|
||||||
required: true
|
|
||||||
|
|
||||||
auth_client_secret:
|
auth_client_secret:
|
||||||
description:
|
description:
|
||||||
@@ -41,7 +40,6 @@ options:
|
|||||||
description:
|
description:
|
||||||
- Username to authenticate for API access with.
|
- Username to authenticate for API access with.
|
||||||
type: str
|
type: str
|
||||||
required: true
|
|
||||||
aliases:
|
aliases:
|
||||||
- username
|
- username
|
||||||
|
|
||||||
@@ -49,10 +47,15 @@ options:
|
|||||||
description:
|
description:
|
||||||
- Password to authenticate for API access with.
|
- Password to authenticate for API access with.
|
||||||
type: str
|
type: str
|
||||||
required: true
|
|
||||||
aliases:
|
aliases:
|
||||||
- password
|
- password
|
||||||
|
|
||||||
|
token:
|
||||||
|
description:
|
||||||
|
- Authentication token for Keycloak API.
|
||||||
|
type: str
|
||||||
|
version_added: 3.0.0
|
||||||
|
|
||||||
validate_certs:
|
validate_certs:
|
||||||
description:
|
description:
|
||||||
- Verify TLS certificates (do not disable this in production).
|
- Verify TLS certificates (do not disable this in production).
|
||||||
|
|||||||
@@ -1,4 +1,3 @@
|
|||||||
# coding: utf-8 -*-
|
|
||||||
# Copyright (c) 2018, Oracle and/or its affiliates.
|
# Copyright (c) 2018, Oracle and/or its affiliates.
|
||||||
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
|
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||||
|
|
||||||
|
|||||||
@@ -1,4 +1,3 @@
|
|||||||
# coding: utf-8 -*-
|
|
||||||
# Copyright (c) 2018, Oracle and/or its affiliates.
|
# Copyright (c) 2018, Oracle and/or its affiliates.
|
||||||
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
|
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||||
|
|
||||||
|
|||||||
@@ -1,4 +1,3 @@
|
|||||||
# coding: utf-8 -*-
|
|
||||||
# Copyright (c) 2018, Oracle and/or its affiliates.
|
# Copyright (c) 2018, Oracle and/or its affiliates.
|
||||||
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
|
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||||
|
|
||||||
|
|||||||
@@ -1,4 +1,3 @@
|
|||||||
# coding: utf-8 -*-
|
|
||||||
# Copyright (c) 2018, Oracle and/or its affiliates.
|
# Copyright (c) 2018, Oracle and/or its affiliates.
|
||||||
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
|
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||||
|
|
||||||
|
|||||||
@@ -1,4 +1,3 @@
|
|||||||
# coding: utf-8 -*-
|
|
||||||
# Copyright (c) 2018, Oracle and/or its affiliates.
|
# Copyright (c) 2018, Oracle and/or its affiliates.
|
||||||
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
|
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||||
|
|
||||||
|
|||||||
@@ -1,4 +1,3 @@
|
|||||||
# coding: utf-8 -*-
|
|
||||||
# Copyright (c) 2018, Oracle and/or its affiliates.
|
# Copyright (c) 2018, Oracle and/or its affiliates.
|
||||||
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
|
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||||
|
|
||||||
|
|||||||
@@ -1,59 +0,0 @@
|
|||||||
# -*- coding: utf-8 -*-
|
|
||||||
|
|
||||||
# Copyright: (c) 2016, Red Hat, Inc.
|
|
||||||
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
|
|
||||||
|
|
||||||
from __future__ import (absolute_import, division, print_function)
|
|
||||||
__metaclass__ = type
|
|
||||||
|
|
||||||
|
|
||||||
class ModuleDocFragment(object):
|
|
||||||
|
|
||||||
# info standard oVirt documentation fragment
|
|
||||||
DOCUMENTATION = r'''
|
|
||||||
options:
|
|
||||||
fetch_nested:
|
|
||||||
description:
|
|
||||||
- If I(yes) the module will fetch additional data from the API.
|
|
||||||
- It will fetch only IDs of nested entity. It doesn't fetch multiple levels of nested attributes.
|
|
||||||
Only the attributes of the current entity. User can configure to fetch other
|
|
||||||
attributes of the nested entities by specifying C(nested_attributes).
|
|
||||||
type: bool
|
|
||||||
default: false
|
|
||||||
nested_attributes:
|
|
||||||
description:
|
|
||||||
- Specifies list of the attributes which should be fetched from the API.
|
|
||||||
- This parameter apply only when C(fetch_nested) is I(true).
|
|
||||||
type: list
|
|
||||||
auth:
|
|
||||||
description:
|
|
||||||
- "Dictionary with values needed to create HTTP/HTTPS connection to oVirt:"
|
|
||||||
- C(username)[I(required)] - The name of the user, something like I(admin@internal).
|
|
||||||
Default value is set by I(OVIRT_USERNAME) environment variable.
|
|
||||||
- "C(password)[I(required)] - The password of the user. Default value is set by I(OVIRT_PASSWORD) environment variable."
|
|
||||||
- "C(url)- A string containing the API URL of the server, usually
|
|
||||||
something like `I(https://server.example.com/ovirt-engine/api)`. Default value is set by I(OVIRT_URL) environment variable.
|
|
||||||
Either C(url) or C(hostname) is required."
|
|
||||||
- "C(hostname) - A string containing the hostname of the server, usually
|
|
||||||
something like `I(server.example.com)`. Default value is set by I(OVIRT_HOSTNAME) environment variable.
|
|
||||||
Either C(url) or C(hostname) is required."
|
|
||||||
- "C(token) - Token to be used instead of login with username/password. Default value is set by I(OVIRT_TOKEN) environment variable."
|
|
||||||
- "C(insecure) - A boolean flag that indicates if the server TLS
|
|
||||||
certificate and host name should be checked."
|
|
||||||
- "C(ca_file) - A PEM file containing the trusted CA certificates. The
|
|
||||||
certificate presented by the server will be verified using these CA
|
|
||||||
certificates. If `C(ca_file)` parameter is not set, system wide
|
|
||||||
CA certificate store is used. Default value is set by I(OVIRT_CAFILE) environment variable."
|
|
||||||
- "C(kerberos) - A boolean flag indicating if Kerberos authentication
|
|
||||||
should be used instead of the default basic authentication."
|
|
||||||
- "C(headers) - Dictionary of HTTP headers to be added to each API call."
|
|
||||||
type: dict
|
|
||||||
required: true
|
|
||||||
requirements:
|
|
||||||
- python >= 2.7
|
|
||||||
- ovirt-engine-sdk-python >= 4.3.0
|
|
||||||
notes:
|
|
||||||
- "In order to use this module you have to install oVirt Python SDK.
|
|
||||||
To ensure it's installed with correct version you can create the following task:
|
|
||||||
ansible.builtin.pip: name=ovirt-engine-sdk-python version=4.3.0"
|
|
||||||
'''
|
|
||||||
@@ -1,4 +1,3 @@
|
|||||||
# coding: utf-8 -*-
|
|
||||||
#
|
#
|
||||||
# Copyright: (c) 2019, Sandeep Kasargod <sandeep@vexata.com>
|
# Copyright: (c) 2019, Sandeep Kasargod <sandeep@vexata.com>
|
||||||
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
|
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||||
|
|||||||
@@ -8,7 +8,7 @@ from __future__ import absolute_import, division, print_function
|
|||||||
__metaclass__ = type
|
__metaclass__ = type
|
||||||
|
|
||||||
from ansible.errors import AnsibleFilterError
|
from ansible.errors import AnsibleFilterError
|
||||||
from ansible.module_utils._text import to_native
|
from ansible.module_utils.common.text.converters import to_native
|
||||||
|
|
||||||
from ansible_collections.community.general.plugins.module_utils.csv import (initialize_dialect, read_csv, CSVError,
|
from ansible_collections.community.general.plugins.module_utils.csv import (initialize_dialect, read_csv, CSVError,
|
||||||
DialectNotAvailableError,
|
DialectNotAvailableError,
|
||||||
|
|||||||
42
plugins/filter/groupby.py
Normal file
42
plugins/filter/groupby.py
Normal file
@@ -0,0 +1,42 @@
|
|||||||
|
# -*- coding: utf-8 -*-
|
||||||
|
# Copyright (c) 2021, Felix Fontein <felix@fontein.de>
|
||||||
|
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||||
|
|
||||||
|
from __future__ import (absolute_import, division, print_function)
|
||||||
|
__metaclass__ = type
|
||||||
|
|
||||||
|
from ansible.errors import AnsibleFilterError
|
||||||
|
from ansible.module_utils.common._collections_compat import Mapping, Sequence
|
||||||
|
|
||||||
|
|
||||||
|
def groupby_as_dict(sequence, attribute):
|
||||||
|
'''
|
||||||
|
Given a sequence of dictionaries and an attribute name, returns a dictionary mapping
|
||||||
|
the value of this attribute to the dictionary.
|
||||||
|
|
||||||
|
If multiple dictionaries in the sequence have the same value for this attribute,
|
||||||
|
the filter will fail.
|
||||||
|
'''
|
||||||
|
if not isinstance(sequence, Sequence):
|
||||||
|
raise AnsibleFilterError('Input is not a sequence')
|
||||||
|
|
||||||
|
result = dict()
|
||||||
|
for list_index, element in enumerate(sequence):
|
||||||
|
if not isinstance(element, Mapping):
|
||||||
|
raise AnsibleFilterError('Sequence element #{0} is not a mapping'.format(list_index))
|
||||||
|
if attribute not in element:
|
||||||
|
raise AnsibleFilterError('Attribute not contained in element #{0} of sequence'.format(list_index))
|
||||||
|
result_index = element[attribute]
|
||||||
|
if result_index in result:
|
||||||
|
raise AnsibleFilterError('Multiple sequence entries have attribute value {0!r}'.format(result_index))
|
||||||
|
result[result_index] = element
|
||||||
|
return result
|
||||||
|
|
||||||
|
|
||||||
|
class FilterModule(object):
|
||||||
|
''' Ansible list filters '''
|
||||||
|
|
||||||
|
def filters(self):
|
||||||
|
return {
|
||||||
|
'groupby_as_dict': groupby_as_dict,
|
||||||
|
}
|
||||||
97
plugins/filter/hashids.py
Normal file
97
plugins/filter/hashids.py
Normal file
@@ -0,0 +1,97 @@
|
|||||||
|
# -*- coding: utf-8 -*-
|
||||||
|
|
||||||
|
# Copyright: (c) 2021, Andrew Pantuso (@ajpantuso) <ajpantuso@gmail.com>
|
||||||
|
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||||
|
|
||||||
|
from __future__ import absolute_import, division, print_function
|
||||||
|
__metaclass__ = type
|
||||||
|
|
||||||
|
from ansible.errors import (
|
||||||
|
AnsibleError,
|
||||||
|
AnsibleFilterError,
|
||||||
|
AnsibleFilterTypeError,
|
||||||
|
)
|
||||||
|
|
||||||
|
from ansible.module_utils.common.text.converters import to_native
|
||||||
|
from ansible.module_utils.common.collections import is_sequence
|
||||||
|
|
||||||
|
try:
|
||||||
|
from hashids import Hashids
|
||||||
|
HAS_HASHIDS = True
|
||||||
|
except ImportError:
|
||||||
|
HAS_HASHIDS = False
|
||||||
|
|
||||||
|
|
||||||
|
def initialize_hashids(**kwargs):
|
||||||
|
if not HAS_HASHIDS:
|
||||||
|
raise AnsibleError("The hashids library must be installed in order to use this plugin")
|
||||||
|
|
||||||
|
params = dict((k, v) for k, v in kwargs.items() if v)
|
||||||
|
|
||||||
|
try:
|
||||||
|
return Hashids(**params)
|
||||||
|
except TypeError as e:
|
||||||
|
raise AnsibleFilterError(
|
||||||
|
"The provided parameters %s are invalid: %s" % (
|
||||||
|
', '.join(["%s=%s" % (k, v) for k, v in params.items()]),
|
||||||
|
to_native(e)
|
||||||
|
)
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
def hashids_encode(nums, salt=None, alphabet=None, min_length=None):
|
||||||
|
"""Generates a YouTube-like hash from a sequence of ints
|
||||||
|
|
||||||
|
:nums: Sequence of one or more ints to hash
|
||||||
|
:salt: String to use as salt when hashing
|
||||||
|
:alphabet: String of 16 or more unique characters to produce a hash
|
||||||
|
:min_length: Minimum length of hash produced
|
||||||
|
"""
|
||||||
|
|
||||||
|
hashids = initialize_hashids(
|
||||||
|
salt=salt,
|
||||||
|
alphabet=alphabet,
|
||||||
|
min_length=min_length
|
||||||
|
)
|
||||||
|
|
||||||
|
# Handles the case where a single int is not encapsulated in a list or tuple.
|
||||||
|
# User convenience seems preferable to strict typing in this case
|
||||||
|
# Also avoids obfuscated error messages related to single invalid inputs
|
||||||
|
if not is_sequence(nums):
|
||||||
|
nums = [nums]
|
||||||
|
|
||||||
|
try:
|
||||||
|
hashid = hashids.encode(*nums)
|
||||||
|
except TypeError as e:
|
||||||
|
raise AnsibleFilterTypeError(
|
||||||
|
"Data to encode must by a tuple or list of ints: %s" % to_native(e)
|
||||||
|
)
|
||||||
|
|
||||||
|
return hashid
|
||||||
|
|
||||||
|
|
||||||
|
def hashids_decode(hashid, salt=None, alphabet=None, min_length=None):
|
||||||
|
"""Decodes a YouTube-like hash to a sequence of ints
|
||||||
|
|
||||||
|
:hashid: Hash string to decode
|
||||||
|
:salt: String to use as salt when hashing
|
||||||
|
:alphabet: String of 16 or more unique characters to produce a hash
|
||||||
|
:min_length: Minimum length of hash produced
|
||||||
|
"""
|
||||||
|
|
||||||
|
hashids = initialize_hashids(
|
||||||
|
salt=salt,
|
||||||
|
alphabet=alphabet,
|
||||||
|
min_length=min_length
|
||||||
|
)
|
||||||
|
nums = hashids.decode(hashid)
|
||||||
|
return list(nums)
|
||||||
|
|
||||||
|
|
||||||
|
class FilterModule(object):
|
||||||
|
|
||||||
|
def filters(self):
|
||||||
|
return {
|
||||||
|
'hashids_encode': hashids_encode,
|
||||||
|
'hashids_decode': hashids_decode,
|
||||||
|
}
|
||||||
@@ -72,7 +72,7 @@ from distutils.version import LooseVersion
|
|||||||
import socket
|
import socket
|
||||||
|
|
||||||
from ansible.errors import AnsibleError
|
from ansible.errors import AnsibleError
|
||||||
from ansible.module_utils._text import to_bytes, to_native, to_text
|
from ansible.module_utils.common.text.converters import to_bytes, to_native, to_text
|
||||||
from ansible.module_utils.common._collections_compat import MutableMapping
|
from ansible.module_utils.common._collections_compat import MutableMapping
|
||||||
from ansible.module_utils.six import iteritems
|
from ansible.module_utils.six import iteritems
|
||||||
from ansible.plugins.inventory import BaseInventoryPlugin, Cacheable, to_safe_group_name
|
from ansible.plugins.inventory import BaseInventoryPlugin, Cacheable, to_safe_group_name
|
||||||
|
|||||||
@@ -82,7 +82,7 @@ keyed_groups:
|
|||||||
'''
|
'''
|
||||||
|
|
||||||
from ansible.errors import AnsibleError, AnsibleParserError
|
from ansible.errors import AnsibleError, AnsibleParserError
|
||||||
from ansible.module_utils._text import to_native
|
from ansible.module_utils.common.text.converters import to_native
|
||||||
from ansible.plugins.inventory import BaseInventoryPlugin, Constructable
|
from ansible.plugins.inventory import BaseInventoryPlugin, Constructable
|
||||||
|
|
||||||
try:
|
try:
|
||||||
|
|||||||
950
plugins/inventory/lxd.py
Normal file
950
plugins/inventory/lxd.py
Normal file
@@ -0,0 +1,950 @@
|
|||||||
|
# -*- coding: utf-8 -*-
|
||||||
|
# Copyright: (c) 2021, Frank Dornheim <dornheim@posteo.de>
|
||||||
|
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||||
|
|
||||||
|
from __future__ import (absolute_import, division, print_function)
|
||||||
|
__metaclass__ = type
|
||||||
|
|
||||||
|
DOCUMENTATION = r'''
|
||||||
|
name: lxd
|
||||||
|
short_description: Returns Ansible inventory from lxd host
|
||||||
|
description:
|
||||||
|
- Get inventory from the lxd.
|
||||||
|
- Uses a YAML configuration file that ends with 'lxd.(yml|yaml)'.
|
||||||
|
version_added: "3.0.0"
|
||||||
|
author: "Frank Dornheim (@conloos)"
|
||||||
|
options:
|
||||||
|
plugin:
|
||||||
|
description: Token that ensures this is a source file for the 'lxd' plugin.
|
||||||
|
required: true
|
||||||
|
choices: [ 'community.general.lxd' ]
|
||||||
|
url:
|
||||||
|
description:
|
||||||
|
- The unix domain socket path or the https URL for the lxd server.
|
||||||
|
- Sockets in filesystem have to start with C(unix:).
|
||||||
|
- Mostly C(unix:/var/lib/lxd/unix.socket) or C(unix:/var/snap/lxd/common/lxd/unix.socket).
|
||||||
|
default: unix:/var/snap/lxd/common/lxd/unix.socket
|
||||||
|
type: str
|
||||||
|
client_key:
|
||||||
|
description:
|
||||||
|
- The client certificate key file path.
|
||||||
|
aliases: [ key_file ]
|
||||||
|
default: $HOME/.config/lxc/client.key
|
||||||
|
type: path
|
||||||
|
client_cert:
|
||||||
|
description:
|
||||||
|
- The client certificate file path.
|
||||||
|
aliases: [ cert_file ]
|
||||||
|
default: $HOME/.config/lxc/client.crt
|
||||||
|
type: path
|
||||||
|
trust_password:
|
||||||
|
description:
|
||||||
|
- The client trusted password.
|
||||||
|
- You need to set this password on the lxd server before
|
||||||
|
running this module using the following command
|
||||||
|
C(lxc config set core.trust_password <some random password>)
|
||||||
|
See U(https://www.stgraber.org/2016/04/18/lxd-api-direct-interaction/).
|
||||||
|
- If I(trust_password) is set, this module send a request for authentication before sending any requests.
|
||||||
|
type: str
|
||||||
|
state:
|
||||||
|
description: Filter the container according to the current status.
|
||||||
|
type: str
|
||||||
|
default: none
|
||||||
|
choices: [ 'STOPPED', 'STARTING', 'RUNNING', 'none' ]
|
||||||
|
prefered_container_network_interface:
|
||||||
|
description:
|
||||||
|
- If a container has multiple network interfaces, select which one is the prefered as pattern.
|
||||||
|
- Combined with the first number that can be found e.g. 'eth' + 0.
|
||||||
|
type: str
|
||||||
|
default: eth
|
||||||
|
prefered_container_network_family:
|
||||||
|
description:
|
||||||
|
- If a container has multiple network interfaces, which one is the prefered by family.
|
||||||
|
- Specify C(inet) for IPv4 and C(inet6) for IPv6.
|
||||||
|
type: str
|
||||||
|
default: inet
|
||||||
|
choices: [ 'inet', 'inet6' ]
|
||||||
|
groupby:
|
||||||
|
description:
|
||||||
|
- Create groups by the following keywords C(location), C(pattern), C(network_range), C(os), C(release), C(profile), C(vlanid).
|
||||||
|
- See example for syntax.
|
||||||
|
type: dict
|
||||||
|
'''
|
||||||
|
|
||||||
|
EXAMPLES = '''
|
||||||
|
# simple lxd.yml
|
||||||
|
plugin: community.general.lxd
|
||||||
|
url: unix:/var/snap/lxd/common/lxd/unix.socket
|
||||||
|
|
||||||
|
# simple lxd.yml including filter
|
||||||
|
plugin: community.general.lxd
|
||||||
|
url: unix:/var/snap/lxd/common/lxd/unix.socket
|
||||||
|
state: RUNNING
|
||||||
|
|
||||||
|
# grouping lxd.yml
|
||||||
|
groupby:
|
||||||
|
testpattern:
|
||||||
|
type: pattern
|
||||||
|
attribute: test
|
||||||
|
vlan666:
|
||||||
|
type: vlanid
|
||||||
|
attribute: 666
|
||||||
|
locationBerlin:
|
||||||
|
type: location
|
||||||
|
attribute: Berlin
|
||||||
|
osUbuntu:
|
||||||
|
type: os
|
||||||
|
attribute: ubuntu
|
||||||
|
releaseFocal:
|
||||||
|
type: release
|
||||||
|
attribute: focal
|
||||||
|
releaseBionic:
|
||||||
|
type: release
|
||||||
|
attribute: bionic
|
||||||
|
profileDefault:
|
||||||
|
type: profile
|
||||||
|
attribute: default
|
||||||
|
profileX11:
|
||||||
|
type: profile
|
||||||
|
attribute: x11
|
||||||
|
netRangeIPv4:
|
||||||
|
type: network_range
|
||||||
|
attribute: 10.98.143.0/24
|
||||||
|
netRangeIPv6:
|
||||||
|
type: network_range
|
||||||
|
attribute: fd42:bd00:7b11:2167:216:3eff::/24
|
||||||
|
'''
|
||||||
|
|
||||||
|
import binascii
|
||||||
|
import json
|
||||||
|
import re
|
||||||
|
import time
|
||||||
|
import os
|
||||||
|
import socket
|
||||||
|
from ansible.plugins.inventory import BaseInventoryPlugin
|
||||||
|
from ansible.module_utils.common.text.converters import to_native, to_text
|
||||||
|
from ansible.module_utils.common.dict_transformations import dict_merge
|
||||||
|
from ansible.errors import AnsibleError, AnsibleParserError
|
||||||
|
from ansible_collections.community.general.plugins.module_utils.compat import ipaddress
|
||||||
|
from ansible_collections.community.general.plugins.module_utils.lxd import LXDClient, LXDClientException
|
||||||
|
|
||||||
|
|
||||||
|
class InventoryModule(BaseInventoryPlugin):
    """Dynamic inventory plugin that reads containers from an LXD daemon."""

    # Debug level handed to the LXDClient (verbosity of socket communication).
    DEBUG = 4
    # Fully qualified plugin name; must match the documented plugin entry.
    NAME = 'community.general.lxd'
    # Default unix socket of an LXD installed via snap.
    SNAP_SOCKET_URL = 'unix:/var/snap/lxd/common/lxd/unix.socket'
    # Default unix socket of an LXD installed from a distribution package.
    SOCKET_URL = 'unix:/var/lib/lxd/unix.socket'
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def load_json_data(path):
|
||||||
|
"""Load json data
|
||||||
|
|
||||||
|
Load json data from file
|
||||||
|
|
||||||
|
Args:
|
||||||
|
list(path): Path elements
|
||||||
|
str(file_name): Filename of data
|
||||||
|
Kwargs:
|
||||||
|
None
|
||||||
|
Raises:
|
||||||
|
None
|
||||||
|
Returns:
|
||||||
|
dict(json_data): json data"""
|
||||||
|
try:
|
||||||
|
with open(path, 'r') as json_file:
|
||||||
|
return json.load(json_file)
|
||||||
|
except (IOError, json.decoder.JSONDecodeError) as err:
|
||||||
|
raise AnsibleParserError('Could not load the test data from {0}: {1}'.format(to_native(path), to_native(err)))
|
||||||
|
|
||||||
|
def save_json_data(self, path, file_name=None):
|
||||||
|
"""save data as json
|
||||||
|
|
||||||
|
Save data as json file
|
||||||
|
|
||||||
|
Args:
|
||||||
|
list(path): Path elements
|
||||||
|
str(file_name): Filename of data
|
||||||
|
Kwargs:
|
||||||
|
None
|
||||||
|
Raises:
|
||||||
|
None
|
||||||
|
Returns:
|
||||||
|
None"""
|
||||||
|
|
||||||
|
if file_name:
|
||||||
|
path.append(file_name)
|
||||||
|
else:
|
||||||
|
prefix = 'lxd_data-'
|
||||||
|
time_stamp = time.strftime('%Y%m%d-%H%M%S')
|
||||||
|
suffix = '.atd'
|
||||||
|
path.append(prefix + time_stamp + suffix)
|
||||||
|
|
||||||
|
try:
|
||||||
|
cwd = os.path.abspath(os.path.dirname(__file__))
|
||||||
|
with open(os.path.abspath(os.path.join(cwd, *path)), 'w') as json_file:
|
||||||
|
json.dump(self.data, json_file)
|
||||||
|
except IOError as err:
|
||||||
|
raise AnsibleParserError('Could not save data: {0}'.format(to_native(err)))
|
||||||
|
|
||||||
|
def verify_file(self, path):
|
||||||
|
"""Check the config
|
||||||
|
|
||||||
|
Return true/false if the config-file is valid for this plugin
|
||||||
|
|
||||||
|
Args:
|
||||||
|
str(path): path to the config
|
||||||
|
Kwargs:
|
||||||
|
None
|
||||||
|
Raises:
|
||||||
|
None
|
||||||
|
Returns:
|
||||||
|
bool(valid): is valid"""
|
||||||
|
valid = False
|
||||||
|
if super(InventoryModule, self).verify_file(path):
|
||||||
|
if path.endswith(('lxd.yaml', 'lxd.yml')):
|
||||||
|
valid = True
|
||||||
|
else:
|
||||||
|
self.display.vvv('Inventory source not ending in "lxd.yaml" or "lxd.yml"')
|
||||||
|
return valid
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def validate_url(url):
|
||||||
|
"""validate url
|
||||||
|
|
||||||
|
check whether the url is correctly formatted
|
||||||
|
|
||||||
|
Args:
|
||||||
|
url
|
||||||
|
Kwargs:
|
||||||
|
None
|
||||||
|
Raises:
|
||||||
|
AnsibleError
|
||||||
|
Returns:
|
||||||
|
bool"""
|
||||||
|
if not isinstance(url, str):
|
||||||
|
return False
|
||||||
|
if not url.startswith(('unix:', 'https:')):
|
||||||
|
raise AnsibleError('URL is malformed: {0}'.format(to_native(url)))
|
||||||
|
return True
|
||||||
|
|
||||||
|
def _connect_to_socket(self):
|
||||||
|
"""connect to lxd socket
|
||||||
|
|
||||||
|
Connect to lxd socket by provided url or defaults
|
||||||
|
|
||||||
|
Args:
|
||||||
|
None
|
||||||
|
Kwargs:
|
||||||
|
None
|
||||||
|
Raises:
|
||||||
|
AnsibleError
|
||||||
|
Returns:
|
||||||
|
None"""
|
||||||
|
error_storage = {}
|
||||||
|
url_list = [self.get_option('url'), self.SNAP_SOCKET_URL, self.SOCKET_URL]
|
||||||
|
urls = (url for url in url_list if self.validate_url(url))
|
||||||
|
for url in urls:
|
||||||
|
try:
|
||||||
|
socket_connection = LXDClient(url, self.client_key, self.client_cert, self.debug)
|
||||||
|
return socket_connection
|
||||||
|
except LXDClientException as err:
|
||||||
|
error_storage[url] = err
|
||||||
|
raise AnsibleError('No connection to the socket: {0}'.format(to_native(error_storage)))
|
||||||
|
|
||||||
|
def _get_networks(self):
|
||||||
|
"""Get Networknames
|
||||||
|
|
||||||
|
Returns all network config names
|
||||||
|
|
||||||
|
Args:
|
||||||
|
None
|
||||||
|
Kwargs:
|
||||||
|
None
|
||||||
|
Raises:
|
||||||
|
None
|
||||||
|
Returns:
|
||||||
|
list(names): names of all network_configs"""
|
||||||
|
# e.g. {'type': 'sync',
|
||||||
|
# 'status': 'Success',
|
||||||
|
# 'status_code': 200,
|
||||||
|
# 'operation': '',
|
||||||
|
# 'error_code': 0,
|
||||||
|
# 'error': '',
|
||||||
|
# 'metadata': ['/1.0/networks/lxdbr0']}
|
||||||
|
network_configs = self.socket.do('GET', '/1.0/networks')
|
||||||
|
return [m.split('/')[3] for m in network_configs['metadata']]
|
||||||
|
|
||||||
|
def _get_containers(self):
|
||||||
|
"""Get Containernames
|
||||||
|
|
||||||
|
Returns all containernames
|
||||||
|
|
||||||
|
Args:
|
||||||
|
None
|
||||||
|
Kwargs:
|
||||||
|
None
|
||||||
|
Raises:
|
||||||
|
None
|
||||||
|
Returns:
|
||||||
|
list(names): names of all containers"""
|
||||||
|
# e.g. {'type': 'sync',
|
||||||
|
# 'status': 'Success',
|
||||||
|
# 'status_code': 200,
|
||||||
|
# 'operation': '',
|
||||||
|
# 'error_code': 0,
|
||||||
|
# 'error': '',
|
||||||
|
# 'metadata': ['/1.0/containers/udemy-ansible-ubuntu-2004']}
|
||||||
|
containers = self.socket.do('GET', '/1.0/containers')
|
||||||
|
return [m.split('/')[3] for m in containers['metadata']]
|
||||||
|
|
||||||
|
def _get_config(self, branch, name):
|
||||||
|
"""Get inventory of container
|
||||||
|
|
||||||
|
Get config of container
|
||||||
|
|
||||||
|
Args:
|
||||||
|
str(branch): Name oft the API-Branch
|
||||||
|
str(name): Name of Container
|
||||||
|
Kwargs:
|
||||||
|
None
|
||||||
|
Source:
|
||||||
|
https://github.com/lxc/lxd/blob/master/doc/rest-api.md
|
||||||
|
Raises:
|
||||||
|
None
|
||||||
|
Returns:
|
||||||
|
dict(config): Config of the container"""
|
||||||
|
config = {}
|
||||||
|
if isinstance(branch, (tuple, list)):
|
||||||
|
config[name] = {branch[1]: self.socket.do('GET', '/1.0/{0}/{1}/{2}'.format(to_native(branch[0]), to_native(name), to_native(branch[1])))}
|
||||||
|
else:
|
||||||
|
config[name] = {branch: self.socket.do('GET', '/1.0/{0}/{1}'.format(to_native(branch), to_native(name)))}
|
||||||
|
return config
|
||||||
|
|
||||||
|
def get_container_data(self, names):
|
||||||
|
"""Create Inventory of the container
|
||||||
|
|
||||||
|
Iterate through the different branches of the containers and collect Informations.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
list(names): List of container names
|
||||||
|
Kwargs:
|
||||||
|
None
|
||||||
|
Raises:
|
||||||
|
None
|
||||||
|
Returns:
|
||||||
|
None"""
|
||||||
|
# tuple(('instances','metadata/templates')) to get section in branch
|
||||||
|
# e.g. /1.0/instances/<name>/metadata/templates
|
||||||
|
branches = ['containers', ('instances', 'state')]
|
||||||
|
container_config = {}
|
||||||
|
for branch in branches:
|
||||||
|
for name in names:
|
||||||
|
container_config['containers'] = self._get_config(branch, name)
|
||||||
|
self.data = dict_merge(container_config, self.data)
|
||||||
|
|
||||||
|
def get_network_data(self, names):
|
||||||
|
"""Create Inventory of the container
|
||||||
|
|
||||||
|
Iterate through the different branches of the containers and collect Informations.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
list(names): List of container names
|
||||||
|
Kwargs:
|
||||||
|
None
|
||||||
|
Raises:
|
||||||
|
None
|
||||||
|
Returns:
|
||||||
|
None"""
|
||||||
|
# tuple(('instances','metadata/templates')) to get section in branch
|
||||||
|
# e.g. /1.0/instances/<name>/metadata/templates
|
||||||
|
branches = [('networks', 'state')]
|
||||||
|
network_config = {}
|
||||||
|
for branch in branches:
|
||||||
|
for name in names:
|
||||||
|
try:
|
||||||
|
network_config['networks'] = self._get_config(branch, name)
|
||||||
|
except LXDClientException:
|
||||||
|
network_config['networks'] = {name: None}
|
||||||
|
self.data = dict_merge(network_config, self.data)
|
||||||
|
|
||||||
|
def extract_network_information_from_container_config(self, container_name):
|
||||||
|
"""Returns the network interface configuration
|
||||||
|
|
||||||
|
Returns the network ipv4 and ipv6 config of the container without local-link
|
||||||
|
|
||||||
|
Args:
|
||||||
|
str(container_name): Name oft he container
|
||||||
|
Kwargs:
|
||||||
|
None
|
||||||
|
Raises:
|
||||||
|
None
|
||||||
|
Returns:
|
||||||
|
dict(network_configuration): network config"""
|
||||||
|
container_network_interfaces = self._get_data_entry('containers/{0}/state/metadata/network'.format(container_name))
|
||||||
|
network_configuration = None
|
||||||
|
if container_network_interfaces:
|
||||||
|
network_configuration = {}
|
||||||
|
gen_interface_names = [interface_name for interface_name in container_network_interfaces if interface_name != 'lo']
|
||||||
|
for interface_name in gen_interface_names:
|
||||||
|
gen_address = [address for address in container_network_interfaces[interface_name]['addresses'] if address.get('scope') != 'link']
|
||||||
|
network_configuration[interface_name] = []
|
||||||
|
for address in gen_address:
|
||||||
|
address_set = {}
|
||||||
|
address_set['family'] = address.get('family')
|
||||||
|
address_set['address'] = address.get('address')
|
||||||
|
address_set['netmask'] = address.get('netmask')
|
||||||
|
address_set['combined'] = address.get('address') + '/' + address.get('netmask')
|
||||||
|
network_configuration[interface_name].append(address_set)
|
||||||
|
return network_configuration
|
||||||
|
|
||||||
|
def get_prefered_container_network_interface(self, container_name):
|
||||||
|
"""Helper to get the prefered interface of thr container
|
||||||
|
|
||||||
|
Helper to get the prefered interface provide by neme pattern from 'prefered_container_network_interface'.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
str(containe_name): name of container
|
||||||
|
Kwargs:
|
||||||
|
None
|
||||||
|
Raises:
|
||||||
|
None
|
||||||
|
Returns:
|
||||||
|
str(prefered_interface): None or interface name"""
|
||||||
|
container_network_interfaces = self._get_data_entry('inventory/{0}/network_interfaces'.format(container_name))
|
||||||
|
prefered_interface = None # init
|
||||||
|
if container_network_interfaces: # container have network interfaces
|
||||||
|
# generator if interfaces which start with the desired pattern
|
||||||
|
net_generator = [interface for interface in container_network_interfaces if interface.startswith(self.prefered_container_network_interface)]
|
||||||
|
selected_interfaces = [] # init
|
||||||
|
for interface in net_generator:
|
||||||
|
selected_interfaces.append(interface)
|
||||||
|
if len(selected_interfaces) > 0:
|
||||||
|
prefered_interface = sorted(selected_interfaces)[0]
|
||||||
|
return prefered_interface
|
||||||
|
|
||||||
|
    def get_container_vlans(self, container_name):
        """Get VLAN(s) from container

        Helper to get the VLAN_IDs of the networks a container is
        attached to.

        Args:
            str(container_name): name of container
        Kwargs:
            None
        Raises:
            None
        Returns:
            dict(vlan_ids): {network_name: vlan_id} for each attached
            network that has a VLAN, or None when there is no match"""
        # get network device configuration and store {network: vlan_id}
        network_vlans = {}
        for network in self._get_data_entry('networks'):
            # data may be None for networks whose state could not be fetched;
            # _get_data_entry() then falls back and simply yields no vid
            if self._get_data_entry('state/metadata/vlan/vid', data=self.data['networks'].get(network)):
                network_vlans[network] = self._get_data_entry('state/metadata/vlan/vid', data=self.data['networks'].get(network))

        # get networkdevices of container and return
        # e.g.
        # "eth0":{ "name":"eth0",
        #          "network":"lxdbr0",
        #          "type":"nic"},
        vlan_ids = {}
        devices = self._get_data_entry('containers/{0}/containers/metadata/expanded_devices'.format(to_native(container_name)))
        for device in devices:
            if 'network' in devices[device]:
                if devices[device]['network'] in network_vlans:
                    vlan_ids[devices[device].get('network')] = network_vlans[devices[device].get('network')]
        return vlan_ids if vlan_ids else None
|
||||||
|
|
||||||
|
def _get_data_entry(self, path, data=None, delimiter='/'):
|
||||||
|
"""Helper to get data
|
||||||
|
|
||||||
|
Helper to get data from self.data by a path like 'path/to/target'
|
||||||
|
Attention: Escaping of the delimiter is not (yet) provided.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
str(path): path to nested dict
|
||||||
|
Kwargs:
|
||||||
|
dict(data): datastore
|
||||||
|
str(delimiter): delimiter in Path.
|
||||||
|
Raises:
|
||||||
|
None
|
||||||
|
Returns:
|
||||||
|
*(value)"""
|
||||||
|
try:
|
||||||
|
if not data:
|
||||||
|
data = self.data
|
||||||
|
if delimiter in path:
|
||||||
|
path = path.split(delimiter)
|
||||||
|
|
||||||
|
if isinstance(path, list) and len(path) > 1:
|
||||||
|
data = data[path.pop(0)]
|
||||||
|
path = delimiter.join(path)
|
||||||
|
return self._get_data_entry(path, data, delimiter) # recursion
|
||||||
|
return data[path]
|
||||||
|
except KeyError:
|
||||||
|
return None
|
||||||
|
|
||||||
|
def _set_data_entry(self, container_name, key, value, path=None):
|
||||||
|
"""Helper to save data
|
||||||
|
|
||||||
|
Helper to save the data in self.data
|
||||||
|
Detect if data is allready in branch and use dict_merge() to prevent that branch is overwritten.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
str(container_name): name of container
|
||||||
|
str(key): same as dict
|
||||||
|
*(value): same as dict
|
||||||
|
Kwargs:
|
||||||
|
str(path): path to branch-part
|
||||||
|
Raises:
|
||||||
|
AnsibleParserError
|
||||||
|
Returns:
|
||||||
|
None"""
|
||||||
|
if not path:
|
||||||
|
path = self.data['inventory']
|
||||||
|
if container_name not in path:
|
||||||
|
path[container_name] = {}
|
||||||
|
|
||||||
|
try:
|
||||||
|
if isinstance(value, dict) and key in path[container_name]:
|
||||||
|
path[container_name] = dict_merge(value, path[container_name][key])
|
||||||
|
else:
|
||||||
|
path[container_name][key] = value
|
||||||
|
except KeyError as err:
|
||||||
|
raise AnsibleParserError("Unable to store Informations: {0}".format(to_native(err)))
|
||||||
|
|
||||||
|
    def extract_information_from_container_configs(self):
        """Process configuration information

        Condenses the raw API data in self.data['containers'] into the
        'inventory' branch of self.data, one entry per container.

        Args:
            None
        Kwargs:
            None
        Raises:
            None
        Returns:
            None"""
        # create branch "inventory"
        if 'inventory' not in self.data:
            self.data['inventory'] = {}

        for container_name in self.data['containers']:
            # image metadata (missing keys simply yield None)
            self._set_data_entry(container_name, 'os', self._get_data_entry(
                'containers/{0}/containers/metadata/config/image.os'.format(container_name)))
            self._set_data_entry(container_name, 'release', self._get_data_entry(
                'containers/{0}/containers/metadata/config/image.release'.format(container_name)))
            self._set_data_entry(container_name, 'version', self._get_data_entry(
                'containers/{0}/containers/metadata/config/image.version'.format(container_name)))
            self._set_data_entry(container_name, 'profile', self._get_data_entry(
                'containers/{0}/containers/metadata/profiles'.format(container_name)))
            self._set_data_entry(container_name, 'location', self._get_data_entry(
                'containers/{0}/containers/metadata/location'.format(container_name)))
            # last known power state, e.g. 'RUNNING'
            self._set_data_entry(container_name, 'state', self._get_data_entry(
                'containers/{0}/containers/metadata/config/volatile.last_state.power'.format(container_name)))
            # derived network information
            self._set_data_entry(container_name, 'network_interfaces', self.extract_network_information_from_container_config(container_name))
            self._set_data_entry(container_name, 'preferred_interface', self.get_prefered_container_network_interface(container_name))
            self._set_data_entry(container_name, 'vlan_ids', self.get_container_vlans(container_name))
|
||||||
|
|
||||||
|
    def build_inventory_network(self, container_name):
        """Add the network interfaces of the container to the inventory

        Logic:
            - if the container have no interface -> 'ansible_connection: local'
            - get preferred_interface & prefered_container_network_family -> 'ansible_connection: ssh' & 'ansible_host: <IP>'
            - first Interface from: network_interfaces prefered_container_network_family -> 'ansible_connection: ssh' & 'ansible_host: <IP>'

        Args:
            str(container_name): name of container
        Kwargs:
            None
        Raises:
            None
        Returns:
            None"""

        def interface_selection(container_name):
            """Select container Interface for inventory

            Logic:
                - get preferred_interface & prefered_container_network_family -> str(IP)
                - first Interface from: network_interfaces prefered_container_network_family -> str(IP)

            Args:
                str(container_name): name of container
            Kwargs:
                None
            Raises:
                None
            Returns:
                str(ip_address): selected address ('' when nothing matches)"""
            prefered_interface = self._get_data_entry('inventory/{0}/preferred_interface'.format(container_name))  # name or None
            prefered_container_network_family = self.prefered_container_network_family

            ip_address = ''
            if prefered_interface:
                interface = self._get_data_entry('inventory/{0}/network_interfaces/{1}'.format(container_name, prefered_interface))
                for config in interface:
                    if config['family'] == prefered_container_network_family:
                        ip_address = config['address']
                        break
            else:
                # NOTE(review): this branch looks broken -- the path addresses
                # the dict of *all* interfaces, so 'config' iterates interface
                # names (strings) and config['family'] would raise TypeError.
                # It appears unreachable because the caller only invokes
                # interface_selection() when a preferred interface exists;
                # confirm before relying on the fallback described above.
                interface = self._get_data_entry('inventory/{0}/network_interfaces'.format(container_name))
                for config in interface:
                    if config['family'] == prefered_container_network_family:
                        ip_address = config['address']
                        break
            return ip_address

        if self._get_data_entry('inventory/{0}/network_interfaces'.format(container_name)):  # container have network interfaces
            if self._get_data_entry('inventory/{0}/preferred_interface'.format(container_name)):  # container have a preferred interface
                self.inventory.set_variable(container_name, 'ansible_connection', 'ssh')
                self.inventory.set_variable(container_name, 'ansible_host', interface_selection(container_name))
        else:
            # NOTE(review): indentation reconstructed from a mangled source --
            # this 'else' is read as belonging to the outer 'if' (no interfaces
            # at all -> local), matching the docstring; a container WITH
            # interfaces but WITHOUT a preferred one then gets no
            # ansible_connection at all. Confirm against the original file.
            self.inventory.set_variable(container_name, 'ansible_connection', 'local')
|
||||||
|
|
||||||
|
    def build_inventory_hosts(self):
        """Build host-part dynamic inventory

        Build the host-part of the dynamic inventory.
        Add Hosts and host_vars to the inventory.

        Args:
            None
        Kwargs:
            None
        Raises:
            None
        Returns:
            None"""
        for container_name in self.data['inventory']:
            # Only consider containers that match the "state" filter, if self.filter is not None
            if self.filter:
                if self.filter.lower() != self._get_data_entry('inventory/{0}/state'.format(container_name)).lower():
                    continue
            # add container
            self.inventory.add_host(container_name)
            # add network informations
            self.build_inventory_network(container_name)
            # add os
            # NOTE(review): the .lower() calls below assume os/release/state are
            # always strings; a container missing that image metadata would
            # yield None here and raise AttributeError -- confirm the data is
            # guaranteed by the LXD API
            self.inventory.set_variable(container_name, 'ansible_lxd_os', self._get_data_entry('inventory/{0}/os'.format(container_name)).lower())
            # add release
            self.inventory.set_variable(container_name, 'ansible_lxd_release', self._get_data_entry('inventory/{0}/release'.format(container_name)).lower())
            # add profile
            self.inventory.set_variable(container_name, 'ansible_lxd_profile', self._get_data_entry('inventory/{0}/profile'.format(container_name)))
            # add state
            self.inventory.set_variable(container_name, 'ansible_lxd_state', self._get_data_entry('inventory/{0}/state'.format(container_name)).lower())
            # add location information
            if self._get_data_entry('inventory/{0}/location'.format(container_name)) != "none":  # wrong type by lxd 'none' != 'None'
                self.inventory.set_variable(container_name, 'ansible_lxd_location', self._get_data_entry('inventory/{0}/location'.format(container_name)))
            # add VLAN_ID information
            if self._get_data_entry('inventory/{0}/vlan_ids'.format(container_name)):
                self.inventory.set_variable(container_name, 'ansible_lxd_vlan_ids', self._get_data_entry('inventory/{0}/vlan_ids'.format(container_name)))
|
||||||
|
|
||||||
|
def build_inventory_groups_location(self, group_name):
|
||||||
|
"""create group by attribute: location
|
||||||
|
|
||||||
|
Args:
|
||||||
|
str(group_name): Group name
|
||||||
|
Kwargs:
|
||||||
|
None
|
||||||
|
Raises:
|
||||||
|
None
|
||||||
|
Returns:
|
||||||
|
None"""
|
||||||
|
# maybe we just want to expand one group
|
||||||
|
if group_name not in self.inventory.groups:
|
||||||
|
self.inventory.add_group(group_name)
|
||||||
|
|
||||||
|
for container_name in self.inventory.hosts:
|
||||||
|
if 'ansible_lxd_location' in self.inventory.get_host(container_name).get_vars():
|
||||||
|
self.inventory.add_child(group_name, container_name)
|
||||||
|
|
||||||
|
def build_inventory_groups_pattern(self, group_name):
|
||||||
|
"""create group by name pattern
|
||||||
|
|
||||||
|
Args:
|
||||||
|
str(group_name): Group name
|
||||||
|
Kwargs:
|
||||||
|
None
|
||||||
|
Raises:
|
||||||
|
None
|
||||||
|
Returns:
|
||||||
|
None"""
|
||||||
|
# maybe we just want to expand one group
|
||||||
|
if group_name not in self.inventory.groups:
|
||||||
|
self.inventory.add_group(group_name)
|
||||||
|
|
||||||
|
regex_pattern = self.groupby[group_name].get('attribute')
|
||||||
|
|
||||||
|
for container_name in self.inventory.hosts:
|
||||||
|
result = re.search(regex_pattern, container_name)
|
||||||
|
if result:
|
||||||
|
self.inventory.add_child(group_name, container_name)
|
||||||
|
|
||||||
|
    def build_inventory_groups_network_range(self, group_name):
        """check if IP is in network-class

        Adds every host that has at least one address inside the
        configured network range to the group.

        Args:
            str(group_name): Group name
        Kwargs:
            None
        Raises:
            AnsibleParserError: the configured attribute is not a valid network
        Returns:
            None"""
        # maybe we just want to expand one group
        if group_name not in self.inventory.groups:
            self.inventory.add_group(group_name)

        try:
            network = ipaddress.ip_network(to_text(self.groupby[group_name].get('attribute')))
        except ValueError as err:
            raise AnsibleParserError(
                'Error while parsing network range {0}: {1}'.format(self.groupby[group_name].get('attribute'), to_native(err)))

        for container_name in self.inventory.hosts:
            if self.data['inventory'][container_name].get('network_interfaces') is not None:
                # walk every address of every interface of the container
                for interface in self.data['inventory'][container_name].get('network_interfaces'):
                    for interface_family in self.data['inventory'][container_name].get('network_interfaces')[interface]:
                        try:
                            address = ipaddress.ip_address(to_text(interface_family['address']))
                            # version check avoids comparing v4 against a v6 range
                            if address.version == network.version and address in network:
                                self.inventory.add_child(group_name, container_name)
                        except ValueError:
                            # Ignore invalid IP addresses returned by lxd
                            pass
|
||||||
|
|
||||||
|
def build_inventory_groups_os(self, group_name):
|
||||||
|
"""create group by attribute: os
|
||||||
|
|
||||||
|
Args:
|
||||||
|
str(group_name): Group name
|
||||||
|
Kwargs:
|
||||||
|
Noneself.data['inventory'][container_name][interface]
|
||||||
|
Raises:
|
||||||
|
None
|
||||||
|
Returns:
|
||||||
|
None"""
|
||||||
|
# maybe we just want to expand one group
|
||||||
|
if group_name not in self.inventory.groups:
|
||||||
|
self.inventory.add_group(group_name)
|
||||||
|
|
||||||
|
gen_containers = [
|
||||||
|
container_name for container_name in self.inventory.hosts
|
||||||
|
if 'ansible_lxd_os' in self.inventory.get_host(container_name).get_vars()]
|
||||||
|
for container_name in gen_containers:
|
||||||
|
if self.groupby[group_name].get('attribute').lower() == self.inventory.get_host(container_name).get_vars().get('ansible_lxd_os'):
|
||||||
|
self.inventory.add_child(group_name, container_name)
|
||||||
|
|
||||||
|
def build_inventory_groups_release(self, group_name):
|
||||||
|
"""create group by attribute: release
|
||||||
|
|
||||||
|
Args:
|
||||||
|
str(group_name): Group name
|
||||||
|
Kwargs:
|
||||||
|
None
|
||||||
|
Raises:
|
||||||
|
None
|
||||||
|
Returns:
|
||||||
|
None"""
|
||||||
|
# maybe we just want to expand one group
|
||||||
|
if group_name not in self.inventory.groups:
|
||||||
|
self.inventory.add_group(group_name)
|
||||||
|
|
||||||
|
gen_containers = [
|
||||||
|
container_name for container_name in self.inventory.hosts
|
||||||
|
if 'ansible_lxd_release' in self.inventory.get_host(container_name).get_vars()]
|
||||||
|
for container_name in gen_containers:
|
||||||
|
if self.groupby[group_name].get('attribute').lower() == self.inventory.get_host(container_name).get_vars().get('ansible_lxd_release'):
|
||||||
|
self.inventory.add_child(group_name, container_name)
|
||||||
|
|
||||||
|
def build_inventory_groups_profile(self, group_name):
|
||||||
|
"""create group by attribute: profile
|
||||||
|
|
||||||
|
Args:
|
||||||
|
str(group_name): Group name
|
||||||
|
Kwargs:
|
||||||
|
None
|
||||||
|
Raises:
|
||||||
|
None
|
||||||
|
Returns:
|
||||||
|
None"""
|
||||||
|
# maybe we just want to expand one group
|
||||||
|
if group_name not in self.inventory.groups:
|
||||||
|
self.inventory.add_group(group_name)
|
||||||
|
|
||||||
|
gen_containers = [
|
||||||
|
container_name for container_name in self.inventory.hosts.keys()
|
||||||
|
if 'ansible_lxd_profile' in self.inventory.get_host(container_name).get_vars().keys()]
|
||||||
|
for container_name in gen_containers:
|
||||||
|
if self.groupby[group_name].get('attribute').lower() in self.inventory.get_host(container_name).get_vars().get('ansible_lxd_profile'):
|
||||||
|
self.inventory.add_child(group_name, container_name)
|
||||||
|
|
||||||
|
def build_inventory_groups_vlanid(self, group_name):
|
||||||
|
"""create group by attribute: vlanid
|
||||||
|
|
||||||
|
Args:
|
||||||
|
str(group_name): Group name
|
||||||
|
Kwargs:
|
||||||
|
None
|
||||||
|
Raises:
|
||||||
|
None
|
||||||
|
Returns:
|
||||||
|
None"""
|
||||||
|
# maybe we just want to expand one group
|
||||||
|
if group_name not in self.inventory.groups:
|
||||||
|
self.inventory.add_group(group_name)
|
||||||
|
|
||||||
|
gen_containers = [
|
||||||
|
container_name for container_name in self.inventory.hosts.keys()
|
||||||
|
if 'ansible_lxd_vlan_ids' in self.inventory.get_host(container_name).get_vars().keys()]
|
||||||
|
for container_name in gen_containers:
|
||||||
|
if self.groupby[group_name].get('attribute') in self.inventory.get_host(container_name).get_vars().get('ansible_lxd_vlan_ids').values():
|
||||||
|
self.inventory.add_child(group_name, container_name)
|
||||||
|
|
||||||
|
def build_inventory_groups(self):
|
||||||
|
"""Build group-part dynamic inventory
|
||||||
|
|
||||||
|
Build the group-part of the dynamic inventory.
|
||||||
|
Add groups to the inventory.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
None
|
||||||
|
Kwargs:
|
||||||
|
None
|
||||||
|
Raises:
|
||||||
|
None
|
||||||
|
Returns:
|
||||||
|
None"""
|
||||||
|
|
||||||
|
def group_type(group_name):
|
||||||
|
"""create groups defined by lxd.yml or defaultvalues
|
||||||
|
|
||||||
|
create groups defined by lxd.yml or defaultvalues
|
||||||
|
supportetd:
|
||||||
|
* 'location'
|
||||||
|
* 'pattern'
|
||||||
|
* 'network_range'
|
||||||
|
* 'os'
|
||||||
|
* 'release'
|
||||||
|
* 'profile'
|
||||||
|
* 'vlanid'
|
||||||
|
|
||||||
|
Args:
|
||||||
|
str(group_name): Group name
|
||||||
|
Kwargs:
|
||||||
|
None
|
||||||
|
Raises:
|
||||||
|
None
|
||||||
|
Returns:
|
||||||
|
None"""
|
||||||
|
|
||||||
|
# Due to the compatibility with python 2 no use of map
|
||||||
|
if self.groupby[group_name].get('type') == 'location':
|
||||||
|
self.build_inventory_groups_location(group_name)
|
||||||
|
elif self.groupby[group_name].get('type') == 'pattern':
|
||||||
|
self.build_inventory_groups_pattern(group_name)
|
||||||
|
elif self.groupby[group_name].get('type') == 'network_range':
|
||||||
|
self.build_inventory_groups_network_range(group_name)
|
||||||
|
elif self.groupby[group_name].get('type') == 'os':
|
||||||
|
self.build_inventory_groups_os(group_name)
|
||||||
|
elif self.groupby[group_name].get('type') == 'release':
|
||||||
|
self.build_inventory_groups_release(group_name)
|
||||||
|
elif self.groupby[group_name].get('type') == 'profile':
|
||||||
|
self.build_inventory_groups_profile(group_name)
|
||||||
|
elif self.groupby[group_name].get('type') == 'vlanid':
|
||||||
|
self.build_inventory_groups_vlanid(group_name)
|
||||||
|
else:
|
||||||
|
raise AnsibleParserError('Unknown group type: {0}'.format(to_native(group_name)))
|
||||||
|
|
||||||
|
if self.groupby:
|
||||||
|
for group_name in self.groupby:
|
||||||
|
if not group_name.isalnum():
|
||||||
|
raise AnsibleParserError('Invalid character(s) in groupname: {0}'.format(to_native(group_name)))
|
||||||
|
group_type(group_name)
|
||||||
|
|
||||||
|
    def build_inventory(self):
        """Build dynamic inventory

        Build the dynamic inventory: hosts first, then the groups that
        are derived from the host variables.

        Args:
            None
        Kwargs:
            None
        Raises:
            None
        Returns:
            None"""

        self.build_inventory_hosts()
        self.build_inventory_groups()
|
||||||
|
|
||||||
|
    def _populate(self):
        """Return the hosts and groups

        Fetches container and network data from the LXD socket (unless a
        unittest injected self.data beforehand), condenses it, and builds
        the inventory from it.

        Args:
            None
        Kwargs:
            None
        Raises:
            None
        Returns:
            None"""

        if len(self.data) == 0:  # If no data is injected by unittests open socket
            self.socket = self._connect_to_socket()
            self.get_container_data(self._get_containers())
            self.get_network_data(self._get_networks())

        self.extract_information_from_container_configs()

        # debug hook: dump the collected data next to this module
        # self.display.vvv(self.save_json_data([os.path.abspath(__file__)]))

        self.build_inventory()
|
||||||
|
|
||||||
|
def parse(self, inventory, loader, path, cache):
|
||||||
|
"""Return dynamic inventory from source
|
||||||
|
|
||||||
|
Returns the processed inventory from the lxd import
|
||||||
|
|
||||||
|
Args:
|
||||||
|
str(inventory): inventory object with existing data and
|
||||||
|
the methods to add hosts/groups/variables
|
||||||
|
to inventory
|
||||||
|
str(loader): Ansible's DataLoader
|
||||||
|
str(path): path to the config
|
||||||
|
bool(cache): use or avoid caches
|
||||||
|
Kwargs:
|
||||||
|
None
|
||||||
|
Raises:
|
||||||
|
AnsibleParserError
|
||||||
|
Returns:
|
||||||
|
None"""
|
||||||
|
|
||||||
|
super(InventoryModule, self).parse(inventory, loader, path, cache=False)
|
||||||
|
# Read the inventory YAML file
|
||||||
|
self._read_config_data(path)
|
||||||
|
try:
|
||||||
|
self.client_key = self.get_option('client_key')
|
||||||
|
self.client_cert = self.get_option('client_cert')
|
||||||
|
self.debug = self.DEBUG
|
||||||
|
self.data = {} # store for inventory-data
|
||||||
|
self.groupby = self.get_option('groupby')
|
||||||
|
self.plugin = self.get_option('plugin')
|
||||||
|
self.prefered_container_network_family = self.get_option('prefered_container_network_family')
|
||||||
|
self.prefered_container_network_interface = self.get_option('prefered_container_network_interface')
|
||||||
|
if self.get_option('state').lower() == 'none': # none in config is str()
|
||||||
|
self.filter = None
|
||||||
|
else:
|
||||||
|
self.filter = self.get_option('state').lower()
|
||||||
|
self.trust_password = self.get_option('trust_password')
|
||||||
|
self.url = self.get_option('url')
|
||||||
|
except Exception as err:
|
||||||
|
raise AnsibleParserError(
|
||||||
|
'All correct options required: {0}'.format(to_native(err)))
|
||||||
|
# Call our internal helper to populate the dynamic inventory
|
||||||
|
self._populate()
|
||||||
@@ -56,7 +56,7 @@ from subprocess import Popen, PIPE
|
|||||||
|
|
||||||
from ansible import constants as C
|
from ansible import constants as C
|
||||||
from ansible.errors import AnsibleParserError
|
from ansible.errors import AnsibleParserError
|
||||||
from ansible.module_utils._text import to_native, to_text
|
from ansible.module_utils.common.text.converters import to_native, to_text
|
||||||
from ansible.plugins.inventory import BaseInventoryPlugin, Constructable, Cacheable
|
from ansible.plugins.inventory import BaseInventoryPlugin, Constructable, Cacheable
|
||||||
from ansible.module_utils.common.process import get_bin_path
|
from ansible.module_utils.common.process import get_bin_path
|
||||||
|
|
||||||
|
|||||||
@@ -61,7 +61,7 @@ from sys import version as python_version
|
|||||||
from ansible.errors import AnsibleError
|
from ansible.errors import AnsibleError
|
||||||
from ansible.module_utils.urls import open_url
|
from ansible.module_utils.urls import open_url
|
||||||
from ansible.plugins.inventory import BaseInventoryPlugin
|
from ansible.plugins.inventory import BaseInventoryPlugin
|
||||||
from ansible.module_utils._text import to_native, to_text
|
from ansible.module_utils.common.text.converters import to_native, to_text
|
||||||
from ansible.module_utils.ansible_release import __version__ as ansible_version
|
from ansible.module_utils.ansible_release import __version__ as ansible_version
|
||||||
from ansible.module_utils.six.moves.urllib.parse import urljoin
|
from ansible.module_utils.six.moves.urllib.parse import urljoin
|
||||||
|
|
||||||
|
|||||||
@@ -70,6 +70,13 @@ DOCUMENTATION = '''
|
|||||||
description: Gather LXC/QEMU configuration facts.
|
description: Gather LXC/QEMU configuration facts.
|
||||||
default: no
|
default: no
|
||||||
type: bool
|
type: bool
|
||||||
|
want_proxmox_nodes_ansible_host:
|
||||||
|
version_added: 3.0.0
|
||||||
|
description:
|
||||||
|
- Whether to set C(ansbile_host) for proxmox nodes.
|
||||||
|
- When set to C(true) (default), will use the first available interface. This can be different from what you expect.
|
||||||
|
default: true
|
||||||
|
type: bool
|
||||||
strict:
|
strict:
|
||||||
version_added: 2.5.0
|
version_added: 2.5.0
|
||||||
compose:
|
compose:
|
||||||
@@ -234,13 +241,22 @@ class InventoryModule(BaseInventoryPlugin, Constructable, Cacheable):
|
|||||||
)
|
)
|
||||||
)['result']
|
)['result']
|
||||||
|
|
||||||
|
if "error" in ifaces:
|
||||||
|
if "class" in ifaces["error"]:
|
||||||
|
# This happens on Windows, even though qemu agent is running, the IP address
|
||||||
|
# cannot be fetched, as it's unsupported, also a command disabled can happen.
|
||||||
|
errorClass = ifaces["error"]["class"]
|
||||||
|
if errorClass in ["Unsupported"]:
|
||||||
|
self.display.v("Retrieving network interfaces from guest agents on windows with older qemu-guest-agents is not supported")
|
||||||
|
elif errorClass in ["CommandDisabled"]:
|
||||||
|
self.display.v("Retrieving network interfaces from guest agents has been disabled")
|
||||||
|
return result
|
||||||
|
|
||||||
for iface in ifaces:
|
for iface in ifaces:
|
||||||
result.append({
|
result.append({
|
||||||
'name': iface['name'],
|
'name': iface['name'],
|
||||||
'mac-address': iface['hardware-address'],
|
'mac-address': iface['hardware-address'] if 'hardware-address' in iface else '',
|
||||||
'ip-addresses': [
|
'ip-addresses': ["%s/%s" % (ip['ip-address'], ip['prefix']) for ip in iface['ip-addresses']] if 'ip-addresses' in iface else []
|
||||||
"%s/%s" % (ip['ip-address'], ip['prefix']) for ip in iface['ip-addresses']
|
|
||||||
]
|
|
||||||
})
|
})
|
||||||
except requests.HTTPError:
|
except requests.HTTPError:
|
||||||
pass
|
pass
|
||||||
@@ -353,12 +369,10 @@ class InventoryModule(BaseInventoryPlugin, Constructable, Cacheable):
|
|||||||
if node['type'] == 'node':
|
if node['type'] == 'node':
|
||||||
self.inventory.add_child(nodes_group, node['node'])
|
self.inventory.add_child(nodes_group, node['node'])
|
||||||
|
|
||||||
if node['status'] == 'offline':
|
|
||||||
continue
|
|
||||||
|
|
||||||
# get node IP address
|
# get node IP address
|
||||||
ip = self._get_node_ip(node['node'])
|
if self.get_option("want_proxmox_nodes_ansible_host"):
|
||||||
self.inventory.set_variable(node['node'], 'ansible_host', ip)
|
ip = self._get_node_ip(node['node'])
|
||||||
|
self.inventory.set_variable(node['node'], 'ansible_host', ip)
|
||||||
|
|
||||||
# get LXC containers for this node
|
# get LXC containers for this node
|
||||||
node_lxc_group = self.to_safe('%s%s' % (self.get_option('group_prefix'), ('%s_lxc' % node['node']).lower()))
|
node_lxc_group = self.to_safe('%s%s' % (self.get_option('group_prefix'), ('%s_lxc' % node['node']).lower()))
|
||||||
@@ -386,7 +400,7 @@ class InventoryModule(BaseInventoryPlugin, Constructable, Cacheable):
|
|||||||
node_qemu_group = self.to_safe('%s%s' % (self.get_option('group_prefix'), ('%s_qemu' % node['node']).lower()))
|
node_qemu_group = self.to_safe('%s%s' % (self.get_option('group_prefix'), ('%s_qemu' % node['node']).lower()))
|
||||||
self.inventory.add_group(node_qemu_group)
|
self.inventory.add_group(node_qemu_group)
|
||||||
for qemu in self._get_qemu_per_node(node['node']):
|
for qemu in self._get_qemu_per_node(node['node']):
|
||||||
if qemu.get('template'):
|
if qemu['template']:
|
||||||
continue
|
continue
|
||||||
|
|
||||||
self.inventory.add_host(qemu['name'])
|
self.inventory.add_host(qemu['name'])
|
||||||
|
|||||||
@@ -100,7 +100,7 @@ from ansible.errors import AnsibleError
|
|||||||
from ansible.plugins.inventory import BaseInventoryPlugin, Constructable
|
from ansible.plugins.inventory import BaseInventoryPlugin, Constructable
|
||||||
from ansible_collections.community.general.plugins.module_utils.scaleway import SCALEWAY_LOCATION, parse_pagination_link
|
from ansible_collections.community.general.plugins.module_utils.scaleway import SCALEWAY_LOCATION, parse_pagination_link
|
||||||
from ansible.module_utils.urls import open_url
|
from ansible.module_utils.urls import open_url
|
||||||
from ansible.module_utils._text import to_native, to_text
|
from ansible.module_utils.common.text.converters import to_native, to_text
|
||||||
|
|
||||||
import ansible.module_utils.six.moves.urllib.parse as urllib_parse
|
import ansible.module_utils.six.moves.urllib.parse as urllib_parse
|
||||||
|
|
||||||
|
|||||||
@@ -56,7 +56,7 @@ import os
|
|||||||
from subprocess import Popen, PIPE
|
from subprocess import Popen, PIPE
|
||||||
|
|
||||||
from ansible.errors import AnsibleParserError
|
from ansible.errors import AnsibleParserError
|
||||||
from ansible.module_utils._text import to_bytes, to_native, to_text
|
from ansible.module_utils.common.text.converters import to_bytes, to_native, to_text
|
||||||
from ansible.module_utils.common._collections_compat import MutableMapping
|
from ansible.module_utils.common._collections_compat import MutableMapping
|
||||||
from ansible.plugins.inventory import BaseInventoryPlugin, Constructable, Cacheable
|
from ansible.plugins.inventory import BaseInventoryPlugin, Constructable, Cacheable
|
||||||
from ansible.module_utils.common.process import get_bin_path
|
from ansible.module_utils.common.process import get_bin_path
|
||||||
|
|||||||
@@ -106,7 +106,7 @@ import os
|
|||||||
from ansible.module_utils.six.moves.urllib.parse import urlparse
|
from ansible.module_utils.six.moves.urllib.parse import urlparse
|
||||||
from ansible.errors import AnsibleError, AnsibleAssertionError
|
from ansible.errors import AnsibleError, AnsibleAssertionError
|
||||||
from ansible.plugins.lookup import LookupBase
|
from ansible.plugins.lookup import LookupBase
|
||||||
from ansible.module_utils._text import to_text
|
from ansible.module_utils.common.text.converters import to_text
|
||||||
|
|
||||||
try:
|
try:
|
||||||
import consul
|
import consul
|
||||||
|
|||||||
@@ -74,7 +74,7 @@ from subprocess import Popen
|
|||||||
from ansible.errors import AnsibleError
|
from ansible.errors import AnsibleError
|
||||||
from ansible.plugins.lookup import LookupBase
|
from ansible.plugins.lookup import LookupBase
|
||||||
from ansible.parsing.splitter import parse_kv
|
from ansible.parsing.splitter import parse_kv
|
||||||
from ansible.module_utils._text import to_bytes, to_text, to_native
|
from ansible.module_utils.common.text.converters import to_bytes, to_text, to_native
|
||||||
from ansible.utils.display import Display
|
from ansible.utils.display import Display
|
||||||
|
|
||||||
display = Display()
|
display = Display()
|
||||||
|
|||||||
208
plugins/lookup/dependent.py
Normal file
208
plugins/lookup/dependent.py
Normal file
@@ -0,0 +1,208 @@
|
|||||||
|
# (c) 2015-2021, Felix Fontein <felix@fontein.de>
|
||||||
|
# (c) 2018 Ansible Project
|
||||||
|
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||||
|
from __future__ import (absolute_import, division, print_function)
|
||||||
|
__metaclass__ = type
|
||||||
|
|
||||||
|
DOCUMENTATION = """
|
||||||
|
name: dependent
|
||||||
|
short_description: Composes a list with nested elements of other lists or dicts which can depend on previous loop variables
|
||||||
|
version_added: 3.1.0
|
||||||
|
description:
|
||||||
|
- "Takes the input lists and returns a list with elements that are lists, dictionaries,
|
||||||
|
or template expressions which evaluate to lists or dicts, composed of the elements of
|
||||||
|
the input evaluated lists and dictionaries."
|
||||||
|
options:
|
||||||
|
_raw:
|
||||||
|
description:
|
||||||
|
- A list where the elements are one-element dictionaries, mapping a name to a string, list, or dictionary.
|
||||||
|
The name is the index that is used in the result object. The value is iterated over as described below.
|
||||||
|
- If the value is a list, it is simply iterated over.
|
||||||
|
- If the value is a dictionary, it is iterated over and returned as if they would be processed by the
|
||||||
|
R(ansible.builtin.dict2items filter,ansible_collections.ansible.builtin.dict2items_filter).
|
||||||
|
- If the value is a string, it is evaluated as Jinja2 expressions which can access the previously chosen
|
||||||
|
elements with C(item.<index_name>). The result must be a list or a dictionary.
|
||||||
|
type: list
|
||||||
|
elements: dict
|
||||||
|
required: true
|
||||||
|
"""
|
||||||
|
|
||||||
|
EXAMPLES = """
|
||||||
|
- name: Install/remove public keys for active admin users
|
||||||
|
ansible.posix.authorized_key:
|
||||||
|
user: "{{ item.admin.key }}"
|
||||||
|
key: "{{ lookup('file', item.key.public_key) }}"
|
||||||
|
state: "{{ 'present' if item.key.active else 'absent' }}"
|
||||||
|
when: item.admin.value.active
|
||||||
|
with_community.general.dependent:
|
||||||
|
- admin: admin_user_data
|
||||||
|
- key: admin_ssh_keys[item.admin.key]
|
||||||
|
loop_control:
|
||||||
|
# Makes the output readable, so that it doesn't contain the whole subdictionaries and lists
|
||||||
|
label: "{{ [item.admin.key, 'active' if item.key.active else 'inactive', item.key.public_key] }}"
|
||||||
|
vars:
|
||||||
|
admin_user_data:
|
||||||
|
admin1:
|
||||||
|
name: Alice
|
||||||
|
active: true
|
||||||
|
admin2:
|
||||||
|
name: Bob
|
||||||
|
active: true
|
||||||
|
admin_ssh_keys:
|
||||||
|
admin1:
|
||||||
|
- private_key: keys/private_key_admin1.pem
|
||||||
|
public_key: keys/private_key_admin1.pub
|
||||||
|
active: true
|
||||||
|
admin2:
|
||||||
|
- private_key: keys/private_key_admin2.pem
|
||||||
|
public_key: keys/private_key_admin2.pub
|
||||||
|
active: true
|
||||||
|
- private_key: keys/private_key_admin2-old.pem
|
||||||
|
public_key: keys/private_key_admin2-old.pub
|
||||||
|
active: false
|
||||||
|
|
||||||
|
- name: Update DNS records
|
||||||
|
community.aws.route53:
|
||||||
|
zone: "{{ item.zone.key }}"
|
||||||
|
record: "{{ item.prefix.key ~ '.' if item.prefix.key else '' }}{{ item.zone.key }}"
|
||||||
|
type: "{{ item.entry.key }}"
|
||||||
|
ttl: "{{ item.entry.value.ttl | default(3600) }}"
|
||||||
|
value: "{{ item.entry.value.value }}"
|
||||||
|
state: "{{ 'absent' if (item.entry.value.absent | default(False)) else 'present' }}"
|
||||||
|
overwrite: true
|
||||||
|
loop_control:
|
||||||
|
# Makes the output readable, so that it doesn't contain the whole subdictionaries and lists
|
||||||
|
label: |-
|
||||||
|
{{ [item.zone.key, item.prefix.key, item.entry.key,
|
||||||
|
item.entry.value.ttl | default(3600),
|
||||||
|
item.entry.value.absent | default(False), item.entry.value.value] }}
|
||||||
|
with_community.general.dependent:
|
||||||
|
- zone: dns_setup
|
||||||
|
- prefix: item.zone.value
|
||||||
|
- entry: item.prefix.value
|
||||||
|
vars:
|
||||||
|
dns_setup:
|
||||||
|
example.com:
|
||||||
|
'':
|
||||||
|
A:
|
||||||
|
value:
|
||||||
|
- 1.2.3.4
|
||||||
|
AAAA:
|
||||||
|
value:
|
||||||
|
- "2a01:1:2:3::1"
|
||||||
|
'test._domainkey':
|
||||||
|
TXT:
|
||||||
|
ttl: 300
|
||||||
|
value:
|
||||||
|
- '"k=rsa; t=s; p=MIGfMA..."'
|
||||||
|
example.org:
|
||||||
|
'www':
|
||||||
|
A:
|
||||||
|
value:
|
||||||
|
- 1.2.3.4
|
||||||
|
- 5.6.7.8
|
||||||
|
"""
|
||||||
|
|
||||||
|
RETURN = """
|
||||||
|
_list:
|
||||||
|
description:
|
||||||
|
- A list composed of dictionaries whose keys are the variable names from the input list.
|
||||||
|
type: list
|
||||||
|
elements: dict
|
||||||
|
sample:
|
||||||
|
- key1: a
|
||||||
|
key2: test
|
||||||
|
- key1: a
|
||||||
|
key2: foo
|
||||||
|
- key1: b
|
||||||
|
key2: bar
|
||||||
|
"""
|
||||||
|
|
||||||
|
from ansible.errors import AnsibleLookupError
|
||||||
|
from ansible.module_utils.common._collections_compat import Mapping, Sequence
|
||||||
|
from ansible.module_utils.six import string_types
|
||||||
|
from ansible.plugins.lookup import LookupBase
|
||||||
|
from ansible.template import Templar
|
||||||
|
|
||||||
|
|
||||||
|
class LookupModule(LookupBase):
|
||||||
|
def __evaluate(self, expression, templar, variables):
|
||||||
|
"""Evaluate expression with templar.
|
||||||
|
|
||||||
|
``expression`` is the expression to evaluate.
|
||||||
|
``variables`` are the variables to use.
|
||||||
|
"""
|
||||||
|
templar.available_variables = variables or {}
|
||||||
|
return templar.template("{0}{1}{2}".format("{{", expression, "}}"), cache=False)
|
||||||
|
|
||||||
|
def __process(self, result, terms, index, current, templar, variables):
|
||||||
|
"""Fills ``result`` list with evaluated items.
|
||||||
|
|
||||||
|
``result`` is a list where the resulting items are placed.
|
||||||
|
``terms`` is the parsed list of terms
|
||||||
|
``index`` is the current index to be processed in the list.
|
||||||
|
``current`` is a dictionary where the first ``index`` values are filled in.
|
||||||
|
``variables`` are the variables currently available.
|
||||||
|
"""
|
||||||
|
# If we are done, add to result list:
|
||||||
|
if index == len(terms):
|
||||||
|
result.append(current.copy())
|
||||||
|
return
|
||||||
|
|
||||||
|
key, expression, values = terms[index]
|
||||||
|
|
||||||
|
if expression is not None:
|
||||||
|
# Evaluate expression in current context
|
||||||
|
vars = variables.copy()
|
||||||
|
vars['item'] = current.copy()
|
||||||
|
try:
|
||||||
|
values = self.__evaluate(expression, templar, variables=vars)
|
||||||
|
except Exception as e:
|
||||||
|
raise AnsibleLookupError(
|
||||||
|
'Caught "{error}" while evaluating {key!r} with item == {item!r}'.format(
|
||||||
|
error=e, key=key, item=current))
|
||||||
|
|
||||||
|
if isinstance(values, Mapping):
|
||||||
|
for idx, val in sorted(values.items()):
|
||||||
|
current[key] = dict([('key', idx), ('value', val)])
|
||||||
|
self.__process(result, terms, index + 1, current, templar, variables)
|
||||||
|
elif isinstance(values, Sequence):
|
||||||
|
for elt in values:
|
||||||
|
current[key] = elt
|
||||||
|
self.__process(result, terms, index + 1, current, templar, variables)
|
||||||
|
else:
|
||||||
|
raise AnsibleLookupError(
|
||||||
|
'Did not obtain dictionary or list while evaluating {key!r} with item == {item!r}, but {type}'.format(
|
||||||
|
key=key, item=current, type=type(values)))
|
||||||
|
|
||||||
|
def run(self, terms, variables=None, **kwargs):
|
||||||
|
"""Generate list."""
|
||||||
|
result = []
|
||||||
|
if len(terms) > 0:
|
||||||
|
templar = Templar(loader=self._templar._loader)
|
||||||
|
data = []
|
||||||
|
vars_so_far = set()
|
||||||
|
for index, term in enumerate(terms):
|
||||||
|
if not isinstance(term, Mapping):
|
||||||
|
raise AnsibleLookupError(
|
||||||
|
'Parameter {index} must be a dictionary, got {type}'.format(
|
||||||
|
index=index, type=type(term)))
|
||||||
|
if len(term) != 1:
|
||||||
|
raise AnsibleLookupError(
|
||||||
|
'Parameter {index} must be a one-element dictionary, got {count} elements'.format(
|
||||||
|
index=index, count=len(term)))
|
||||||
|
k, v = list(term.items())[0]
|
||||||
|
if k in vars_so_far:
|
||||||
|
raise AnsibleLookupError(
|
||||||
|
'The variable {key!r} appears more than once'.format(key=k))
|
||||||
|
vars_so_far.add(k)
|
||||||
|
if isinstance(v, string_types):
|
||||||
|
data.append((k, v, None))
|
||||||
|
elif isinstance(v, (Sequence, Mapping)):
|
||||||
|
data.append((k, None, v))
|
||||||
|
else:
|
||||||
|
raise AnsibleLookupError(
|
||||||
|
'Parameter {key!r} (index {index}) must have a value of type string, dictionary or list, got type {type}'.format(
|
||||||
|
index=index, key=k, type=type(v)))
|
||||||
|
self.__process(result, data, 0, {}, templar, variables)
|
||||||
|
return result
|
||||||
@@ -152,7 +152,7 @@ RETURN = """
|
|||||||
|
|
||||||
from ansible.errors import AnsibleError
|
from ansible.errors import AnsibleError
|
||||||
from ansible.plugins.lookup import LookupBase
|
from ansible.plugins.lookup import LookupBase
|
||||||
from ansible.module_utils._text import to_native
|
from ansible.module_utils.common.text.converters import to_native
|
||||||
import socket
|
import socket
|
||||||
|
|
||||||
try:
|
try:
|
||||||
|
|||||||
@@ -54,7 +54,7 @@ except ImportError:
|
|||||||
pass
|
pass
|
||||||
|
|
||||||
from ansible.errors import AnsibleError
|
from ansible.errors import AnsibleError
|
||||||
from ansible.module_utils._text import to_native
|
from ansible.module_utils.common.text.converters import to_native
|
||||||
from ansible.plugins.lookup import LookupBase
|
from ansible.plugins.lookup import LookupBase
|
||||||
|
|
||||||
# ==============================================================
|
# ==============================================================
|
||||||
|
|||||||
@@ -138,7 +138,7 @@ import re
|
|||||||
from ansible.plugins.lookup import LookupBase
|
from ansible.plugins.lookup import LookupBase
|
||||||
from ansible.utils.display import Display
|
from ansible.utils.display import Display
|
||||||
from ansible.module_utils.basic import missing_required_lib
|
from ansible.module_utils.basic import missing_required_lib
|
||||||
from ansible.module_utils._text import to_native
|
from ansible.module_utils.common.text.converters import to_native
|
||||||
from ansible.plugins.lookup import LookupBase
|
from ansible.plugins.lookup import LookupBase
|
||||||
from ansible.errors import AnsibleError, AnsibleLookupError
|
from ansible.errors import AnsibleError, AnsibleLookupError
|
||||||
|
|
||||||
|
|||||||
@@ -124,7 +124,7 @@ except ImportError:
|
|||||||
pass
|
pass
|
||||||
|
|
||||||
from ansible.plugins.lookup import LookupBase
|
from ansible.plugins.lookup import LookupBase
|
||||||
from ansible.module_utils._text import to_native, to_text
|
from ansible.module_utils.common.text.converters import to_native, to_text
|
||||||
from ansible.utils.display import Display
|
from ansible.utils.display import Display
|
||||||
|
|
||||||
display = Display()
|
display = Display()
|
||||||
|
|||||||
@@ -63,7 +63,7 @@ import os
|
|||||||
|
|
||||||
from ansible.plugins.lookup import LookupBase
|
from ansible.plugins.lookup import LookupBase
|
||||||
from ansible.utils.cmd_functions import run_cmd
|
from ansible.utils.cmd_functions import run_cmd
|
||||||
from ansible.module_utils._text import to_text
|
from ansible.module_utils.common.text.converters import to_text
|
||||||
|
|
||||||
ANSIBLE_HIERA_CFG = os.getenv('ANSIBLE_HIERA_CFG', '/etc/hiera.yaml')
|
ANSIBLE_HIERA_CFG = os.getenv('ANSIBLE_HIERA_CFG', '/etc/hiera.yaml')
|
||||||
ANSIBLE_HIERA_BIN = os.getenv('ANSIBLE_HIERA_BIN', '/usr/bin/hiera')
|
ANSIBLE_HIERA_BIN = os.getenv('ANSIBLE_HIERA_BIN', '/usr/bin/hiera')
|
||||||
|
|||||||
@@ -39,7 +39,7 @@ RETURN = """
|
|||||||
from subprocess import Popen, PIPE
|
from subprocess import Popen, PIPE
|
||||||
|
|
||||||
from ansible.errors import AnsibleError
|
from ansible.errors import AnsibleError
|
||||||
from ansible.module_utils._text import to_bytes, to_text
|
from ansible.module_utils.common.text.converters import to_bytes, to_text
|
||||||
from ansible.plugins.lookup import LookupBase
|
from ansible.plugins.lookup import LookupBase
|
||||||
|
|
||||||
|
|
||||||
|
|||||||
@@ -55,7 +55,7 @@ _raw:
|
|||||||
|
|
||||||
from ansible.errors import AnsibleError
|
from ansible.errors import AnsibleError
|
||||||
from ansible.plugins.lookup import LookupBase
|
from ansible.plugins.lookup import LookupBase
|
||||||
from ansible.module_utils._text import to_native, to_text
|
from ansible.module_utils.common.text.converters import to_native, to_text
|
||||||
HAVE_LMDB = True
|
HAVE_LMDB = True
|
||||||
try:
|
try:
|
||||||
import lmdb
|
import lmdb
|
||||||
|
|||||||
@@ -25,6 +25,10 @@ DOCUMENTATION = '''
|
|||||||
author: Unknown (!UNKNOWN)
|
author: Unknown (!UNKNOWN)
|
||||||
name: nios
|
name: nios
|
||||||
short_description: Query Infoblox NIOS objects
|
short_description: Query Infoblox NIOS objects
|
||||||
|
deprecated:
|
||||||
|
why: Please install the infoblox.nios_modules collection and use the corresponding lookup from it.
|
||||||
|
alternative: infoblox.nios_modules.nios_lookup
|
||||||
|
removed_in: 5.0.0
|
||||||
description:
|
description:
|
||||||
- Uses the Infoblox WAPI API to fetch NIOS specified objects. This lookup
|
- Uses the Infoblox WAPI API to fetch NIOS specified objects. This lookup
|
||||||
supports adding additional keywords to filter the return data and specify
|
supports adding additional keywords to filter the return data and specify
|
||||||
|
|||||||
@@ -25,6 +25,10 @@ DOCUMENTATION = '''
|
|||||||
author: Unknown (!UNKNOWN)
|
author: Unknown (!UNKNOWN)
|
||||||
name: nios_next_ip
|
name: nios_next_ip
|
||||||
short_description: Return the next available IP address for a network
|
short_description: Return the next available IP address for a network
|
||||||
|
deprecated:
|
||||||
|
why: Please install the infoblox.nios_modules collection and use the corresponding lookup from it.
|
||||||
|
alternative: infoblox.nios_modules.nios_next_ip
|
||||||
|
removed_in: 5.0.0
|
||||||
description:
|
description:
|
||||||
- Uses the Infoblox WAPI API to return the next available IP addresses
|
- Uses the Infoblox WAPI API to return the next available IP addresses
|
||||||
for a given network CIDR
|
for a given network CIDR
|
||||||
@@ -70,7 +74,7 @@ _list:
|
|||||||
|
|
||||||
from ansible.plugins.lookup import LookupBase
|
from ansible.plugins.lookup import LookupBase
|
||||||
from ansible_collections.community.general.plugins.module_utils.net_tools.nios.api import WapiLookup
|
from ansible_collections.community.general.plugins.module_utils.net_tools.nios.api import WapiLookup
|
||||||
from ansible.module_utils._text import to_text
|
from ansible.module_utils.common.text.converters import to_text
|
||||||
from ansible.errors import AnsibleError
|
from ansible.errors import AnsibleError
|
||||||
|
|
||||||
|
|
||||||
|
|||||||
@@ -25,6 +25,10 @@ DOCUMENTATION = '''
|
|||||||
author: Unknown (!UNKNOWN)
|
author: Unknown (!UNKNOWN)
|
||||||
name: nios_next_network
|
name: nios_next_network
|
||||||
short_description: Return the next available network range for a network-container
|
short_description: Return the next available network range for a network-container
|
||||||
|
deprecated:
|
||||||
|
why: Please install the infoblox.nios_modules collection and use the corresponding lookup from it.
|
||||||
|
alternative: infoblox.nios_modules.nios_next_network
|
||||||
|
removed_in: 5.0.0
|
||||||
description:
|
description:
|
||||||
- Uses the Infoblox WAPI API to return the next available network addresses for
|
- Uses the Infoblox WAPI API to return the next available network addresses for
|
||||||
a given network CIDR
|
a given network CIDR
|
||||||
@@ -80,7 +84,7 @@ _list:
|
|||||||
|
|
||||||
from ansible.plugins.lookup import LookupBase
|
from ansible.plugins.lookup import LookupBase
|
||||||
from ansible_collections.community.general.plugins.module_utils.net_tools.nios.api import WapiLookup
|
from ansible_collections.community.general.plugins.module_utils.net_tools.nios.api import WapiLookup
|
||||||
from ansible.module_utils._text import to_text
|
from ansible.module_utils.common.text.converters import to_text
|
||||||
from ansible.errors import AnsibleError
|
from ansible.errors import AnsibleError
|
||||||
|
|
||||||
|
|
||||||
|
|||||||
@@ -30,6 +30,11 @@ DOCUMENTATION = '''
|
|||||||
aliases: ['vault_password']
|
aliases: ['vault_password']
|
||||||
section:
|
section:
|
||||||
description: Item section containing the field to retrieve (case-insensitive). If absent will return first match from any section.
|
description: Item section containing the field to retrieve (case-insensitive). If absent will return first match from any section.
|
||||||
|
domain:
|
||||||
|
description: Domain of 1Password. Default is U(1password.com).
|
||||||
|
version_added: 3.2.0
|
||||||
|
default: '1password.com'
|
||||||
|
type: str
|
||||||
subdomain:
|
subdomain:
|
||||||
description: The 1Password subdomain to authenticate against.
|
description: The 1Password subdomain to authenticate against.
|
||||||
username:
|
username:
|
||||||
@@ -98,7 +103,7 @@ from subprocess import Popen, PIPE
|
|||||||
|
|
||||||
from ansible.plugins.lookup import LookupBase
|
from ansible.plugins.lookup import LookupBase
|
||||||
from ansible.errors import AnsibleLookupError
|
from ansible.errors import AnsibleLookupError
|
||||||
from ansible.module_utils._text import to_bytes, to_text
|
from ansible.module_utils.common.text.converters import to_bytes, to_text
|
||||||
|
|
||||||
|
|
||||||
class OnePass(object):
|
class OnePass(object):
|
||||||
@@ -109,6 +114,7 @@ class OnePass(object):
|
|||||||
self.logged_in = False
|
self.logged_in = False
|
||||||
self.token = None
|
self.token = None
|
||||||
self.subdomain = None
|
self.subdomain = None
|
||||||
|
self.domain = None
|
||||||
self.username = None
|
self.username = None
|
||||||
self.secret_key = None
|
self.secret_key = None
|
||||||
self.master_password = None
|
self.master_password = None
|
||||||
@@ -168,7 +174,7 @@ class OnePass(object):
|
|||||||
|
|
||||||
args = [
|
args = [
|
||||||
'signin',
|
'signin',
|
||||||
'{0}.1password.com'.format(self.subdomain),
|
'{0}.{1}'.format(self.subdomain, self.domain),
|
||||||
to_bytes(self.username),
|
to_bytes(self.username),
|
||||||
to_bytes(self.secret_key),
|
to_bytes(self.secret_key),
|
||||||
'--output=raw',
|
'--output=raw',
|
||||||
@@ -265,6 +271,7 @@ class LookupModule(LookupBase):
|
|||||||
section = kwargs.get('section')
|
section = kwargs.get('section')
|
||||||
vault = kwargs.get('vault')
|
vault = kwargs.get('vault')
|
||||||
op.subdomain = kwargs.get('subdomain')
|
op.subdomain = kwargs.get('subdomain')
|
||||||
|
op.domain = kwargs.get('domain', '1password.com')
|
||||||
op.username = kwargs.get('username')
|
op.username = kwargs.get('username')
|
||||||
op.secret_key = kwargs.get('secret_key')
|
op.secret_key = kwargs.get('secret_key')
|
||||||
op.master_password = kwargs.get('master_password', kwargs.get('vault_password'))
|
op.master_password = kwargs.get('master_password', kwargs.get('vault_password'))
|
||||||
|
|||||||
@@ -25,9 +25,9 @@ DOCUMENTATION = '''
|
|||||||
env:
|
env:
|
||||||
- name: PASSWORD_STORE_DIR
|
- name: PASSWORD_STORE_DIR
|
||||||
create:
|
create:
|
||||||
description: Create the password if it does not already exist.
|
description: Create the password if it does not already exist. Takes precedence over C(missing).
|
||||||
type: bool
|
type: bool
|
||||||
default: 'no'
|
default: false
|
||||||
overwrite:
|
overwrite:
|
||||||
description: Overwrite the password if it does already exist.
|
description: Overwrite the password if it does already exist.
|
||||||
type: bool
|
type: bool
|
||||||
@@ -60,6 +60,22 @@ DOCUMENTATION = '''
|
|||||||
description: use alphanumeric characters.
|
description: use alphanumeric characters.
|
||||||
type: bool
|
type: bool
|
||||||
default: 'no'
|
default: 'no'
|
||||||
|
missing:
|
||||||
|
description:
|
||||||
|
- List of preference about what to do if the password file is missing.
|
||||||
|
- If I(create=true), the value for this option is ignored and assumed to be C(create).
|
||||||
|
- If set to C(error), the lookup will error out if the passname does not exist.
|
||||||
|
- If set to C(create), the passname will be created with the provided length I(length) if it does not exist.
|
||||||
|
- If set to C(empty) or C(warn), will return a C(none) in case the passname does not exist.
|
||||||
|
When using C(lookup) and not C(query), this will be translated to an empty string.
|
||||||
|
version_added: 3.1.0
|
||||||
|
type: str
|
||||||
|
default: error
|
||||||
|
choices:
|
||||||
|
- error
|
||||||
|
- warn
|
||||||
|
- empty
|
||||||
|
- create
|
||||||
'''
|
'''
|
||||||
EXAMPLES = """
|
EXAMPLES = """
|
||||||
# Debug is used for examples, BAD IDEA to show passwords on screen
|
# Debug is used for examples, BAD IDEA to show passwords on screen
|
||||||
@@ -67,12 +83,28 @@ EXAMPLES = """
|
|||||||
ansible.builtin.debug:
|
ansible.builtin.debug:
|
||||||
msg: "{{ lookup('community.general.passwordstore', 'example/test')}}"
|
msg: "{{ lookup('community.general.passwordstore', 'example/test')}}"
|
||||||
|
|
||||||
|
- name: Basic lookup. Warns if example/test does not exist and returns empty string
|
||||||
|
ansible.builtin.debug:
|
||||||
|
msg: "{{ lookup('community.general.passwordstore', 'example/test missing=warn')}}"
|
||||||
|
|
||||||
- name: Create pass with random 16 character password. If password exists just give the password
|
- name: Create pass with random 16 character password. If password exists just give the password
|
||||||
ansible.builtin.debug:
|
ansible.builtin.debug:
|
||||||
var: mypassword
|
var: mypassword
|
||||||
vars:
|
vars:
|
||||||
mypassword: "{{ lookup('community.general.passwordstore', 'example/test create=true')}}"
|
mypassword: "{{ lookup('community.general.passwordstore', 'example/test create=true')}}"
|
||||||
|
|
||||||
|
- name: Create pass with random 16 character password. If password exists just give the password
|
||||||
|
ansible.builtin.debug:
|
||||||
|
var: mypassword
|
||||||
|
vars:
|
||||||
|
mypassword: "{{ lookup('community.general.passwordstore', 'example/test missing=create')}}"
|
||||||
|
|
||||||
|
- name: Prints 'abc' if example/test does not exist, just give the password otherwise
|
||||||
|
ansible.builtin.debug:
|
||||||
|
var: mypassword
|
||||||
|
vars:
|
||||||
|
mypassword: "{{ lookup('community.general.passwordstore', 'example/test missing=empty') | default('abc', true) }}"
|
||||||
|
|
||||||
- name: Different size password
|
- name: Different size password
|
||||||
ansible.builtin.debug:
|
ansible.builtin.debug:
|
||||||
msg: "{{ lookup('community.general.passwordstore', 'example/test create=true length=42')}}"
|
msg: "{{ lookup('community.general.passwordstore', 'example/test create=true length=42')}}"
|
||||||
@@ -110,11 +142,14 @@ import yaml
|
|||||||
|
|
||||||
from distutils import util
|
from distutils import util
|
||||||
from ansible.errors import AnsibleError, AnsibleAssertionError
|
from ansible.errors import AnsibleError, AnsibleAssertionError
|
||||||
from ansible.module_utils._text import to_bytes, to_native, to_text
|
from ansible.module_utils.common.text.converters import to_bytes, to_native, to_text
|
||||||
|
from ansible.utils.display import Display
|
||||||
from ansible.utils.encrypt import random_password
|
from ansible.utils.encrypt import random_password
|
||||||
from ansible.plugins.lookup import LookupBase
|
from ansible.plugins.lookup import LookupBase
|
||||||
from ansible import constants as C
|
from ansible import constants as C
|
||||||
|
|
||||||
|
display = Display()
|
||||||
|
|
||||||
|
|
||||||
# backhacked check_output with input for python 2.7
|
# backhacked check_output with input for python 2.7
|
||||||
# http://stackoverflow.com/questions/10103551/passing-data-to-subprocess-check-output
|
# http://stackoverflow.com/questions/10103551/passing-data-to-subprocess-check-output
|
||||||
@@ -178,12 +213,17 @@ class LookupModule(LookupBase):
|
|||||||
self.paramvals[key] = util.strtobool(self.paramvals[key])
|
self.paramvals[key] = util.strtobool(self.paramvals[key])
|
||||||
except (ValueError, AssertionError) as e:
|
except (ValueError, AssertionError) as e:
|
||||||
raise AnsibleError(e)
|
raise AnsibleError(e)
|
||||||
|
if self.paramvals['missing'] not in ['error', 'warn', 'create', 'empty']:
|
||||||
|
raise AnsibleError("{0} is not a valid option for missing".format(self.paramvals['missing']))
|
||||||
if not isinstance(self.paramvals['length'], int):
|
if not isinstance(self.paramvals['length'], int):
|
||||||
if self.paramvals['length'].isdigit():
|
if self.paramvals['length'].isdigit():
|
||||||
self.paramvals['length'] = int(self.paramvals['length'])
|
self.paramvals['length'] = int(self.paramvals['length'])
|
||||||
else:
|
else:
|
||||||
raise AnsibleError("{0} is not a correct value for length".format(self.paramvals['length']))
|
raise AnsibleError("{0} is not a correct value for length".format(self.paramvals['length']))
|
||||||
|
|
||||||
|
if self.paramvals['create']:
|
||||||
|
self.paramvals['missing'] = 'create'
|
||||||
|
|
||||||
# Collect pass environment variables from the plugin's parameters.
|
# Collect pass environment variables from the plugin's parameters.
|
||||||
self.env = os.environ.copy()
|
self.env = os.environ.copy()
|
||||||
|
|
||||||
@@ -224,9 +264,11 @@ class LookupModule(LookupBase):
|
|||||||
if e.returncode != 0 and 'not in the password store' in e.output:
|
if e.returncode != 0 and 'not in the password store' in e.output:
|
||||||
# if pass returns 1 and return string contains 'is not in the password store.'
|
# if pass returns 1 and return string contains 'is not in the password store.'
|
||||||
# We need to determine if this is valid or Error.
|
# We need to determine if this is valid or Error.
|
||||||
if not self.paramvals['create']:
|
if self.paramvals['missing'] == 'error':
|
||||||
raise AnsibleError('passname: {0} not found, use create=True'.format(self.passname))
|
raise AnsibleError('passwordstore: passname {0} not found and missing=error is set'.format(self.passname))
|
||||||
else:
|
else:
|
||||||
|
if self.paramvals['missing'] == 'warn':
|
||||||
|
display.warning('passwordstore: passname {0} not found'.format(self.passname))
|
||||||
return False
|
return False
|
||||||
else:
|
else:
|
||||||
raise AnsibleError(e)
|
raise AnsibleError(e)
|
||||||
@@ -294,6 +336,7 @@ class LookupModule(LookupBase):
|
|||||||
'userpass': '',
|
'userpass': '',
|
||||||
'length': 16,
|
'length': 16,
|
||||||
'backup': False,
|
'backup': False,
|
||||||
|
'missing': 'error',
|
||||||
}
|
}
|
||||||
|
|
||||||
for term in terms:
|
for term in terms:
|
||||||
@@ -304,6 +347,9 @@ class LookupModule(LookupBase):
|
|||||||
else:
|
else:
|
||||||
result.append(self.get_passresult())
|
result.append(self.get_passresult())
|
||||||
else: # password does not exist
|
else: # password does not exist
|
||||||
if self.paramvals['create']:
|
if self.paramvals['missing'] == 'create':
|
||||||
result.append(self.generate_password())
|
result.append(self.generate_password())
|
||||||
|
else:
|
||||||
|
result.append(None)
|
||||||
|
|
||||||
return result
|
return result
|
||||||
|
|||||||
99
plugins/lookup/random_pet.py
Normal file
99
plugins/lookup/random_pet.py
Normal file
@@ -0,0 +1,99 @@
|
|||||||
|
# -*- coding: utf-8 -*-
|
||||||
|
# Copyright: (c) 2021, Abhijeet Kasurde <akasurde@redhat.com>
|
||||||
|
# Copyright: (c) 2018, Ansible Project
|
||||||
|
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||||
|
|
||||||
|
from __future__ import (absolute_import, division, print_function)
|
||||||
|
|
||||||
|
__metaclass__ = type
|
||||||
|
|
||||||
|
DOCUMENTATION = r'''
|
||||||
|
name: random_pet
|
||||||
|
author:
|
||||||
|
- Abhijeet Kasurde (@Akasurde)
|
||||||
|
short_description: Generates random pet names
|
||||||
|
version_added: '3.1.0'
|
||||||
|
requirements:
|
||||||
|
- petname U(https://github.com/dustinkirkland/python-petname)
|
||||||
|
description:
|
||||||
|
- Generates random pet names that can be used as unique identifiers for the resources.
|
||||||
|
options:
|
||||||
|
words:
|
||||||
|
description:
|
||||||
|
- The number of words in the pet name.
|
||||||
|
default: 2
|
||||||
|
type: int
|
||||||
|
length:
|
||||||
|
description:
|
||||||
|
- The maximal length of every component of the pet name.
|
||||||
|
- Values below 3 will be set to 3 by petname.
|
||||||
|
default: 6
|
||||||
|
type: int
|
||||||
|
prefix:
|
||||||
|
description: A string to prefix with the name.
|
||||||
|
type: str
|
||||||
|
separator:
|
||||||
|
description: The character to separate words in the pet name.
|
||||||
|
default: "-"
|
||||||
|
type: str
|
||||||
|
'''
|
||||||
|
|
||||||
|
EXAMPLES = r'''
|
||||||
|
- name: Generate pet name
|
||||||
|
ansible.builtin.debug:
|
||||||
|
var: lookup('community.general.random_pet')
|
||||||
|
# Example result: 'loving-raptor'
|
||||||
|
|
||||||
|
- name: Generate pet name with 3 words
|
||||||
|
ansible.builtin.debug:
|
||||||
|
var: lookup('community.general.random_pet', words=3)
|
||||||
|
# Example result: 'fully-fresh-macaw'
|
||||||
|
|
||||||
|
- name: Generate pet name with separator
|
||||||
|
ansible.builtin.debug:
|
||||||
|
var: lookup('community.general.random_pet', separator="_")
|
||||||
|
# Example result: 'causal_snipe'
|
||||||
|
|
||||||
|
- name: Generate pet name with length
|
||||||
|
ansible.builtin.debug:
|
||||||
|
var: lookup('community.general.random_pet', length=7)
|
||||||
|
# Example result: 'natural-peacock'
|
||||||
|
'''
|
||||||
|
|
||||||
|
RETURN = r'''
|
||||||
|
_raw:
|
||||||
|
description: A one-element list containing a random pet name
|
||||||
|
type: list
|
||||||
|
elements: str
|
||||||
|
'''
|
||||||
|
|
||||||
|
try:
|
||||||
|
import petname
|
||||||
|
|
||||||
|
HAS_PETNAME = True
|
||||||
|
except ImportError:
|
||||||
|
HAS_PETNAME = False
|
||||||
|
|
||||||
|
from ansible.errors import AnsibleError
|
||||||
|
from ansible.plugins.lookup import LookupBase
|
||||||
|
|
||||||
|
|
||||||
|
class LookupModule(LookupBase):
|
||||||
|
|
||||||
|
def run(self, terms, variables=None, **kwargs):
|
||||||
|
|
||||||
|
if not HAS_PETNAME:
|
||||||
|
raise AnsibleError('Python petname library is required. '
|
||||||
|
'Please install using "pip install petname"')
|
||||||
|
|
||||||
|
self.set_options(var_options=variables, direct=kwargs)
|
||||||
|
words = self.get_option('words')
|
||||||
|
length = self.get_option('length')
|
||||||
|
prefix = self.get_option('prefix')
|
||||||
|
separator = self.get_option('separator')
|
||||||
|
|
||||||
|
values = petname.Generate(words=words, separator=separator, letters=length)
|
||||||
|
if prefix:
|
||||||
|
values = "%s%s%s" % (prefix, separator, values)
|
||||||
|
|
||||||
|
return [values]
|
||||||
220
plugins/lookup/random_string.py
Normal file
220
plugins/lookup/random_string.py
Normal file
@@ -0,0 +1,220 @@
|
|||||||
|
# -*- coding: utf-8 -*-
|
||||||
|
# Copyright: (c) 2021, Abhijeet Kasurde <akasurde@redhat.com>
|
||||||
|
# Copyright: (c) 2018, Ansible Project
|
||||||
|
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||||
|
|
||||||
|
from __future__ import absolute_import, division, print_function
|
||||||
|
|
||||||
|
__metaclass__ = type
|
||||||
|
|
||||||
|
DOCUMENTATION = r"""
|
||||||
|
name: random_string
|
||||||
|
author:
|
||||||
|
- Abhijeet Kasurde (@Akasurde)
|
||||||
|
short_description: Generates random string
|
||||||
|
version_added: '3.2.0'
|
||||||
|
description:
|
||||||
|
- Generates random string based upon the given constraints.
|
||||||
|
options:
|
||||||
|
length:
|
||||||
|
description: The length of the string.
|
||||||
|
default: 8
|
||||||
|
type: int
|
||||||
|
upper:
|
||||||
|
description:
|
||||||
|
- Include uppercase letters in the string.
|
||||||
|
default: true
|
||||||
|
type: bool
|
||||||
|
lower:
|
||||||
|
description:
|
||||||
|
- Include lowercase letters in the string.
|
||||||
|
default: true
|
||||||
|
type: bool
|
||||||
|
numbers:
|
||||||
|
description:
|
||||||
|
- Include numbers in the string.
|
||||||
|
default: true
|
||||||
|
type: bool
|
||||||
|
special:
|
||||||
|
description:
|
||||||
|
- Include special characters in the string.
|
||||||
|
- Special characters are taken from Python standard library C(string).
|
||||||
|
See L(the documentation of string.punctuation,https://docs.python.org/3/library/string.html#string.punctuation)
|
||||||
|
for which characters will be used.
|
||||||
|
- The choice of special characters can be changed to setting I(override_special).
|
||||||
|
default: true
|
||||||
|
type: bool
|
||||||
|
min_numeric:
|
||||||
|
description:
|
||||||
|
- Minimum number of numeric characters in the string.
|
||||||
|
- If set, overrides I(numbers=false).
|
||||||
|
default: 0
|
||||||
|
type: int
|
||||||
|
min_upper:
|
||||||
|
description:
|
||||||
|
- Minimum number of uppercase alphabets in the string.
|
||||||
|
- If set, overrides I(upper=false).
|
||||||
|
default: 0
|
||||||
|
type: int
|
||||||
|
min_lower:
|
||||||
|
description:
|
||||||
|
- Minimum number of lowercase alphabets in the string.
|
||||||
|
- If set, overrides I(lower=false).
|
||||||
|
default: 0
|
||||||
|
type: int
|
||||||
|
min_special:
|
||||||
|
description:
|
||||||
|
- Minimum number of special character in the string.
|
||||||
|
default: 0
|
||||||
|
type: int
|
||||||
|
override_special:
|
||||||
|
description:
|
||||||
|
- Overide a list of special characters to use in the string.
|
||||||
|
- If set I(min_special) should be set to a non-default value.
|
||||||
|
type: str
|
||||||
|
override_all:
|
||||||
|
description:
|
||||||
|
- Override all values of I(numbers), I(upper), I(lower), and I(special) with
|
||||||
|
the given list of characters.
|
||||||
|
type: str
|
||||||
|
base64:
|
||||||
|
description:
|
||||||
|
- Returns base64 encoded string.
|
||||||
|
type: bool
|
||||||
|
default: false
|
||||||
|
"""
|
||||||
|
|
||||||
|
EXAMPLES = r"""
|
||||||
|
- name: Generate random string
|
||||||
|
ansible.builtin.debug:
|
||||||
|
var: lookup('community.general.random_string')
|
||||||
|
# Example result: ['DeadBeeF']
|
||||||
|
|
||||||
|
- name: Generate random string with length 12
|
||||||
|
ansible.builtin.debug:
|
||||||
|
var: lookup('community.general.random_string', length=12)
|
||||||
|
# Example result: ['Uan0hUiX5kVG']
|
||||||
|
|
||||||
|
- name: Generate base64 encoded random string
|
||||||
|
ansible.builtin.debug:
|
||||||
|
var: lookup('community.general.random_string', base64=True)
|
||||||
|
# Example result: ['NHZ6eWN5Qk0=']
|
||||||
|
|
||||||
|
- name: Generate a random string with 1 lower, 1 upper, 1 number and 1 special char (atleast)
|
||||||
|
ansible.builtin.debug:
|
||||||
|
var: lookup('community.general.random_string', min_lower=1, min_upper=1, min_special=1, min_numeric=1)
|
||||||
|
# Example result: ['&Qw2|E[-']
|
||||||
|
|
||||||
|
- name: Generate a random string with all lower case characters
|
||||||
|
debug:
|
||||||
|
var: query('community.general.random_string', upper=false, numbers=false, special=false)
|
||||||
|
# Example result: ['exolxzyz']
|
||||||
|
|
||||||
|
- name: Generate random hexadecimal string
|
||||||
|
debug:
|
||||||
|
var: query('community.general.random_string', upper=false, lower=false, override_special=hex_chars, numbers=false)
|
||||||
|
vars:
|
||||||
|
hex_chars: '0123456789ABCDEF'
|
||||||
|
# Example result: ['D2A40737']
|
||||||
|
|
||||||
|
- name: Generate random hexadecimal string with override_all
|
||||||
|
debug:
|
||||||
|
var: query('community.general.random_string', override_all=hex_chars)
|
||||||
|
vars:
|
||||||
|
hex_chars: '0123456789ABCDEF'
|
||||||
|
# Example result: ['D2A40737']
|
||||||
|
"""
|
||||||
|
|
||||||
|
RETURN = r"""
|
||||||
|
_raw:
|
||||||
|
description: A one-element list containing a random string
|
||||||
|
type: list
|
||||||
|
elements: str
|
||||||
|
"""
|
||||||
|
|
||||||
|
import base64
|
||||||
|
import random
|
||||||
|
import string
|
||||||
|
|
||||||
|
from ansible.errors import AnsibleLookupError
|
||||||
|
from ansible.plugins.lookup import LookupBase
|
||||||
|
from ansible.module_utils.common.text.converters import to_bytes, to_text
|
||||||
|
|
||||||
|
|
||||||
|
class LookupModule(LookupBase):
|
||||||
|
@staticmethod
|
||||||
|
def get_random(random_generator, chars, length):
|
||||||
|
if not chars:
|
||||||
|
raise AnsibleLookupError(
|
||||||
|
"Available characters cannot be None, please change constraints"
|
||||||
|
)
|
||||||
|
return "".join(random_generator.choice(chars) for dummy in range(length))
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def b64encode(string_value, encoding="utf-8"):
|
||||||
|
return to_text(
|
||||||
|
base64.b64encode(
|
||||||
|
to_bytes(string_value, encoding=encoding, errors="surrogate_or_strict")
|
||||||
|
)
|
||||||
|
)
|
||||||
|
|
||||||
|
def run(self, terms, variables=None, **kwargs):
|
||||||
|
number_chars = string.digits
|
||||||
|
lower_chars = string.ascii_lowercase
|
||||||
|
upper_chars = string.ascii_uppercase
|
||||||
|
special_chars = string.punctuation
|
||||||
|
random_generator = random.SystemRandom()
|
||||||
|
|
||||||
|
self.set_options(var_options=variables, direct=kwargs)
|
||||||
|
|
||||||
|
length = self.get_option("length")
|
||||||
|
base64_flag = self.get_option("base64")
|
||||||
|
override_all = self.get_option("override_all")
|
||||||
|
values = ""
|
||||||
|
available_chars_set = ""
|
||||||
|
|
||||||
|
if override_all:
|
||||||
|
# Override all the values
|
||||||
|
available_chars_set = override_all
|
||||||
|
else:
|
||||||
|
upper = self.get_option("upper")
|
||||||
|
lower = self.get_option("lower")
|
||||||
|
numbers = self.get_option("numbers")
|
||||||
|
special = self.get_option("special")
|
||||||
|
override_special = self.get_option("override_special")
|
||||||
|
|
||||||
|
if override_special:
|
||||||
|
special_chars = override_special
|
||||||
|
|
||||||
|
if upper:
|
||||||
|
available_chars_set += upper_chars
|
||||||
|
if lower:
|
||||||
|
available_chars_set += lower_chars
|
||||||
|
if numbers:
|
||||||
|
available_chars_set += number_chars
|
||||||
|
if special:
|
||||||
|
available_chars_set += special_chars
|
||||||
|
|
||||||
|
mapping = {
|
||||||
|
"min_numeric": number_chars,
|
||||||
|
"min_lower": lower_chars,
|
||||||
|
"min_upper": upper_chars,
|
||||||
|
"min_special": special_chars,
|
||||||
|
}
|
||||||
|
|
||||||
|
for m in mapping:
|
||||||
|
if self.get_option(m):
|
||||||
|
values += self.get_random(random_generator, mapping[m], self.get_option(m))
|
||||||
|
|
||||||
|
remaining_pass_len = length - len(values)
|
||||||
|
values += self.get_random(random_generator, available_chars_set, remaining_pass_len)
|
||||||
|
|
||||||
|
# Get pseudo randomization
|
||||||
|
shuffled_values = list(values)
|
||||||
|
# Randomize the order
|
||||||
|
random.shuffle(shuffled_values)
|
||||||
|
|
||||||
|
if base64_flag:
|
||||||
|
return [self.b64encode("".join(shuffled_values))]
|
||||||
|
|
||||||
|
return ["".join(shuffled_values)]
|
||||||
@@ -80,7 +80,7 @@ try:
|
|||||||
except ImportError:
|
except ImportError:
|
||||||
pass
|
pass
|
||||||
|
|
||||||
from ansible.module_utils._text import to_text
|
from ansible.module_utils.common.text.converters import to_text
|
||||||
from ansible.errors import AnsibleError
|
from ansible.errors import AnsibleError
|
||||||
from ansible.plugins.lookup import LookupBase
|
from ansible.plugins.lookup import LookupBase
|
||||||
|
|
||||||
|
|||||||
@@ -36,7 +36,7 @@ import shelve
|
|||||||
|
|
||||||
from ansible.errors import AnsibleError, AnsibleAssertionError
|
from ansible.errors import AnsibleError, AnsibleAssertionError
|
||||||
from ansible.plugins.lookup import LookupBase
|
from ansible.plugins.lookup import LookupBase
|
||||||
from ansible.module_utils._text import to_bytes, to_text
|
from ansible.module_utils.common.text.converters import to_bytes, to_text
|
||||||
|
|
||||||
|
|
||||||
class LookupModule(LookupBase):
|
class LookupModule(LookupBase):
|
||||||
|
|||||||
@@ -112,17 +112,16 @@ EXAMPLES = r"""
|
|||||||
- ansible.builtin.debug:
|
- ansible.builtin.debug:
|
||||||
msg: the password is {{ secret_password }}
|
msg: the password is {{ secret_password }}
|
||||||
"""
|
"""
|
||||||
from distutils.version import LooseVersion
|
|
||||||
from ansible.errors import AnsibleError, AnsibleOptionsError
|
from ansible.errors import AnsibleError, AnsibleOptionsError
|
||||||
|
|
||||||
sdk_is_missing = False
|
sdk_is_missing = False
|
||||||
|
|
||||||
try:
|
try:
|
||||||
from thycotic import __version__ as sdk_version
|
|
||||||
from thycotic.secrets.server import (
|
from thycotic.secrets.server import (
|
||||||
SecretServer,
|
SecretServer,
|
||||||
|
SecretServerAccessError,
|
||||||
SecretServerError,
|
SecretServerError,
|
||||||
PasswordGrantAuthorizer,
|
|
||||||
)
|
)
|
||||||
except ImportError:
|
except ImportError:
|
||||||
sdk_is_missing = True
|
sdk_is_missing = True
|
||||||
@@ -137,20 +136,7 @@ display = Display()
|
|||||||
class LookupModule(LookupBase):
|
class LookupModule(LookupBase):
|
||||||
@staticmethod
|
@staticmethod
|
||||||
def Client(server_parameters):
|
def Client(server_parameters):
|
||||||
|
return SecretServer(**server_parameters)
|
||||||
if LooseVersion(sdk_version) < LooseVersion('1.0.0'):
|
|
||||||
return SecretServer(**server_parameters)
|
|
||||||
else:
|
|
||||||
authorizer = PasswordGrantAuthorizer(
|
|
||||||
server_parameters["base_url"],
|
|
||||||
server_parameters["username"],
|
|
||||||
server_parameters["password"],
|
|
||||||
server_parameters["token_path_uri"],
|
|
||||||
)
|
|
||||||
|
|
||||||
return SecretServer(
|
|
||||||
server_parameters["base_url"], authorizer, server_parameters["api_path_uri"]
|
|
||||||
)
|
|
||||||
|
|
||||||
def run(self, terms, variables, **kwargs):
|
def run(self, terms, variables, **kwargs):
|
||||||
if sdk_is_missing:
|
if sdk_is_missing:
|
||||||
|
|||||||
@@ -41,7 +41,7 @@ from ansible.module_utils.basic import AnsibleModule, missing_required_lib
|
|||||||
from ansible.module_utils.six.moves.urllib.error import HTTPError, URLError
|
from ansible.module_utils.six.moves.urllib.error import HTTPError, URLError
|
||||||
from ansible.module_utils.urls import open_url
|
from ansible.module_utils.urls import open_url
|
||||||
from ansible.module_utils.api import basic_auth_argument_spec
|
from ansible.module_utils.api import basic_auth_argument_spec
|
||||||
from ansible.module_utils._text import to_native
|
from ansible.module_utils.common.text.converters import to_native
|
||||||
|
|
||||||
try:
|
try:
|
||||||
from ansible.module_utils.ansible_release import __version__ as ansible_version
|
from ansible.module_utils.ansible_release import __version__ as ansible_version
|
||||||
|
|||||||
@@ -1,871 +0,0 @@
|
|||||||
# -*- coding: utf-8 -*-
|
|
||||||
#
|
|
||||||
# Copyright (c) 2016 Red Hat, Inc.
|
|
||||||
#
|
|
||||||
# This file is part of Ansible
|
|
||||||
#
|
|
||||||
# Ansible is free software: you can redistribute it and/or modify
|
|
||||||
# it under the terms of the GNU General Public License as published by
|
|
||||||
# the Free Software Foundation, either version 3 of the License, or
|
|
||||||
# (at your option) any later version.
|
|
||||||
#
|
|
||||||
# Ansible is distributed in the hope that it will be useful,
|
|
||||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
|
||||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
|
||||||
# GNU General Public License for more details.
|
|
||||||
#
|
|
||||||
# You should have received a copy of the GNU General Public License
|
|
||||||
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
|
|
||||||
#
|
|
||||||
|
|
||||||
from __future__ import (absolute_import, division, print_function)
|
|
||||||
__metaclass__ = type
|
|
||||||
|
|
||||||
import inspect
|
|
||||||
import os
|
|
||||||
import time
|
|
||||||
|
|
||||||
from abc import ABCMeta, abstractmethod
|
|
||||||
from datetime import datetime
|
|
||||||
from distutils.version import LooseVersion
|
|
||||||
|
|
||||||
from ansible_collections.community.general.plugins.module_utils.cloud import CloudRetry
|
|
||||||
from ansible.module_utils.common._collections_compat import Mapping
|
|
||||||
|
|
||||||
try:
|
|
||||||
from enum import Enum # enum is a ovirtsdk4 requirement
|
|
||||||
import ovirtsdk4 as sdk
|
|
||||||
import ovirtsdk4.version as sdk_version
|
|
||||||
import ovirtsdk4.types as otypes
|
|
||||||
HAS_SDK = LooseVersion(sdk_version.VERSION) >= LooseVersion('4.3.0')
|
|
||||||
except ImportError:
|
|
||||||
HAS_SDK = False
|
|
||||||
|
|
||||||
|
|
||||||
BYTES_MAP = {
|
|
||||||
'kib': 2**10,
|
|
||||||
'mib': 2**20,
|
|
||||||
'gib': 2**30,
|
|
||||||
'tib': 2**40,
|
|
||||||
'pib': 2**50,
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
def check_sdk(module):
|
|
||||||
if not HAS_SDK:
|
|
||||||
module.fail_json(
|
|
||||||
msg='ovirtsdk4 version 4.3.0 or higher is required for this module'
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
def get_dict_of_struct(struct, connection=None, fetch_nested=False, attributes=None):
|
|
||||||
"""
|
|
||||||
Convert SDK Struct type into dictionary.
|
|
||||||
"""
|
|
||||||
res = {}
|
|
||||||
|
|
||||||
def resolve_href(value):
|
|
||||||
# Fetch nested values of struct:
|
|
||||||
try:
|
|
||||||
value = connection.follow_link(value)
|
|
||||||
except sdk.Error:
|
|
||||||
value = None
|
|
||||||
nested_obj = dict(
|
|
||||||
(attr, convert_value(getattr(value, attr)))
|
|
||||||
for attr in attributes if getattr(value, attr, None) is not None
|
|
||||||
)
|
|
||||||
nested_obj['id'] = getattr(value, 'id', None)
|
|
||||||
nested_obj['href'] = getattr(value, 'href', None)
|
|
||||||
return nested_obj
|
|
||||||
|
|
||||||
def remove_underscore(val):
|
|
||||||
if val.startswith('_'):
|
|
||||||
val = val[1:]
|
|
||||||
remove_underscore(val)
|
|
||||||
return val
|
|
||||||
|
|
||||||
def convert_value(value):
|
|
||||||
nested = False
|
|
||||||
|
|
||||||
if isinstance(value, sdk.Struct):
|
|
||||||
if not fetch_nested or not value.href:
|
|
||||||
return get_dict_of_struct(value)
|
|
||||||
return resolve_href(value)
|
|
||||||
|
|
||||||
elif isinstance(value, Enum) or isinstance(value, datetime):
|
|
||||||
return str(value)
|
|
||||||
elif isinstance(value, list) or isinstance(value, sdk.List):
|
|
||||||
if isinstance(value, sdk.List) and fetch_nested and value.href:
|
|
||||||
try:
|
|
||||||
value = connection.follow_link(value)
|
|
||||||
nested = True
|
|
||||||
except sdk.Error:
|
|
||||||
value = []
|
|
||||||
|
|
||||||
ret = []
|
|
||||||
for i in value:
|
|
||||||
if isinstance(i, sdk.Struct):
|
|
||||||
if not nested and fetch_nested and i.href:
|
|
||||||
ret.append(resolve_href(i))
|
|
||||||
elif not nested:
|
|
||||||
ret.append(get_dict_of_struct(i))
|
|
||||||
else:
|
|
||||||
nested_obj = dict(
|
|
||||||
(attr, convert_value(getattr(i, attr)))
|
|
||||||
for attr in attributes if getattr(i, attr, None)
|
|
||||||
)
|
|
||||||
nested_obj['id'] = getattr(i, 'id', None)
|
|
||||||
ret.append(nested_obj)
|
|
||||||
elif isinstance(i, Enum):
|
|
||||||
ret.append(str(i))
|
|
||||||
else:
|
|
||||||
ret.append(i)
|
|
||||||
return ret
|
|
||||||
else:
|
|
||||||
return value
|
|
||||||
|
|
||||||
if struct is not None:
|
|
||||||
for key, value in struct.__dict__.items():
|
|
||||||
if value is None:
|
|
||||||
continue
|
|
||||||
|
|
||||||
key = remove_underscore(key)
|
|
||||||
res[key] = convert_value(value)
|
|
||||||
|
|
||||||
return res
|
|
||||||
|
|
||||||
|
|
||||||
def engine_version(connection):
|
|
||||||
"""
|
|
||||||
Return string representation of oVirt engine version.
|
|
||||||
"""
|
|
||||||
engine_api = connection.system_service().get()
|
|
||||||
engine_version = engine_api.product_info.version
|
|
||||||
return '%s.%s' % (engine_version.major, engine_version.minor)
|
|
||||||
|
|
||||||
|
|
||||||
def create_connection(auth):
|
|
||||||
"""
|
|
||||||
Create a connection to Python SDK, from task `auth` parameter.
|
|
||||||
If user doesnt't have SSO token the `auth` dictionary has following parameters mandatory:
|
|
||||||
url, username, password
|
|
||||||
|
|
||||||
If user has SSO token the `auth` dictionary has following parameters mandatory:
|
|
||||||
url, token
|
|
||||||
|
|
||||||
The `ca_file` parameter is mandatory in case user want to use secure connection,
|
|
||||||
in case user want to use insecure connection, it's mandatory to send insecure=True.
|
|
||||||
|
|
||||||
:param auth: dictionary which contains needed values for connection creation
|
|
||||||
:return: Python SDK connection
|
|
||||||
"""
|
|
||||||
|
|
||||||
url = auth.get('url')
|
|
||||||
if url is None and auth.get('hostname') is not None:
|
|
||||||
url = 'https://{0}/ovirt-engine/api'.format(auth.get('hostname'))
|
|
||||||
|
|
||||||
return sdk.Connection(
|
|
||||||
url=url,
|
|
||||||
username=auth.get('username'),
|
|
||||||
password=auth.get('password'),
|
|
||||||
ca_file=auth.get('ca_file', None),
|
|
||||||
insecure=auth.get('insecure', False),
|
|
||||||
token=auth.get('token', None),
|
|
||||||
kerberos=auth.get('kerberos', None),
|
|
||||||
headers=auth.get('headers', None),
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
def convert_to_bytes(param):
|
|
||||||
"""
|
|
||||||
This method convert units to bytes, which follow IEC standard.
|
|
||||||
|
|
||||||
:param param: value to be converted
|
|
||||||
"""
|
|
||||||
if param is None:
|
|
||||||
return None
|
|
||||||
|
|
||||||
# Get rid of whitespaces:
|
|
||||||
param = ''.join(param.split())
|
|
||||||
|
|
||||||
# Convert to bytes:
|
|
||||||
if len(param) > 3 and param[-3].lower() in ['k', 'm', 'g', 't', 'p']:
|
|
||||||
return int(param[:-3]) * BYTES_MAP.get(param[-3:].lower(), 1)
|
|
||||||
elif param.isdigit():
|
|
||||||
return int(param) * 2**10
|
|
||||||
else:
|
|
||||||
raise ValueError(
|
|
||||||
"Unsupported value(IEC supported): '{value}'".format(value=param)
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
def follow_link(connection, link):
|
|
||||||
"""
|
|
||||||
This method returns the entity of the element which link points to.
|
|
||||||
|
|
||||||
:param connection: connection to the Python SDK
|
|
||||||
:param link: link of the entity
|
|
||||||
:return: entity which link points to
|
|
||||||
"""
|
|
||||||
|
|
||||||
if link:
|
|
||||||
return connection.follow_link(link)
|
|
||||||
else:
|
|
||||||
return None
|
|
||||||
|
|
||||||
|
|
||||||
def get_link_name(connection, link):
|
|
||||||
"""
|
|
||||||
This method returns the name of the element which link points to.
|
|
||||||
|
|
||||||
:param connection: connection to the Python SDK
|
|
||||||
:param link: link of the entity
|
|
||||||
:return: name of the entity, which link points to
|
|
||||||
"""
|
|
||||||
|
|
||||||
if link:
|
|
||||||
return connection.follow_link(link).name
|
|
||||||
else:
|
|
||||||
return None
|
|
||||||
|
|
||||||
|
|
||||||
def equal(param1, param2, ignore_case=False):
|
|
||||||
"""
|
|
||||||
Compare two parameters and return if they are equal.
|
|
||||||
This parameter doesn't run equal operation if first parameter is None.
|
|
||||||
With this approach we don't run equal operation in case user don't
|
|
||||||
specify parameter in their task.
|
|
||||||
|
|
||||||
:param param1: user inputted parameter
|
|
||||||
:param param2: value of entity parameter
|
|
||||||
:return: True if parameters are equal or first parameter is None, otherwise False
|
|
||||||
"""
|
|
||||||
if param1 is not None:
|
|
||||||
if ignore_case:
|
|
||||||
return param1.lower() == param2.lower()
|
|
||||||
return param1 == param2
|
|
||||||
return True
|
|
||||||
|
|
||||||
|
|
||||||
def search_by_attributes(service, list_params=None, **kwargs):
|
|
||||||
"""
|
|
||||||
Search for the entity by attributes. Nested entities don't support search
|
|
||||||
via REST, so in case using search for nested entity we return all entities
|
|
||||||
and filter them by specified attributes.
|
|
||||||
"""
|
|
||||||
list_params = list_params or {}
|
|
||||||
# Check if 'list' method support search(look for search parameter):
|
|
||||||
if 'search' in inspect.getargspec(service.list)[0]:
|
|
||||||
res = service.list(
|
|
||||||
# There must be double quotes around name, because some oVirt resources it's possible to create then with space in name.
|
|
||||||
search=' and '.join('{0}="{1}"'.format(k, v) for k, v in kwargs.items()),
|
|
||||||
**list_params
|
|
||||||
)
|
|
||||||
else:
|
|
||||||
res = [
|
|
||||||
e for e in service.list(**list_params) if len([
|
|
||||||
k for k, v in kwargs.items() if getattr(e, k, None) == v
|
|
||||||
]) == len(kwargs)
|
|
||||||
]
|
|
||||||
|
|
||||||
res = res or [None]
|
|
||||||
return res[0]
|
|
||||||
|
|
||||||
|
|
||||||
def search_by_name(service, name, **kwargs):
    """
    Search for an entity by its name.

    Nested entities don't support search via REST, so when searching a
    nested entity we list all entities and filter them by name.

    :param service: service of the entity
    :param name: name of the entity
    :param kwargs: additional attribute name/value pairs to filter on
    :return: Entity object returned by the Python SDK, or None
    """
    # inspect.getargspec was removed in Python 3.11; prefer getfullargspec
    # when available and fall back for Python 2 compatibility.
    argspec = getattr(inspect, 'getfullargspec', inspect.getargspec)
    # Check if the 'list' method supports server-side search (look for a
    # 'search' parameter):
    if 'search' in argspec(service.list)[0]:
        res = service.list(
            # There must be double quotes around the name, because some
            # oVirt resources can be created with spaces in their names.
            search='name="{name}"'.format(name=name)
        )
    else:
        res = [e for e in service.list() if e.name == name]

    if kwargs:
        # NOTE(review): this re-lists all entities and filters only by
        # kwargs, discarding the name filter above — preserved as-is for
        # backward compatibility; confirm before changing.
        res = [
            e for e in service.list()
            if all(getattr(e, k, None) == v for k, v in kwargs.items())
        ]

    res = res or [None]
    return res[0]
|
|
||||||
|
|
||||||
|
|
||||||
def get_entity(service, get_params=None):
    """
    Fetch an entity from a service, ignoring SDK errors.

    A lookup failure (for example a 404 while the entity is being removed)
    raises sdk.Error; that is swallowed and None is returned instead.

    :param service: service to fetch the entity from
    :param get_params: optional keyword arguments for ``service.get``
    :return: the entity, or None when the SDK raised an error
    """
    try:
        if get_params is None:
            return service.get()
        return service.get(**get_params)
    except sdk.Error:
        # We can get a 404 here, e.g. while removing the entity; treat it
        # as "no entity".
        return None
|
|
||||||
|
|
||||||
|
|
||||||
def get_id_by_name(service, name, raise_error=True, ignore_case=False):
    """
    Look up an entity by its name and return its id.

    :param service: service of the entity
    :param name: name of the entity to look up
    :param raise_error: when True, raise if no entity with that name exists
    :param ignore_case: accepted for API compatibility; not used here
    :return: the entity id, or None when not found and raise_error is False
    """
    entity = search_by_name(service, name)

    if entity is None:
        if raise_error:
            raise Exception("Entity '%s' was not found." % name)
        return None

    return entity.id
|
|
||||||
|
|
||||||
|
|
||||||
def wait(
    service,
    condition,
    fail_condition=lambda e: False,
    timeout=180,
    wait=True,
    poll_interval=3,
):
    """
    Poll a service until its entity satisfies ``condition``.

    :param service: service of the entity
    :param condition: predicate the entity must satisfy to finish
    :param fail_condition: predicate that, when true, aborts with an Exception
    :param timeout: max time to wait in seconds
    :param wait: if True wait for the condition, if False return immediately
    :param poll_interval: number of seconds between successive condition checks
    """
    if not wait:
        return

    # Wait until the entity reaches the desired state:
    deadline = time.time() + timeout
    while time.time() < deadline:
        entity = get_entity(service)
        # Success: the desired condition holds.
        if condition(entity):
            return
        # Failure: the entity reached an error state.
        if fail_condition(entity):
            raise Exception("Error while waiting on result state of the entity.")
        # Neither applies yet; sleep before the next poll.
        time.sleep(float(poll_interval))

    raise Exception("Timeout exceed while waiting on result state of the entity.")
|
|
||||||
|
|
||||||
|
|
||||||
def __get_auth_dict():
    """
    Build the argument-spec entry for the ``auth`` parameter.

    When the environment provides usable credentials they become the
    parameter's default; otherwise ``auth`` is required from the task.
    """
    url = os.environ.get('OVIRT_URL')
    hostname = os.environ.get('OVIRT_HOSTNAME')
    username = os.environ.get('OVIRT_USERNAME')
    password = os.environ.get('OVIRT_PASSWORD')
    token = os.environ.get('OVIRT_TOKEN')
    cafile = os.environ.get('OVIRT_CAFILE')
    # Without a CA file the connection falls back to insecure mode.
    insecure = cafile is None

    # Derive the API URL from the hostname when only the latter is given.
    if url is None and hostname is not None:
        url = 'https://{0}/ovirt-engine/api'.format(hostname)

    env_vars = None
    # Credentials are usable only with a URL plus either user/password or a token.
    if url and ((username and password) or token):
        env_vars = {
            'url': url,
            'username': username,
            'password': password,
            'insecure': insecure,
            'token': token,
            'ca_file': cafile,
        }

    if env_vars is not None:
        return dict(default=env_vars, type='dict')
    return dict(required=True, type='dict')
|
|
||||||
|
|
||||||
|
|
||||||
def ovirt_info_full_argument_spec(**kwargs):
    """
    Extend the parameters of an info module with the parameters which are
    common to all oVirt info modules.

    :param kwargs: kwargs to be extended
    :return: extended dictionary with common parameters
    """
    common = {
        'auth': __get_auth_dict(),
        'fetch_nested': {'default': False, 'type': 'bool'},
        'nested_attributes': {'type': 'list', 'default': []},
    }
    common.update(kwargs)
    return common
|
|
||||||
|
|
||||||
|
|
||||||
# Left for third-party module compatibility
|
|
||||||
def ovirt_facts_full_argument_spec(**kwargs):
    """
    Deprecated alias. Please use ovirt_info_full_argument_spec instead!

    :param kwargs: kwargs to be extended
    :return: extended dictionary with common parameters
    """
    return ovirt_info_full_argument_spec(**kwargs)
|
|
||||||
|
|
||||||
|
|
||||||
def ovirt_full_argument_spec(**kwargs):
    """
    Extend the parameters of a module with the parameters which are common
    to all oVirt modules.

    :param kwargs: kwargs to be extended
    :return: extended dictionary with common parameters
    """
    common = {
        'auth': __get_auth_dict(),
        'timeout': {'default': 180, 'type': 'int'},
        'wait': {'default': True, 'type': 'bool'},
        'poll_interval': {'default': 3, 'type': 'int'},
        'fetch_nested': {'default': False, 'type': 'bool'},
        'nested_attributes': {'type': 'list', 'default': []},
    }
    common.update(kwargs)
    return common
|
|
||||||
|
|
||||||
|
|
||||||
def check_params(module):
    """
    Fail the module unless at least one of `name` or `id` was specified.

    Most modules must have either `name` or `id` specified.
    """
    params = module.params
    if params.get('name') is None and params.get('id') is None:
        module.fail_json(msg='"name" or "id" is required')
|
|
||||||
|
|
||||||
|
|
||||||
def engine_supported(connection, version):
    """Return True when the engine version is at least ``version``."""
    current = LooseVersion(engine_version(connection))
    return current >= LooseVersion(version)
|
|
||||||
|
|
||||||
|
|
||||||
def check_support(version, connection, module, params):
    """
    Check whether the parameters used by the user are supported by both the
    installed oVirt Python SDK and the oVirt engine.

    :param version: minimum required version string
    :param connection: SDK connection used to query the engine version
    :param module: AnsibleModule instance providing the user's params
    :param params: names of parameters that require the given version
    :return: True if none of the params were used, or if both SDK and
             engine meet the required version
    """
    # Query the engine version up front, matching the original call order.
    api_version = LooseVersion(engine_version(connection))
    required = LooseVersion(version)
    if any(module.params.get(p) is not None for p in params):
        return LooseVersion(sdk_version.VERSION) >= required and api_version >= required
    return True
|
|
||||||
|
|
||||||
|
|
||||||
class BaseModule(object):
    """
    Base class for oVirt modules.

    oVirt modules should inherit this class and override its methods to
    customize the specific needs of the module. The only abstract method of
    this class is `build_entity`, which must be implemented in the child
    class.
    """
    # Python 2 style ABC declaration; harmless under Python 3.
    __metaclass__ = ABCMeta

    def __init__(self, connection, module, service, changed=False):
        """
        :param connection: oVirt SDK connection used for follow-up requests.
        :param module: AnsibleModule instance (provides params, check_mode, diff).
        :param service: SDK service of the collection this module manages.
        :param changed: initial changed state reported back to Ansible.
        """
        self._connection = connection
        self._module = module
        self._service = service
        self._changed = changed
        # Filled in only when the user runs with --diff (see create()).
        self._diff = {'after': dict(), 'before': dict()}

    @property
    def changed(self):
        # Whether any modification has been performed during this run.
        return self._changed

    @changed.setter
    def changed(self, changed):
        # Once set to True the flag is sticky: later assignments can never
        # reset it back to False.
        if not self._changed:
            self._changed = changed

    @abstractmethod
    def build_entity(self):
        """
        This method should return an oVirt Python SDK type, which we want to
        create or update, initialized by values passed by the Ansible module.

        For example if we want to create a VM, we will return the following:
            types.Vm(name=self._module.params['vm_name'])

        :return: Specific instance of sdk.Struct.
        """
        pass

    def param(self, name, default=None):
        """
        Return a module parameter specified by its name.
        """
        return self._module.params.get(name, default)

    def update_check(self, entity):
        """
        This method handles checks whether the entity values are the same as
        the values passed to the Ansible module. By default we don't compare
        any values.

        :param entity: Entity we want to compare with Ansible module values.
        :return: True if values are same, so we don't need to update the entity.
        """
        return True

    def pre_create(self, entity):
        """
        This method is called right before the entity is created.

        :param entity: Entity to be created or updated.
        """
        pass

    def post_create(self, entity):
        """
        This method is called right after the entity is created.

        :param entity: Entity which was created.
        """
        pass

    def post_update(self, entity):
        """
        This method is called right after the entity is updated.

        :param entity: Entity which was updated.
        """
        pass

    def diff_update(self, after, update):
        # Recursively merge `update` into `after`, descending into nested
        # mappings so sibling keys already present in `after` survive.
        for k, v in update.items():
            if isinstance(v, Mapping):
                after[k] = self.diff_update(after.get(k, dict()), v)
            else:
                after[k] = update[k]
        return after

    def create(
        self,
        entity=None,
        result_state=None,
        fail_condition=lambda e: False,
        search_params=None,
        update_params=None,
        _wait=None,
        force_create=False,
        **kwargs
    ):
        """
        Method which is called when state of the entity is 'present'. If the
        user doesn't provide the `entity` parameter the entity is searched
        using the `search_params` parameter. If the entity is found it's
        updated; whether the entity should be updated is checked by the
        `update_check` method. The corresponding updated entity is built by
        the `build_entity` method.

        Function executed after entity is created can optionally be specified
        in `post_create` parameter. Function executed after entity is updated
        can optionally be specified in `post_update` parameter.

        :param entity: Entity we want to update, if it exists.
        :param result_state: State which the entity should have in order to finish the task.
        :param fail_condition: Function which checks for an incorrect state of the entity; if it returns `True` an Exception is raised.
        :param search_params: Dictionary of parameters to be used for search.
        :param update_params: The params which should be passed to the update method.
        :param _wait: Overrides the module-level 'wait' parameter when not None.
        :param force_create: When True, skip the search and always create.
        :param kwargs: Additional parameters passed when creating the entity.
        :return: Dictionary with values returned by the Ansible module.
        """
        if entity is None and not force_create:
            entity = self.search_entity(search_params)

        self.pre_create(entity)

        if entity:
            # Entity exists, so update it:
            entity_service = self._service.service(entity.id)
            if not self.update_check(entity):
                new_entity = self.build_entity()
                if not self._module.check_mode:
                    update_params = update_params or {}
                    updated_entity = entity_service.update(
                        new_entity,
                        **update_params
                    )
                    self.post_update(entity)

                # Update diffs only if user specified --diff parameter,
                # so we don't uselessly overload the API:
                if self._module._diff:
                    before = get_dict_of_struct(
                        entity,
                        self._connection,
                        fetch_nested=True,
                        attributes=['name'],
                    )
                    after = before.copy()
                    self.diff_update(after, get_dict_of_struct(new_entity))
                    self._diff['before'] = before
                    self._diff['after'] = after

                self.changed = True
        else:
            # Entity doesn't exist, so create it:
            if not self._module.check_mode:
                entity = self._service.add(
                    self.build_entity(),
                    **kwargs
                )
                self.post_create(entity)
            self.changed = True

        if not self._module.check_mode:
            # Wait for the entity to be created and to be in the defined state:
            entity_service = self._service.service(entity.id)

            # Default condition: any non-empty entity counts as done.
            def state_condition(entity):
                return entity

            if result_state:

                # When a target state was requested, also require it.
                def state_condition(entity):
                    return entity and entity.status == result_state

            wait(
                service=entity_service,
                condition=state_condition,
                fail_condition=fail_condition,
                wait=_wait if _wait is not None else self._module.params['wait'],
                timeout=self._module.params['timeout'],
                poll_interval=self._module.params['poll_interval'],
            )

        return {
            'changed': self.changed,
            'id': getattr(entity, 'id', None),
            type(entity).__name__.lower(): get_dict_of_struct(
                struct=entity,
                connection=self._connection,
                fetch_nested=self._module.params.get('fetch_nested'),
                attributes=self._module.params.get('nested_attributes'),
            ),
            'diff': self._diff,
        }

    def pre_remove(self, entity):
        """
        This method is called right before the entity is removed.

        :param entity: Entity which we want to remove.
        """
        pass

    def entity_name(self, entity):
        # Human-readable "type 'name'" label used in messages.
        return "{e_type} '{e_name}'".format(
            e_type=type(entity).__name__.lower(),
            e_name=getattr(entity, 'name', None),
        )

    def remove(self, entity=None, search_params=None, **kwargs):
        """
        Method which is called when state of the entity is 'absent'. If the
        user doesn't provide the `entity` parameter the entity is searched
        using the `search_params` parameter. If the entity is found it's
        removed.

        Function executed before remove is executed can optionally be specified
        in `pre_remove` parameter.

        :param entity: Entity we want to remove.
        :param search_params: Dictionary of parameters to be used for search.
        :param kwargs: Additional parameters passed when removing the entity.
        :return: Dictionary with values returned by the Ansible module.
        """
        if entity is None:
            entity = self.search_entity(search_params)

        if entity is None:
            return {
                'changed': self.changed,
                'msg': "Entity wasn't found."
            }

        self.pre_remove(entity)

        entity_service = self._service.service(entity.id)
        if not self._module.check_mode:
            entity_service.remove(**kwargs)
            wait(
                service=entity_service,
                condition=lambda entity: not entity,
                wait=self._module.params['wait'],
                timeout=self._module.params['timeout'],
                poll_interval=self._module.params['poll_interval'],
            )
        self.changed = True

        return {
            'changed': self.changed,
            'id': entity.id,
            type(entity).__name__.lower(): get_dict_of_struct(
                struct=entity,
                connection=self._connection,
                fetch_nested=self._module.params.get('fetch_nested'),
                attributes=self._module.params.get('nested_attributes'),
            ),
        }

    def action(
        self,
        action,
        entity=None,
        action_condition=lambda e: e,
        wait_condition=lambda e: e,
        fail_condition=lambda e: False,
        pre_action=lambda e: e,
        post_action=lambda e: None,
        search_params=None,
        **kwargs
    ):
        """
        This method is executed when we want to change the state of some oVirt
        entity. The action to be executed on the oVirt service is specified by
        the `action` parameter. Whether the action should be executed can be
        specified by passing the `action_condition` parameter. The state which
        the entity should be in after execution of the action can be specified
        by the `wait_condition` parameter.

        Function executed before an action on entity can optionally be specified
        in `pre_action` parameter. Function executed after an action on entity can
        optionally be specified in `post_action` parameter.

        :param action: Action which should be executed by service on entity.
        :param entity: Entity we want to run action on.
        :param action_condition: Function which is executed when checking if action should be executed.
        :param fail_condition: Function which checks for an incorrect state of the entity; if it returns `True` an Exception is raised.
        :param wait_condition: Function which is executed when waiting on the result state.
        :param pre_action: Function which is executed before running the action.
        :param post_action: Function which is executed after running the action.
        :param search_params: Dictionary of parameters to be used for search.
        :param kwargs: Additional parameters passed to action.
        :return: Dictionary with values returned by the Ansible module.
        """
        if entity is None:
            entity = self.search_entity(search_params)

        entity = pre_action(entity)

        if entity is None:
            self._module.fail_json(
                msg="Entity not found, can't run action '{0}'.".format(
                    action
                )
            )

        entity_service = self._service.service(entity.id)
        # Re-fetch the entity to get its current state before checking the
        # action condition.
        entity = entity_service.get()
        if action_condition(entity):
            if not self._module.check_mode:
                getattr(entity_service, action)(**kwargs)
            self.changed = True

        post_action(entity)

        wait(
            service=self._service.service(entity.id),
            condition=wait_condition,
            fail_condition=fail_condition,
            wait=self._module.params['wait'],
            timeout=self._module.params['timeout'],
            poll_interval=self._module.params['poll_interval'],
        )
        return {
            'changed': self.changed,
            'id': entity.id,
            type(entity).__name__.lower(): get_dict_of_struct(
                struct=entity,
                connection=self._connection,
                fetch_nested=self._module.params.get('fetch_nested'),
                attributes=self._module.params.get('nested_attributes'),
            ),
            'diff': self._diff,
        }

    def wait_for_import(self, condition=lambda e: True):
        """
        Poll until the imported entity appears (and satisfies `condition`).

        Returns the entity, or None when 'wait' is disabled or the timeout
        elapses without a match (no exception is raised on timeout).
        """
        if self._module.params['wait']:
            start = time.time()
            timeout = self._module.params['timeout']
            poll_interval = self._module.params['poll_interval']
            while time.time() < start + timeout:
                entity = self.search_entity()
                if entity and condition(entity):
                    return entity
                time.sleep(poll_interval)

    def search_entity(self, search_params=None, list_params=None):
        """
        Always first try to search by `ID`; if ID isn't specified,
        check if the user constructed a special search in `search_params`,
        if not search by `name`.
        """
        entity = None

        if 'id' in self._module.params and self._module.params['id'] is not None:
            entity = get_entity(self._service.service(self._module.params['id']), get_params=list_params)
        elif search_params is not None:
            entity = search_by_attributes(self._service, list_params=list_params, **search_params)
        elif self._module.params.get('name') is not None:
            entity = search_by_attributes(self._service, list_params=list_params, name=self._module.params['name'])

        return entity

    def _get_major(self, full_version):
        # Extract the major version number from either an otypes.Version
        # object or a dotted version string; None for empty input.
        if full_version is None or full_version == "":
            return None
        if isinstance(full_version, otypes.Version):
            return int(full_version.major)
        return int(full_version.split('.')[0])

    def _get_minor(self, full_version):
        # Extract the minor version number from either an otypes.Version
        # object or a dotted version string; None for empty input.
        if full_version is None or full_version == "":
            return None
        if isinstance(full_version, otypes.Version):
            return int(full_version.minor)
        return int(full_version.split('.')[1])
|
|
||||||
|
|
||||||
|
|
||||||
def _sdk4_error_maybe():
    """
    Allow for ovirtsdk4 not being installed.

    Returns sdk.Error when the SDK is available, otherwise a harmless
    placeholder type, so the result is always safe to use in an `except`
    clause.
    """
    return sdk.Error if HAS_SDK else type(None)
|
|
||||||
|
|
||||||
|
|
||||||
class OvirtRetry(CloudRetry):
    """CloudRetry specialization that retries transient oVirt SDK errors."""

    base_class = _sdk4_error_maybe()

    @staticmethod
    def status_code_from_exception(error):
        # The SDK error exposes the HTTP status code via its `code` attribute.
        return error.code

    @staticmethod
    def found(response_code, catch_extra_error_codes=None):
        # 409 (HTTP Conflict) is retried by default; callers may supply
        # extra status codes to retry on.
        retry_on = {409}
        if catch_extra_error_codes:
            retry_on.update(catch_extra_error_codes)
        return response_code in retry_on
|
|
||||||
@@ -10,7 +10,7 @@ __metaclass__ = type
|
|||||||
import csv
|
import csv
|
||||||
from io import BytesIO, StringIO
|
from io import BytesIO, StringIO
|
||||||
|
|
||||||
from ansible.module_utils._text import to_native
|
from ansible.module_utils.common.text.converters import to_native
|
||||||
from ansible.module_utils.six import PY3
|
from ansible.module_utils.six import PY3
|
||||||
|
|
||||||
|
|
||||||
|
|||||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user