mirror of
https://github.com/ansible-collections/community.general.git
synced 2026-05-02 03:12:46 +00:00
Compare commits
785 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
fc02723672 | ||
|
|
a1357411cb | ||
|
|
671abf7d05 | ||
|
|
236460861a | ||
|
|
ebdbfe30fd | ||
|
|
82e771cd25 | ||
|
|
09037c0e0f | ||
|
|
2dd395bd12 | ||
|
|
042c05cf26 | ||
|
|
c158c2cc95 | ||
|
|
4b4479844b | ||
|
|
bba060ef71 | ||
|
|
aaec9ae7ba | ||
|
|
340cb0d231 | ||
|
|
1e488d995a | ||
|
|
0a661a6506 | ||
|
|
f53804f542 | ||
|
|
e09afd224b | ||
|
|
d311049808 | ||
|
|
89ca1a7eae | ||
|
|
02a5b75cc3 | ||
|
|
2c8d75917f | ||
|
|
435d593e23 | ||
|
|
5e5ae243b1 | ||
|
|
f5b3f7da24 | ||
|
|
06c24f1f6a | ||
|
|
49a51c127e | ||
|
|
8b2d61b436 | ||
|
|
e7b3808bac | ||
|
|
f6be766263 | ||
|
|
a8ec62e620 | ||
|
|
4f49435e8a | ||
|
|
95978430f5 | ||
|
|
cc7e0083b8 | ||
|
|
f55c9b77fc | ||
|
|
3fc582a380 | ||
|
|
7f63f47efc | ||
|
|
be65a9f345 | ||
|
|
044831904c | ||
|
|
0ed510a050 | ||
|
|
103bde7764 | ||
|
|
b8f55cccdf | ||
|
|
b5abccfe31 | ||
|
|
335f6606fe | ||
|
|
5bdbbd1f4f | ||
|
|
3f67766bac | ||
|
|
6848d6a302 | ||
|
|
0f8dd60627 | ||
|
|
4190629e61 | ||
|
|
38d719da07 | ||
|
|
ea40a39a09 | ||
|
|
67562860e2 | ||
|
|
0ece2053f8 | ||
|
|
3653dff68c | ||
|
|
171a028ef8 | ||
|
|
4ab8f79eae | ||
|
|
871e2809ed | ||
|
|
ebf5dd9007 | ||
|
|
054b369740 | ||
|
|
f9598c8586 | ||
|
|
0b116fa1dc | ||
|
|
01d0f3ccb7 | ||
|
|
7a8c3346ff | ||
|
|
f8acda70d2 | ||
|
|
2eedbdc928 | ||
|
|
c251868e55 | ||
|
|
e501974a9e | ||
|
|
25a17e7b6e | ||
|
|
dc92cda736 | ||
|
|
ee895080fb | ||
|
|
5436a0c602 | ||
|
|
6b59a3c1f6 | ||
|
|
02fa9106c6 | ||
|
|
94699318e2 | ||
|
|
0dab7c8f3b | ||
|
|
e29c1fe51f | ||
|
|
196f9b1bac | ||
|
|
4f6f4b343f | ||
|
|
6d75ac4fb3 | ||
|
|
eca2094831 | ||
|
|
8a6fb28296 | ||
|
|
6d94f0acbe | ||
|
|
ce05de750e | ||
|
|
31f1b9a7c7 | ||
|
|
46790b3dcb | ||
|
|
41aa6bc450 | ||
|
|
15b0eb155b | ||
|
|
169b65b43e | ||
|
|
f71f8e608d | ||
|
|
fb6f4760ae | ||
|
|
ae041e56c6 | ||
|
|
b3fecf0b6f | ||
|
|
59e8eca4c8 | ||
|
|
e95a3b5215 | ||
|
|
235a851350 | ||
|
|
5584998e0e | ||
|
|
8b259f8bc1 | ||
|
|
24789f86fa | ||
|
|
ddbfbcb47e | ||
|
|
69563766ec | ||
|
|
4c7f64b121 | ||
|
|
1e397ace75 | ||
|
|
71349d7e1d | ||
|
|
19de563a1d | ||
|
|
e550a0f58f | ||
|
|
eaa1f7c841 | ||
|
|
7a653bff94 | ||
|
|
c8360e7d1a | ||
|
|
94d33854b3 | ||
|
|
3d088b68ec | ||
|
|
7b8fa38878 | ||
|
|
6bf1eead47 | ||
|
|
478100011b | ||
|
|
7ac342e237 | ||
|
|
e2573de08d | ||
|
|
4412bdba9b | ||
|
|
374270d242 | ||
|
|
4c1077b2a0 | ||
|
|
a4f4a25b16 | ||
|
|
17f5a5b575 | ||
|
|
0be6e61b31 | ||
|
|
36978d71e1 | ||
|
|
3cc62e3827 | ||
|
|
e5f290e885 | ||
|
|
003f9e498e | ||
|
|
700bb27d51 | ||
|
|
07c68cb7f1 | ||
|
|
e9f0fcac0d | ||
|
|
a2e198d8a7 | ||
|
|
9ccb6e029d | ||
|
|
adf0f41b4b | ||
|
|
63a8f1e89f | ||
|
|
76de353377 | ||
|
|
960ed5acfd | ||
|
|
f636201450 | ||
|
|
fb01bf6ff7 | ||
|
|
8569e7eb58 | ||
|
|
c100ecda2c | ||
|
|
f33f50cf2c | ||
|
|
452d6f2fa7 | ||
|
|
f3828ba9ca | ||
|
|
b4eb8e5e22 | ||
|
|
8c02531c56 | ||
|
|
41171a02b7 | ||
|
|
84a6f610f7 | ||
|
|
2f623b7398 | ||
|
|
4e73ae1a86 | ||
|
|
392f5b4702 | ||
|
|
d5d8e1d188 | ||
|
|
fbd0a80439 | ||
|
|
c55df29ec9 | ||
|
|
2405857338 | ||
|
|
1c6b9507bc | ||
|
|
9363356941 | ||
|
|
7b92d84878 | ||
|
|
c8fe77c359 | ||
|
|
d5465ff471 | ||
|
|
7cd8f6edff | ||
|
|
125cafb371 | ||
|
|
9e11cd0813 | ||
|
|
2b48825499 | ||
|
|
8196cacff8 | ||
|
|
bd4f1a3e5c | ||
|
|
cfee3284cd | ||
|
|
9c3e14701b | ||
|
|
cae0457e0e | ||
|
|
a9e892952d | ||
|
|
e8ff74f077 | ||
|
|
0478b0c5a1 | ||
|
|
4a642c247c | ||
|
|
8bffd757ce | ||
|
|
eb294ae86e | ||
|
|
cea886562a | ||
|
|
30c5de00e9 | ||
|
|
ae3236389e | ||
|
|
9717bac816 | ||
|
|
6b5b051c3d | ||
|
|
5d7ff825de | ||
|
|
f58ab1b642 | ||
|
|
f644720c74 | ||
|
|
7a0428d7e6 | ||
|
|
b3eadab36a | ||
|
|
17280ed73e | ||
|
|
207ea056a7 | ||
|
|
f7189a55c6 | ||
|
|
edb0d5f6ca | ||
|
|
4c6f77cbc5 | ||
|
|
3b812e64ff | ||
|
|
3856c184d2 | ||
|
|
facdfb9519 | ||
|
|
1f90168f37 | ||
|
|
f238b90fcf | ||
|
|
9d245287b2 | ||
|
|
8c8e755369 | ||
|
|
946727309f | ||
|
|
bc716b7ab4 | ||
|
|
8ab0591e85 | ||
|
|
d557997242 | ||
|
|
e116cccb82 | ||
|
|
301483a7f3 | ||
|
|
61c326ce81 | ||
|
|
4e35837063 | ||
|
|
0dc63be643 | ||
|
|
14e2dd4cea | ||
|
|
3e5d58129d | ||
|
|
79241e672f | ||
|
|
12415f3e2f | ||
|
|
ca478eb38d | ||
|
|
aeb668a645 | ||
|
|
7b83b7f7bb | ||
|
|
0e1c4a20c2 | ||
|
|
70bf4e449c | ||
|
|
ac8942979b | ||
|
|
28fb1e3eac | ||
|
|
2fabb55a4d | ||
|
|
41b624ffaf | ||
|
|
e2283faf98 | ||
|
|
af276713aa | ||
|
|
68c3c9b7ba | ||
|
|
7bb291864e | ||
|
|
0090af8cfb | ||
|
|
42aeeb975b | ||
|
|
9dd7be05dc | ||
|
|
11a847a7b5 | ||
|
|
ebcceafdb7 | ||
|
|
79b3521547 | ||
|
|
7aae8a94f2 | ||
|
|
acde075b5f | ||
|
|
0310c7875d | ||
|
|
8bce7601bc | ||
|
|
ab5c4b186b | ||
|
|
ee2779e6c1 | ||
|
|
91ac9f84b8 | ||
|
|
1c1d58482c | ||
|
|
138740127a | ||
|
|
1d7aad9b46 | ||
|
|
f3a12a9e78 | ||
|
|
0e818c4812 | ||
|
|
8751f0feea | ||
|
|
5dd4cc5148 | ||
|
|
39f27d7d43 | ||
|
|
4dec46778c | ||
|
|
d5e0d36e48 | ||
|
|
fb45b908dd | ||
|
|
6e16c6c649 | ||
|
|
033d5f23f8 | ||
|
|
6367bb853d | ||
|
|
8e6941ed5d | ||
|
|
2d730da8a7 | ||
|
|
88f5100657 | ||
|
|
0293f84b3e | ||
|
|
8333c881d3 | ||
|
|
4a394088b3 | ||
|
|
d72d9b3e45 | ||
|
|
e44bbbdcba | ||
|
|
eca7c1a00b | ||
|
|
b1ff713c41 | ||
|
|
c95a8b6540 | ||
|
|
57dcd31c82 | ||
|
|
7ba3d84004 | ||
|
|
837c1289d0 | ||
|
|
3c7c946297 | ||
|
|
f0f5035ba2 | ||
|
|
3955a6be0f | ||
|
|
bc5b4bdef3 | ||
|
|
8f8b1ee4ce | ||
|
|
6534db4942 | ||
|
|
7a55295798 | ||
|
|
0fd7cfd2d6 | ||
|
|
68ca28b69a | ||
|
|
c91e7b4c03 | ||
|
|
2583152512 | ||
|
|
cee6c98d2a | ||
|
|
8859379bed | ||
|
|
e899631137 | ||
|
|
e99dcaa729 | ||
|
|
c12dd2f9c7 | ||
|
|
8253fb171d | ||
|
|
5f1f76b8f4 | ||
|
|
e3793e09e8 | ||
|
|
a800a6dbad | ||
|
|
7d45b678e4 | ||
|
|
cd0ca389ed | ||
|
|
9ebf72d560 | ||
|
|
d6a4fab8ea | ||
|
|
4e43b124cd | ||
|
|
9bcf61d153 | ||
|
|
7f5305fb80 | ||
|
|
aea60a8dd6 | ||
|
|
195ac4d7e6 | ||
|
|
91d515bd1e | ||
|
|
7d8f5559e2 | ||
|
|
9e68816db9 | ||
|
|
fc1ba5152c | ||
|
|
205e28d2fe | ||
|
|
27629b6497 | ||
|
|
5735c5a045 | ||
|
|
ceb051851e | ||
|
|
a17083ea84 | ||
|
|
d03fdc8093 | ||
|
|
469209a17f | ||
|
|
72ea96cc74 | ||
|
|
b2c34d1afe | ||
|
|
f4fca86f82 | ||
|
|
78c8fa0d49 | ||
|
|
d3650f27b0 | ||
|
|
7b901f9caa | ||
|
|
35d6ab10bb | ||
|
|
d811807e1f | ||
|
|
dedd625700 | ||
|
|
10e41862cb | ||
|
|
3d418d9ede | ||
|
|
ebb150c3f9 | ||
|
|
df28c80946 | ||
|
|
403152d91a | ||
|
|
75e35bfa6c | ||
|
|
fa846e9677 | ||
|
|
db62a36d6e | ||
|
|
e3dae0b646 | ||
|
|
9aaf8e4825 | ||
|
|
cf0a233d7b | ||
|
|
cebd5bb3c8 | ||
|
|
d452e903f8 | ||
|
|
c76ef6ba99 | ||
|
|
52bd7cdb2d | ||
|
|
da3ba1e7be | ||
|
|
cb46453b78 | ||
|
|
a784e66a2c | ||
|
|
52cc1881d8 | ||
|
|
5c7076e0bc | ||
|
|
fc752f3143 | ||
|
|
4637c265fa | ||
|
|
33e980039b | ||
|
|
e49775765d | ||
|
|
c2590cfcd8 | ||
|
|
4282b6ed16 | ||
|
|
3c77c8ec3c | ||
|
|
07e4e4a782 | ||
|
|
d881a59ed7 | ||
|
|
338328341e | ||
|
|
6e48528b22 | ||
|
|
f545c300d9 | ||
|
|
df496e37c0 | ||
|
|
c35f13084d | ||
|
|
4d4e626f95 | ||
|
|
faa913d566 | ||
|
|
7e4a39964e | ||
|
|
c6aecd18f4 | ||
|
|
c5bdb1501e | ||
|
|
163bfd0f37 | ||
|
|
0331798f84 | ||
|
|
2c7940c5de | ||
|
|
4ef5c3c11a | ||
|
|
7468bf30f2 | ||
|
|
b4e7b7cb50 | ||
|
|
666db05eda | ||
|
|
e255e5ed0c | ||
|
|
f79aa6f63f | ||
|
|
da040cb412 | ||
|
|
04b68c296b | ||
|
|
8e2fa624e0 | ||
|
|
c7ac7fbefd | ||
|
|
ed6e87c994 | ||
|
|
cc49336090 | ||
|
|
4b410c5007 | ||
|
|
13be47c7a6 | ||
|
|
94903785d0 | ||
|
|
20e394fd3d | ||
|
|
17157cdfb5 | ||
|
|
769b22cd4c | ||
|
|
dbd19a5583 | ||
|
|
45f7661249 | ||
|
|
ed472d8291 | ||
|
|
c72d8d4b56 | ||
|
|
e9b58cfc09 | ||
|
|
98d25a3e4d | ||
|
|
7a000108af | ||
|
|
a8a1c0af2a | ||
|
|
c394fbe8e9 | ||
|
|
fed5965518 | ||
|
|
1e0a2a72f7 | ||
|
|
add595e121 | ||
|
|
4476a4bb1c | ||
|
|
f104fe3a58 | ||
|
|
1bc052ae6b | ||
|
|
6260d5f873 | ||
|
|
6907ae5a5e | ||
|
|
2314db59c7 | ||
|
|
e32515889b | ||
|
|
7542b5429b | ||
|
|
18afa4e8b0 | ||
|
|
b23d011582 | ||
|
|
8b56b6dfea | ||
|
|
c2a90c215f | ||
|
|
39a66a3196 | ||
|
|
75f649648e | ||
|
|
1f29fa2e39 | ||
|
|
26b8f30afa | ||
|
|
80f43bbbf5 | ||
|
|
ec58aadaa7 | ||
|
|
6f50af8a6e | ||
|
|
e2604e7533 | ||
|
|
8208e52c42 | ||
|
|
39e4f89ff0 | ||
|
|
c7202a1902 | ||
|
|
1f64be7cac | ||
|
|
2447bc90a4 | ||
|
|
98029089d8 | ||
|
|
964aa945a4 | ||
|
|
7d24ba8a28 | ||
|
|
be5b4adf5b | ||
|
|
62b45e235d | ||
|
|
f99508b307 | ||
|
|
9afc096cd2 | ||
|
|
27c0c29cf3 | ||
|
|
c890a161ae | ||
|
|
c911aa8a84 | ||
|
|
a3cb344689 | ||
|
|
bd4112e87b | ||
|
|
ec9b1fc503 | ||
|
|
5467334117 | ||
|
|
e962b9237d | ||
|
|
48c36bc72b | ||
|
|
56bb7d7b9d | ||
|
|
af2438f664 | ||
|
|
a68de9bfb6 | ||
|
|
4614047132 | ||
|
|
6b02eaa795 | ||
|
|
6f774fd4a5 | ||
|
|
f05618a6f2 | ||
|
|
98a956a9d6 | ||
|
|
60d51f7b49 | ||
|
|
245be47a4e | ||
|
|
6d346ddadd | ||
|
|
cb969fc468 | ||
|
|
ec6c2a76ad | ||
|
|
b22777de44 | ||
|
|
151f6c9ce3 | ||
|
|
0acaad60c8 | ||
|
|
a06c1f5c9a | ||
|
|
becda864c4 | ||
|
|
92d077e816 | ||
|
|
700623863c | ||
|
|
31f57b9385 | ||
|
|
14038511a1 | ||
|
|
3db0a11148 | ||
|
|
05ba79c5fe | ||
|
|
0df708b15a | ||
|
|
1829ad4fdc | ||
|
|
0e99b006a2 | ||
|
|
f66bd1035d | ||
|
|
7732d64abb | ||
|
|
03f3b74934 | ||
|
|
29e9afcbf4 | ||
|
|
11ba71c802 | ||
|
|
d3badc6d43 | ||
|
|
92a07f1794 | ||
|
|
7a44dbfe45 | ||
|
|
4f9e7bd793 | ||
|
|
872bc91096 | ||
|
|
641b0693b4 | ||
|
|
45a3396ab0 | ||
|
|
e38f9e5cfc | ||
|
|
60ba39da58 | ||
|
|
f6fa7fb273 | ||
|
|
899fcb8749 | ||
|
|
a5c448d6e8 | ||
|
|
74bd7f1471 | ||
|
|
7e6514b4d4 | ||
|
|
3c7f05c42d | ||
|
|
a13a6d284c | ||
|
|
122086e83b | ||
|
|
92e4bd184d | ||
|
|
12f2d71950 | ||
|
|
e3f72bca4f | ||
|
|
429b4b14a8 | ||
|
|
2d12c6678c | ||
|
|
474c7a7240 | ||
|
|
434032080e | ||
|
|
eec5c82a55 | ||
|
|
97ea891377 | ||
|
|
b3ad22f33f | ||
|
|
977f53f823 | ||
|
|
4c26dc0760 | ||
|
|
3c6131b451 | ||
|
|
2499c1132d | ||
|
|
3fe559b88f | ||
|
|
28fed38757 | ||
|
|
f2196d452f | ||
|
|
07124473cc | ||
|
|
a56cec8582 | ||
|
|
5c2e9e0c5b | ||
|
|
df5818282b | ||
|
|
69aea38683 | ||
|
|
01dce04e33 | ||
|
|
5ab43f0ff1 | ||
|
|
910fb933a7 | ||
|
|
7537674d4b | ||
|
|
6e42e442bc | ||
|
|
161539729d | ||
|
|
8f76c847fe | ||
|
|
3f29683191 | ||
|
|
a89990ab9b | ||
|
|
7bc937b5e8 | ||
|
|
a1c39cc882 | ||
|
|
859039c47a | ||
|
|
b3f669a574 | ||
|
|
770bae70db | ||
|
|
0a6c57bc4d | ||
|
|
459b9f3f9a | ||
|
|
d9436069f1 | ||
|
|
fcf1cb7fbc | ||
|
|
5727f1afd4 | ||
|
|
fbada0026e | ||
|
|
c4373d5ed5 | ||
|
|
54291ab1d1 | ||
|
|
bbd67b5017 | ||
|
|
6d6d3e8039 | ||
|
|
e31999f369 | ||
|
|
09f99e66fe | ||
|
|
5491ff7c6a | ||
|
|
163f3ee305 | ||
|
|
4152770281 | ||
|
|
ecc9afab0b | ||
|
|
6f9b954048 | ||
|
|
74cd18b682 | ||
|
|
0b64fc1ee4 | ||
|
|
5fe39082d5 | ||
|
|
0ff52abfdd | ||
|
|
653ae1b48a | ||
|
|
16d124bbe2 | ||
|
|
4455df380e | ||
|
|
06c7ba640e | ||
|
|
a6f6bcc555 | ||
|
|
cd06325f6b | ||
|
|
a20ee0e816 | ||
|
|
acb6a2f76d | ||
|
|
5a1c68cb62 | ||
|
|
776374ee78 | ||
|
|
c916052124 | ||
|
|
88b1fbbdf0 | ||
|
|
48db44f199 | ||
|
|
eb4c01260f | ||
|
|
10561e6f30 | ||
|
|
5c26387a54 | ||
|
|
8226ea87cf | ||
|
|
411c7d4f32 | ||
|
|
a2f377c621 | ||
|
|
ac0956ed6f | ||
|
|
0d02265a23 | ||
|
|
f38d974d42 | ||
|
|
a975574618 | ||
|
|
b05fa358e6 | ||
|
|
c44118ac3c | ||
|
|
886d4a6596 | ||
|
|
94e3635c0a | ||
|
|
3d03c373ff | ||
|
|
8fc11fe88f | ||
|
|
cecaa1840d | ||
|
|
2429e228a4 | ||
|
|
c8410a924e | ||
|
|
8a2ac4f1eb | ||
|
|
9553dd9ddf | ||
|
|
e63c2f54cf | ||
|
|
c71f662d55 | ||
|
|
9069f673e2 | ||
|
|
67d1b6c413 | ||
|
|
3506f73da1 | ||
|
|
d0b4e91cac | ||
|
|
ce1b9887b1 | ||
|
|
107df41d9c | ||
|
|
39f3b151e8 | ||
|
|
ccf7f62325 | ||
|
|
a8c41ac4c1 | ||
|
|
5b3b7a1fb1 | ||
|
|
9fb686fe35 | ||
|
|
93be499f26 | ||
|
|
cc72fa0786 | ||
|
|
658637dc70 | ||
|
|
b1f4604067 | ||
|
|
064f76c27b | ||
|
|
86166ccade | ||
|
|
1180843e35 | ||
|
|
26fe42776c | ||
|
|
5874711c6e | ||
|
|
12fa2452d8 | ||
|
|
a894f8e7eb | ||
|
|
410288401b | ||
|
|
67ddb567c9 | ||
|
|
3de4682193 | ||
|
|
8df9d0d7de | ||
|
|
ec6496024f | ||
|
|
447d4b0267 | ||
|
|
b523d1b1c9 | ||
|
|
464812a2c2 | ||
|
|
5e6b8e5327 | ||
|
|
24b74cc4b9 | ||
|
|
c814fd0530 | ||
|
|
1bdf8fc025 | ||
|
|
3b109abe18 | ||
|
|
cc8009621f | ||
|
|
c7e2875a4d | ||
|
|
24f2b980b7 | ||
|
|
1d86d49688 | ||
|
|
5b4f41748d | ||
|
|
29a2df8e6b | ||
|
|
8610223d03 | ||
|
|
fea0ffa5aa | ||
|
|
e7ccbc2f18 | ||
|
|
92df5e8fec | ||
|
|
5d9a7ab240 | ||
|
|
d4fb6bf8a6 | ||
|
|
7fc7af306c | ||
|
|
c4e2b73193 | ||
|
|
96dfb89b01 | ||
|
|
daaa008713 | ||
|
|
2d660a1252 | ||
|
|
83080cc005 | ||
|
|
7c913b239a | ||
|
|
a7d1b0fc52 | ||
|
|
ab84f1632f | ||
|
|
8ef77d8664 | ||
|
|
fe18b05f08 | ||
|
|
84e0190eee | ||
|
|
bc6ae849b3 | ||
|
|
002f137134 | ||
|
|
0bc5f24863 | ||
|
|
4700accbff | ||
|
|
d356e255e0 | ||
|
|
89ad40db41 | ||
|
|
199ba0a170 | ||
|
|
293021c3dd | ||
|
|
a32f1d699b | ||
|
|
deaad6e547 | ||
|
|
0bf84ba2b6 | ||
|
|
38479ee9ff | ||
|
|
ac302eb77d | ||
|
|
27cb0c9090 | ||
|
|
6af74d1ba6 | ||
|
|
80f48cceb4 | ||
|
|
f93883aa20 | ||
|
|
4123934b46 | ||
|
|
2f1df973a6 | ||
|
|
e4472b322b | ||
|
|
76ebda7faf | ||
|
|
94472dd7e5 | ||
|
|
37dd6ec8a3 | ||
|
|
41d87f5c9d | ||
|
|
40f1ab31f5 | ||
|
|
2ae41fa83f | ||
|
|
529af4984c | ||
|
|
d73f977b7a | ||
|
|
afd754e384 | ||
|
|
ea389e7045 | ||
|
|
d9b0c42f5f | ||
|
|
982b8d89b7 | ||
|
|
26df6c7657 | ||
|
|
43f8adf1a5 | ||
|
|
7e978c77b4 | ||
|
|
ecc048bc12 | ||
|
|
593d302f0b | ||
|
|
249126f429 | ||
|
|
96d5e6e50e | ||
|
|
e9071e9871 | ||
|
|
9c9c4cbc3e | ||
|
|
573a7b97c6 | ||
|
|
4598758419 | ||
|
|
7dc4429c9c | ||
|
|
5192ffe5b3 | ||
|
|
3607e3d012 | ||
|
|
e3a3c6d58f | ||
|
|
e5bc38d856 | ||
|
|
b79ac4f0ac | ||
|
|
c84fb5577b | ||
|
|
14e86bde07 | ||
|
|
9571ec7c72 | ||
|
|
e1b5ddb050 | ||
|
|
34519a5ecb | ||
|
|
4cb4c608d0 | ||
|
|
2eec853e9e | ||
|
|
2942eda8e0 | ||
|
|
73b54139d6 | ||
|
|
0f59bb7a99 | ||
|
|
76d0222a83 | ||
|
|
158947f5e5 | ||
|
|
8989b6c4d4 | ||
|
|
57e28e5a73 | ||
|
|
e7480ad29e | ||
|
|
b9244130ef | ||
|
|
9f340861ad | ||
|
|
b16263ebd7 | ||
|
|
5322dd942e | ||
|
|
9a16eaf9ba | ||
|
|
5b2711bbd3 | ||
|
|
132faeae34 | ||
|
|
c517f1c483 | ||
|
|
fd811df414 | ||
|
|
b6c6253bfc | ||
|
|
7bbf32dc0e | ||
|
|
2963004991 | ||
|
|
229ed6dad9 | ||
|
|
37c8560542 | ||
|
|
1a8f172186 | ||
|
|
52126b8fae | ||
|
|
58f9860ba7 | ||
|
|
e3fb817a21 | ||
|
|
e1148e6bdc | ||
|
|
c0fd10e793 | ||
|
|
cac55beb4f | ||
|
|
ce65eb8736 | ||
|
|
f9a56b9a9b | ||
|
|
daed4dcc94 | ||
|
|
a24ee93f23 | ||
|
|
3a24aa5b6d | ||
|
|
a78f7b1e6a | ||
|
|
83318c36aa | ||
|
|
9dd2b71d04 | ||
|
|
6cefde622c | ||
|
|
21b16c1c77 | ||
|
|
9f3103e891 | ||
|
|
8990f97b45 | ||
|
|
ca8ecb1df1 | ||
|
|
e794fa74da | ||
|
|
8451fc36ca | ||
|
|
b0797d329c | ||
|
|
feb1ecbfcd | ||
|
|
45972c23d4 | ||
|
|
a3989095af | ||
|
|
b2c773996d | ||
|
|
caecb2297f | ||
|
|
5259caacae | ||
|
|
01d8c7b769 | ||
|
|
6e0142fe3a | ||
|
|
70c8042c99 | ||
|
|
1053545870 | ||
|
|
9e38161400 | ||
|
|
0d50131d5e | ||
|
|
60ba7cab93 | ||
|
|
ecb68aa5d2 | ||
|
|
86f19cb5d3 | ||
|
|
9a18963364 | ||
|
|
2612ceee37 | ||
|
|
69b72e4a8e | ||
|
|
3314d5c8db | ||
|
|
b11da288d2 | ||
|
|
df7fe19bbe | ||
|
|
fd2cd5f28c | ||
|
|
3716187fc3 | ||
|
|
ec4cf55566 | ||
|
|
03966624ba | ||
|
|
d95f4d68a3 | ||
|
|
1d61541951 | ||
|
|
2574cb0dea | ||
|
|
71f9674835 | ||
|
|
49e2a8633e | ||
|
|
f0940d82dc | ||
|
|
8f60f3aef9 | ||
|
|
800bc01112 | ||
|
|
ac3c04357c | ||
|
|
c31499a411 | ||
|
|
1ae6c82558 | ||
|
|
d2d7deb4ec | ||
|
|
06f13e79b1 | ||
|
|
1c4ab7fafc | ||
|
|
a0ad2d5849 | ||
|
|
2a3819a696 | ||
|
|
0129346eda | ||
|
|
6f8f12f762 | ||
|
|
5041ebe5b2 | ||
|
|
961767e2dd | ||
|
|
d46e12e280 | ||
|
|
5a5188a453 | ||
|
|
58ce19d2c2 | ||
|
|
54df0c9b3a | ||
|
|
7d72300c36 | ||
|
|
e690317e3a | ||
|
|
572caeaa39 | ||
|
|
43cb5a0d54 | ||
|
|
d96b2642bc | ||
|
|
e7ee90a937 | ||
|
|
ec886203fc | ||
|
|
da2c87ce0d | ||
|
|
4792e21416 | ||
|
|
95e509753e | ||
|
|
22ed5048a2 |
@@ -29,22 +29,20 @@ schedules:
|
|||||||
always: true
|
always: true
|
||||||
branches:
|
branches:
|
||||||
include:
|
include:
|
||||||
- stable-9
|
- stable-12
|
||||||
- stable-8
|
- stable-11
|
||||||
- cron: 0 11 * * 0
|
- cron: 0 11 * * 0
|
||||||
displayName: Weekly (old stable branches)
|
displayName: Weekly (old stable branches)
|
||||||
always: true
|
always: true
|
||||||
branches:
|
branches:
|
||||||
include:
|
include:
|
||||||
- stable-7
|
- stable-10
|
||||||
|
|
||||||
variables:
|
variables:
|
||||||
- name: checkoutPath
|
- name: checkoutPath
|
||||||
value: ansible_collections/community/general
|
value: ansible_collections/community/general
|
||||||
- name: coverageBranches
|
- name: coverageBranches
|
||||||
value: main
|
value: main
|
||||||
- name: pipelinesCoverage
|
|
||||||
value: coverage
|
|
||||||
- name: entryPoint
|
- name: entryPoint
|
||||||
value: tests/utils/shippable/shippable.sh
|
value: tests/utils/shippable/shippable.sh
|
||||||
- name: fetchDepth
|
- name: fetchDepth
|
||||||
@@ -53,227 +51,179 @@ variables:
|
|||||||
resources:
|
resources:
|
||||||
containers:
|
containers:
|
||||||
- container: default
|
- container: default
|
||||||
image: quay.io/ansible/azure-pipelines-test-container:6.0.0
|
image: quay.io/ansible/azure-pipelines-test-container:7.0.0
|
||||||
|
|
||||||
pool: Standard
|
pool: Standard
|
||||||
|
|
||||||
stages:
|
stages:
|
||||||
### Sanity
|
### Sanity
|
||||||
- stage: Sanity_devel
|
- stage: Sanity_2_20
|
||||||
displayName: Sanity devel
|
displayName: Sanity 2.20
|
||||||
dependsOn: []
|
dependsOn: []
|
||||||
jobs:
|
jobs:
|
||||||
- template: templates/matrix.yml
|
- template: templates/matrix.yml
|
||||||
parameters:
|
parameters:
|
||||||
nameFormat: Test {0}
|
nameFormat: Test {0}
|
||||||
testFormat: devel/sanity/{0}
|
testFormat: 2.20/sanity/{0}
|
||||||
targets:
|
targets:
|
||||||
- test: 1
|
- test: 1
|
||||||
- test: 2
|
- test: 2
|
||||||
- test: 3
|
- test: 3
|
||||||
- test: 4
|
- test: 4
|
||||||
- test: extra
|
- stage: Sanity_2_19
|
||||||
- stage: Sanity_2_17
|
displayName: Sanity 2.19
|
||||||
displayName: Sanity 2.17
|
|
||||||
dependsOn: []
|
dependsOn: []
|
||||||
jobs:
|
jobs:
|
||||||
- template: templates/matrix.yml
|
- template: templates/matrix.yml
|
||||||
parameters:
|
parameters:
|
||||||
nameFormat: Test {0}
|
nameFormat: Test {0}
|
||||||
testFormat: 2.17/sanity/{0}
|
testFormat: 2.19/sanity/{0}
|
||||||
targets:
|
targets:
|
||||||
- test: 1
|
- test: 1
|
||||||
- test: 2
|
- test: 2
|
||||||
- test: 3
|
- test: 3
|
||||||
- test: 4
|
- test: 4
|
||||||
- stage: Sanity_2_16
|
- stage: Sanity_2_18
|
||||||
displayName: Sanity 2.16
|
displayName: Sanity 2.18
|
||||||
dependsOn: []
|
dependsOn: []
|
||||||
jobs:
|
jobs:
|
||||||
- template: templates/matrix.yml
|
- template: templates/matrix.yml
|
||||||
parameters:
|
parameters:
|
||||||
nameFormat: Test {0}
|
nameFormat: Test {0}
|
||||||
testFormat: 2.16/sanity/{0}
|
testFormat: 2.18/sanity/{0}
|
||||||
targets:
|
|
||||||
- test: 1
|
|
||||||
- test: 2
|
|
||||||
- test: 3
|
|
||||||
- test: 4
|
|
||||||
- stage: Sanity_2_15
|
|
||||||
displayName: Sanity 2.15
|
|
||||||
dependsOn: []
|
|
||||||
jobs:
|
|
||||||
- template: templates/matrix.yml
|
|
||||||
parameters:
|
|
||||||
nameFormat: Test {0}
|
|
||||||
testFormat: 2.15/sanity/{0}
|
|
||||||
targets:
|
targets:
|
||||||
- test: 1
|
- test: 1
|
||||||
- test: 2
|
- test: 2
|
||||||
- test: 3
|
- test: 3
|
||||||
- test: 4
|
- test: 4
|
||||||
### Units
|
### Units
|
||||||
- stage: Units_devel
|
- stage: Units_2_20
|
||||||
displayName: Units devel
|
displayName: Units 2.20
|
||||||
dependsOn: []
|
dependsOn: []
|
||||||
jobs:
|
jobs:
|
||||||
- template: templates/matrix.yml
|
- template: templates/matrix.yml
|
||||||
parameters:
|
parameters:
|
||||||
nameFormat: Python {0}
|
nameFormat: Python {0}
|
||||||
testFormat: devel/units/{0}/1
|
testFormat: 2.20/units/{0}/1
|
||||||
targets:
|
targets:
|
||||||
- test: 3.8
|
|
||||||
- test: 3.9
|
- test: 3.9
|
||||||
- test: '3.10'
|
- test: '3.10'
|
||||||
- test: '3.11'
|
- test: '3.11'
|
||||||
- test: '3.12'
|
- test: '3.12'
|
||||||
- test: '3.13'
|
- test: '3.13'
|
||||||
- stage: Units_2_17
|
- test: '3.14'
|
||||||
displayName: Units 2.17
|
- stage: Units_2_19
|
||||||
|
displayName: Units 2.19
|
||||||
dependsOn: []
|
dependsOn: []
|
||||||
jobs:
|
jobs:
|
||||||
- template: templates/matrix.yml
|
- template: templates/matrix.yml
|
||||||
parameters:
|
parameters:
|
||||||
nameFormat: Python {0}
|
nameFormat: Python {0}
|
||||||
testFormat: 2.17/units/{0}/1
|
testFormat: 2.19/units/{0}/1
|
||||||
targets:
|
targets:
|
||||||
- test: 3.7
|
- test: 3.8
|
||||||
- test: "3.12"
|
|
||||||
- stage: Units_2_16
|
|
||||||
displayName: Units 2.16
|
|
||||||
dependsOn: []
|
|
||||||
jobs:
|
|
||||||
- template: templates/matrix.yml
|
|
||||||
parameters:
|
|
||||||
nameFormat: Python {0}
|
|
||||||
testFormat: 2.16/units/{0}/1
|
|
||||||
targets:
|
|
||||||
- test: 2.7
|
|
||||||
- test: 3.6
|
|
||||||
- test: "3.11"
|
- test: "3.11"
|
||||||
- stage: Units_2_15
|
- test: "3.13"
|
||||||
displayName: Units 2.15
|
- stage: Units_2_18
|
||||||
|
displayName: Units 2.18
|
||||||
dependsOn: []
|
dependsOn: []
|
||||||
jobs:
|
jobs:
|
||||||
- template: templates/matrix.yml
|
- template: templates/matrix.yml
|
||||||
parameters:
|
parameters:
|
||||||
nameFormat: Python {0}
|
nameFormat: Python {0}
|
||||||
testFormat: 2.15/units/{0}/1
|
testFormat: 2.18/units/{0}/1
|
||||||
targets:
|
targets:
|
||||||
- test: 3.5
|
- test: 3.8
|
||||||
- test: "3.10"
|
- test: "3.11"
|
||||||
|
- test: "3.13"
|
||||||
|
|
||||||
## Remote
|
## Remote
|
||||||
- stage: Remote_devel_extra_vms
|
- stage: Remote_2_20_extra_vms
|
||||||
displayName: Remote devel extra VMs
|
displayName: Remote 2.20 extra VMs
|
||||||
dependsOn: []
|
dependsOn: []
|
||||||
jobs:
|
jobs:
|
||||||
- template: templates/matrix.yml
|
- template: templates/matrix.yml
|
||||||
parameters:
|
parameters:
|
||||||
testFormat: devel/{0}
|
testFormat: 2.20/{0}
|
||||||
targets:
|
targets:
|
||||||
- name: Alpine 3.20
|
- name: Alpine 3.22
|
||||||
test: alpine/3.20
|
test: alpine/3.22
|
||||||
# - name: Fedora 40
|
# - name: Fedora 42
|
||||||
# test: fedora/40
|
# test: fedora/42
|
||||||
- name: Ubuntu 22.04
|
- name: Ubuntu 22.04
|
||||||
test: ubuntu/22.04
|
test: ubuntu/22.04
|
||||||
- name: Ubuntu 24.04
|
- name: Ubuntu 24.04
|
||||||
test: ubuntu/24.04
|
test: ubuntu/24.04
|
||||||
groups:
|
groups:
|
||||||
- vm
|
- vm
|
||||||
- stage: Remote_devel
|
- stage: Remote_2_20
|
||||||
displayName: Remote devel
|
displayName: Remote 2.20
|
||||||
dependsOn: []
|
dependsOn: []
|
||||||
jobs:
|
jobs:
|
||||||
- template: templates/matrix.yml
|
- template: templates/matrix.yml
|
||||||
parameters:
|
parameters:
|
||||||
testFormat: devel/{0}
|
testFormat: 2.20/{0}
|
||||||
targets:
|
targets:
|
||||||
- name: macOS 14.3
|
- name: macOS 15.3
|
||||||
test: macos/14.3
|
test: macos/15.3
|
||||||
- name: RHEL 9.4
|
- name: RHEL 10.1
|
||||||
test: rhel/9.4
|
test: rhel/10.1
|
||||||
|
- name: RHEL 9.7
|
||||||
|
test: rhel/9.7
|
||||||
|
- name: FreeBSD 14.3
|
||||||
|
test: freebsd/14.3
|
||||||
|
- name: FreeBSD 13.5
|
||||||
|
test: freebsd/13.5
|
||||||
|
groups:
|
||||||
|
- 1
|
||||||
|
- 2
|
||||||
|
- 3
|
||||||
|
- stage: Remote_2_19
|
||||||
|
displayName: Remote 2.19
|
||||||
|
dependsOn: []
|
||||||
|
jobs:
|
||||||
|
- template: templates/matrix.yml
|
||||||
|
parameters:
|
||||||
|
testFormat: 2.19/{0}
|
||||||
|
targets:
|
||||||
|
- name: RHEL 10.1
|
||||||
|
test: rhel/10.1
|
||||||
|
- name: FreeBSD 14.2
|
||||||
|
test: freebsd/14.2
|
||||||
|
groups:
|
||||||
|
- 1
|
||||||
|
- 2
|
||||||
|
- 3
|
||||||
|
- stage: Remote_2_18
|
||||||
|
displayName: Remote 2.18
|
||||||
|
dependsOn: []
|
||||||
|
jobs:
|
||||||
|
- template: templates/matrix.yml
|
||||||
|
parameters:
|
||||||
|
testFormat: 2.18/{0}
|
||||||
|
targets:
|
||||||
|
# - name: macOS 14.3
|
||||||
|
# test: macos/14.3
|
||||||
- name: FreeBSD 14.1
|
- name: FreeBSD 14.1
|
||||||
test: freebsd/14.1
|
test: freebsd/14.1
|
||||||
groups:
|
groups:
|
||||||
- 1
|
- 1
|
||||||
- 2
|
- 2
|
||||||
- 3
|
- 3
|
||||||
- stage: Remote_2_17
|
|
||||||
displayName: Remote 2.17
|
|
||||||
dependsOn: []
|
|
||||||
jobs:
|
|
||||||
- template: templates/matrix.yml
|
|
||||||
parameters:
|
|
||||||
testFormat: 2.17/{0}
|
|
||||||
targets:
|
|
||||||
- name: FreeBSD 13.3
|
|
||||||
test: freebsd/13.3
|
|
||||||
- name: RHEL 9.3
|
|
||||||
test: rhel/9.3
|
|
||||||
- name: FreeBSD 14.0
|
|
||||||
test: freebsd/14.0
|
|
||||||
groups:
|
|
||||||
- 1
|
|
||||||
- 2
|
|
||||||
- 3
|
|
||||||
- stage: Remote_2_16
|
|
||||||
displayName: Remote 2.16
|
|
||||||
dependsOn: []
|
|
||||||
jobs:
|
|
||||||
- template: templates/matrix.yml
|
|
||||||
parameters:
|
|
||||||
testFormat: 2.16/{0}
|
|
||||||
targets:
|
|
||||||
- name: macOS 13.2
|
|
||||||
test: macos/13.2
|
|
||||||
- name: RHEL 9.2
|
|
||||||
test: rhel/9.2
|
|
||||||
- name: RHEL 8.8
|
|
||||||
test: rhel/8.8
|
|
||||||
# - name: FreeBSD 13.2
|
|
||||||
# test: freebsd/13.2
|
|
||||||
groups:
|
|
||||||
- 1
|
|
||||||
- 2
|
|
||||||
- 3
|
|
||||||
- stage: Remote_2_15
|
|
||||||
displayName: Remote 2.15
|
|
||||||
dependsOn: []
|
|
||||||
jobs:
|
|
||||||
- template: templates/matrix.yml
|
|
||||||
parameters:
|
|
||||||
testFormat: 2.15/{0}
|
|
||||||
targets:
|
|
||||||
- name: RHEL 9.1
|
|
||||||
test: rhel/9.1
|
|
||||||
- name: RHEL 8.7
|
|
||||||
test: rhel/8.7
|
|
||||||
- name: RHEL 7.9
|
|
||||||
test: rhel/7.9
|
|
||||||
# - name: FreeBSD 13.1
|
|
||||||
# test: freebsd/13.1
|
|
||||||
# - name: FreeBSD 12.4
|
|
||||||
# test: freebsd/12.4
|
|
||||||
groups:
|
|
||||||
- 1
|
|
||||||
- 2
|
|
||||||
- 3
|
|
||||||
|
|
||||||
### Docker
|
### Docker
|
||||||
- stage: Docker_devel
|
- stage: Docker_2_20
|
||||||
displayName: Docker devel
|
displayName: Docker 2.20
|
||||||
dependsOn: []
|
dependsOn: []
|
||||||
jobs:
|
jobs:
|
||||||
- template: templates/matrix.yml
|
- template: templates/matrix.yml
|
||||||
parameters:
|
parameters:
|
||||||
testFormat: devel/linux/{0}
|
testFormat: 2.20/linux/{0}
|
||||||
targets:
|
targets:
|
||||||
- name: Fedora 40
|
- name: Fedora 42
|
||||||
test: fedora40
|
test: fedora42
|
||||||
- name: Alpine 3.20
|
- name: Alpine 3.22
|
||||||
test: alpine320
|
test: alpine322
|
||||||
- name: Ubuntu 22.04
|
- name: Ubuntu 22.04
|
||||||
test: ubuntu2204
|
test: ubuntu2204
|
||||||
- name: Ubuntu 24.04
|
- name: Ubuntu 24.04
|
||||||
@@ -282,151 +232,120 @@ stages:
|
|||||||
- 1
|
- 1
|
||||||
- 2
|
- 2
|
||||||
- 3
|
- 3
|
||||||
- stage: Docker_2_17
|
- stage: Docker_2_19
|
||||||
displayName: Docker 2.17
|
displayName: Docker 2.19
|
||||||
dependsOn: []
|
dependsOn: []
|
||||||
jobs:
|
jobs:
|
||||||
- template: templates/matrix.yml
|
- template: templates/matrix.yml
|
||||||
parameters:
|
parameters:
|
||||||
testFormat: 2.17/linux/{0}
|
testFormat: 2.19/linux/{0}
|
||||||
targets:
|
targets:
|
||||||
- name: Fedora 39
|
- name: Fedora 41
|
||||||
test: fedora39
|
test: fedora41
|
||||||
- name: Alpine 3.19
|
- name: Alpine 3.21
|
||||||
test: alpine319
|
test: alpine321
|
||||||
- name: Ubuntu 20.04
|
|
||||||
test: ubuntu2004
|
|
||||||
groups:
|
groups:
|
||||||
- 1
|
- 1
|
||||||
- 2
|
- 2
|
||||||
- 3
|
- 3
|
||||||
- stage: Docker_2_16
|
- stage: Docker_2_18
|
||||||
displayName: Docker 2.16
|
displayName: Docker 2.18
|
||||||
dependsOn: []
|
dependsOn: []
|
||||||
jobs:
|
jobs:
|
||||||
- template: templates/matrix.yml
|
- template: templates/matrix.yml
|
||||||
parameters:
|
parameters:
|
||||||
testFormat: 2.16/linux/{0}
|
testFormat: 2.18/linux/{0}
|
||||||
targets:
|
targets:
|
||||||
- name: Fedora 38
|
- name: Fedora 40
|
||||||
test: fedora38
|
test: fedora40
|
||||||
- name: openSUSE 15
|
- name: Alpine 3.20
|
||||||
test: opensuse15
|
test: alpine320
|
||||||
- name: Alpine 3
|
- name: Ubuntu 24.04
|
||||||
test: alpine3
|
test: ubuntu2404
|
||||||
groups:
|
|
||||||
- 1
|
|
||||||
- 2
|
|
||||||
- 3
|
|
||||||
- stage: Docker_2_15
|
|
||||||
displayName: Docker 2.15
|
|
||||||
dependsOn: []
|
|
||||||
jobs:
|
|
||||||
- template: templates/matrix.yml
|
|
||||||
parameters:
|
|
||||||
testFormat: 2.15/linux/{0}
|
|
||||||
targets:
|
|
||||||
- name: Fedora 37
|
|
||||||
test: fedora37
|
|
||||||
- name: CentOS 7
|
|
||||||
test: centos7
|
|
||||||
groups:
|
groups:
|
||||||
- 1
|
- 1
|
||||||
- 2
|
- 2
|
||||||
- 3
|
- 3
|
||||||
|
|
||||||
### Community Docker
|
### Community Docker
|
||||||
- stage: Docker_community_devel
|
- stage: Docker_community_2_20
|
||||||
displayName: Docker (community images) devel
|
displayName: Docker (community images) 2.20
|
||||||
dependsOn: []
|
dependsOn: []
|
||||||
jobs:
|
jobs:
|
||||||
- template: templates/matrix.yml
|
- template: templates/matrix.yml
|
||||||
parameters:
|
parameters:
|
||||||
testFormat: devel/linux-community/{0}
|
testFormat: 2.20/linux-community/{0}
|
||||||
targets:
|
targets:
|
||||||
- name: Debian Bullseye
|
- name: Debian 11 Bullseye
|
||||||
test: debian-bullseye/3.9
|
test: debian-bullseye/3.9
|
||||||
- name: Debian Bookworm
|
- name: Debian 12 Bookworm
|
||||||
test: debian-bookworm/3.11
|
test: debian-bookworm/3.11
|
||||||
|
- name: Debian 13 Trixie
|
||||||
|
test: debian-13-trixie/3.13
|
||||||
- name: ArchLinux
|
- name: ArchLinux
|
||||||
test: archlinux/3.12
|
test: archlinux/3.14
|
||||||
groups:
|
groups:
|
||||||
- 1
|
- 1
|
||||||
- 2
|
- 2
|
||||||
- 3
|
- 3
|
||||||
|
|
||||||
### Generic
|
### Generic
|
||||||
- stage: Generic_devel
|
# Right now all generic tests are disabled. Uncomment when at least one of them is re-enabled.
|
||||||
displayName: Generic devel
|
# - stage: Generic_2_20
|
||||||
dependsOn: []
|
# displayName: Generic 2.20
|
||||||
jobs:
|
# dependsOn: []
|
||||||
- template: templates/matrix.yml
|
# jobs:
|
||||||
parameters:
|
# - template: templates/matrix.yml
|
||||||
nameFormat: Python {0}
|
# parameters:
|
||||||
testFormat: devel/generic/{0}/1
|
# nameFormat: Python {0}
|
||||||
targets:
|
# testFormat: 2.20/generic/{0}/1
|
||||||
- test: '3.8'
|
# targets:
|
||||||
- test: '3.11'
|
# - test: '3.9'
|
||||||
- test: '3.13'
|
# - test: '3.12'
|
||||||
- stage: Generic_2_17
|
# - test: '3.14'
|
||||||
displayName: Generic 2.17
|
# - stage: Generic_2_19
|
||||||
dependsOn: []
|
# displayName: Generic 2.19
|
||||||
jobs:
|
# dependsOn: []
|
||||||
- template: templates/matrix.yml
|
# jobs:
|
||||||
parameters:
|
# - template: templates/matrix.yml
|
||||||
nameFormat: Python {0}
|
# parameters:
|
||||||
testFormat: 2.17/generic/{0}/1
|
# nameFormat: Python {0}
|
||||||
targets:
|
# testFormat: 2.19/generic/{0}/1
|
||||||
- test: '3.7'
|
# targets:
|
||||||
- test: '3.12'
|
# - test: '3.9'
|
||||||
- stage: Generic_2_16
|
# - test: '3.13'
|
||||||
displayName: Generic 2.16
|
# - stage: Generic_2_18
|
||||||
dependsOn: []
|
# displayName: Generic 2.18
|
||||||
jobs:
|
# dependsOn: []
|
||||||
- template: templates/matrix.yml
|
# jobs:
|
||||||
parameters:
|
# - template: templates/matrix.yml
|
||||||
nameFormat: Python {0}
|
# parameters:
|
||||||
testFormat: 2.16/generic/{0}/1
|
# nameFormat: Python {0}
|
||||||
targets:
|
# testFormat: 2.18/generic/{0}/1
|
||||||
- test: '2.7'
|
# targets:
|
||||||
- test: '3.6'
|
# - test: '3.8'
|
||||||
- test: '3.11'
|
# - test: '3.13'
|
||||||
- stage: Generic_2_15
|
|
||||||
displayName: Generic 2.15
|
|
||||||
dependsOn: []
|
|
||||||
jobs:
|
|
||||||
- template: templates/matrix.yml
|
|
||||||
parameters:
|
|
||||||
nameFormat: Python {0}
|
|
||||||
testFormat: 2.15/generic/{0}/1
|
|
||||||
targets:
|
|
||||||
- test: '3.9'
|
|
||||||
|
|
||||||
- stage: Summary
|
- stage: Summary
|
||||||
condition: succeededOrFailed()
|
condition: succeededOrFailed()
|
||||||
dependsOn:
|
dependsOn:
|
||||||
- Sanity_devel
|
- Sanity_2_20
|
||||||
- Sanity_2_17
|
- Sanity_2_19
|
||||||
- Sanity_2_16
|
- Sanity_2_18
|
||||||
- Sanity_2_15
|
- Units_2_20
|
||||||
- Units_devel
|
- Units_2_19
|
||||||
- Units_2_17
|
- Units_2_18
|
||||||
- Units_2_16
|
- Remote_2_20_extra_vms
|
||||||
- Units_2_15
|
- Remote_2_20
|
||||||
- Remote_devel_extra_vms
|
- Remote_2_19
|
||||||
- Remote_devel
|
- Remote_2_18
|
||||||
- Remote_2_17
|
- Docker_2_20
|
||||||
- Remote_2_16
|
- Docker_2_19
|
||||||
- Remote_2_15
|
- Docker_2_18
|
||||||
- Docker_devel
|
- Docker_community_2_20
|
||||||
- Docker_2_17
|
|
||||||
- Docker_2_16
|
|
||||||
- Docker_2_15
|
|
||||||
- Docker_community_devel
|
|
||||||
# Right now all generic tests are disabled. Uncomment when at least one of them is re-enabled.
|
# Right now all generic tests are disabled. Uncomment when at least one of them is re-enabled.
|
||||||
# - Generic_devel
|
# - Generic_2_20
|
||||||
# - Generic_2_17
|
# - Generic_2_19
|
||||||
# - Generic_2_16
|
# - Generic_2_18
|
||||||
# - Generic_2_15
|
|
||||||
jobs:
|
jobs:
|
||||||
- template: templates/coverage.yml
|
- template: templates/coverage.yml
|
||||||
|
|||||||
@@ -28,16 +28,6 @@ jobs:
|
|||||||
- bash: .azure-pipelines/scripts/report-coverage.sh
|
- bash: .azure-pipelines/scripts/report-coverage.sh
|
||||||
displayName: Generate Coverage Report
|
displayName: Generate Coverage Report
|
||||||
condition: gt(variables.coverageFileCount, 0)
|
condition: gt(variables.coverageFileCount, 0)
|
||||||
- task: PublishCodeCoverageResults@1
|
|
||||||
inputs:
|
|
||||||
codeCoverageTool: Cobertura
|
|
||||||
# Azure Pipelines only accepts a single coverage data file.
|
|
||||||
# That means only Python or PowerShell coverage can be uploaded, but not both.
|
|
||||||
# Set the "pipelinesCoverage" variable to determine which type is uploaded.
|
|
||||||
# Use "coverage" for Python and "coverage-powershell" for PowerShell.
|
|
||||||
summaryFileLocation: "$(outputPath)/reports/$(pipelinesCoverage).xml"
|
|
||||||
displayName: Publish to Azure Pipelines
|
|
||||||
condition: gt(variables.coverageFileCount, 0)
|
|
||||||
- bash: .azure-pipelines/scripts/publish-codecov.py "$(outputPath)"
|
- bash: .azure-pipelines/scripts/publish-codecov.py "$(outputPath)"
|
||||||
displayName: Publish to codecov.io
|
displayName: Publish to codecov.io
|
||||||
condition: gt(variables.coverageFileCount, 0)
|
condition: gt(variables.coverageFileCount, 0)
|
||||||
|
|||||||
@@ -50,11 +50,11 @@ jobs:
|
|||||||
parameters:
|
parameters:
|
||||||
jobs:
|
jobs:
|
||||||
- ${{ if eq(length(parameters.groups), 0) }}:
|
- ${{ if eq(length(parameters.groups), 0) }}:
|
||||||
- ${{ each target in parameters.targets }}:
|
|
||||||
- name: ${{ format(parameters.nameFormat, coalesce(target.name, target.test)) }}
|
|
||||||
test: ${{ format(parameters.testFormat, coalesce(target.test, target.name)) }}
|
|
||||||
- ${{ if not(eq(length(parameters.groups), 0)) }}:
|
|
||||||
- ${{ each group in parameters.groups }}:
|
|
||||||
- ${{ each target in parameters.targets }}:
|
- ${{ each target in parameters.targets }}:
|
||||||
- name: ${{ format(format(parameters.nameGroupFormat, parameters.nameFormat), coalesce(target.name, target.test), group) }}
|
- name: ${{ format(parameters.nameFormat, coalesce(target.name, target.test)) }}
|
||||||
test: ${{ format(format(parameters.testGroupFormat, parameters.testFormat), coalesce(target.test, target.name), group) }}
|
test: ${{ format(parameters.testFormat, coalesce(target.test, target.name)) }}
|
||||||
|
- ${{ if not(eq(length(parameters.groups), 0)) }}:
|
||||||
|
- ${{ each group in parameters.groups }}:
|
||||||
|
- ${{ each target in parameters.targets }}:
|
||||||
|
- name: ${{ format(format(parameters.nameGroupFormat, parameters.nameFormat), coalesce(target.name, target.test), group) }}
|
||||||
|
test: ${{ format(format(parameters.testGroupFormat, parameters.testFormat), coalesce(target.test, target.name), group) }}
|
||||||
|
|||||||
@@ -14,37 +14,37 @@ parameters:
|
|||||||
|
|
||||||
jobs:
|
jobs:
|
||||||
- ${{ each job in parameters.jobs }}:
|
- ${{ each job in parameters.jobs }}:
|
||||||
- job: test_${{ replace(replace(replace(job.test, '/', '_'), '.', '_'), '-', '_') }}
|
- job: test_${{ replace(replace(replace(job.test, '/', '_'), '.', '_'), '-', '_') }}
|
||||||
displayName: ${{ job.name }}
|
displayName: ${{ job.name }}
|
||||||
container: default
|
container: default
|
||||||
workspace:
|
workspace:
|
||||||
clean: all
|
clean: all
|
||||||
steps:
|
steps:
|
||||||
- checkout: self
|
- checkout: self
|
||||||
fetchDepth: $(fetchDepth)
|
fetchDepth: $(fetchDepth)
|
||||||
path: $(checkoutPath)
|
path: $(checkoutPath)
|
||||||
- bash: .azure-pipelines/scripts/run-tests.sh "$(entryPoint)" "${{ job.test }}" "$(coverageBranches)"
|
- bash: .azure-pipelines/scripts/run-tests.sh "$(entryPoint)" "${{ job.test }}" "$(coverageBranches)"
|
||||||
displayName: Run Tests
|
displayName: Run Tests
|
||||||
- bash: .azure-pipelines/scripts/process-results.sh
|
- bash: .azure-pipelines/scripts/process-results.sh
|
||||||
condition: succeededOrFailed()
|
condition: succeededOrFailed()
|
||||||
displayName: Process Results
|
displayName: Process Results
|
||||||
- bash: .azure-pipelines/scripts/aggregate-coverage.sh "$(Agent.TempDirectory)"
|
- bash: .azure-pipelines/scripts/aggregate-coverage.sh "$(Agent.TempDirectory)"
|
||||||
condition: eq(variables.haveCoverageData, 'true')
|
condition: eq(variables.haveCoverageData, 'true')
|
||||||
displayName: Aggregate Coverage Data
|
displayName: Aggregate Coverage Data
|
||||||
- task: PublishTestResults@2
|
- task: PublishTestResults@2
|
||||||
condition: eq(variables.haveTestResults, 'true')
|
condition: eq(variables.haveTestResults, 'true')
|
||||||
inputs:
|
inputs:
|
||||||
testResultsFiles: "$(outputPath)/junit/*.xml"
|
testResultsFiles: "$(outputPath)/junit/*.xml"
|
||||||
displayName: Publish Test Results
|
displayName: Publish Test Results
|
||||||
- task: PublishPipelineArtifact@1
|
- task: PublishPipelineArtifact@1
|
||||||
condition: eq(variables.haveBotResults, 'true')
|
condition: eq(variables.haveBotResults, 'true')
|
||||||
displayName: Publish Bot Results
|
displayName: Publish Bot Results
|
||||||
inputs:
|
inputs:
|
||||||
targetPath: "$(outputPath)/bot/"
|
targetPath: "$(outputPath)/bot/"
|
||||||
artifactName: "Bot $(System.JobAttempt) $(System.StageDisplayName) $(System.JobDisplayName)"
|
artifactName: "Bot $(System.JobAttempt) $(System.StageDisplayName) $(System.JobDisplayName)"
|
||||||
- task: PublishPipelineArtifact@1
|
- task: PublishPipelineArtifact@1
|
||||||
condition: eq(variables.haveCoverageData, 'true')
|
condition: eq(variables.haveCoverageData, 'true')
|
||||||
displayName: Publish Coverage Data
|
displayName: Publish Coverage Data
|
||||||
inputs:
|
inputs:
|
||||||
targetPath: "$(Agent.TempDirectory)/coverage/"
|
targetPath: "$(Agent.TempDirectory)/coverage/"
|
||||||
artifactName: "Coverage $(System.JobAttempt) $(System.StageDisplayName) $(System.JobDisplayName)"
|
artifactName: "Coverage $(System.JobAttempt) $(System.StageDisplayName) $(System.JobDisplayName)"
|
||||||
|
|||||||
9
.git-blame-ignore-revs
Normal file
9
.git-blame-ignore-revs
Normal file
@@ -0,0 +1,9 @@
|
|||||||
|
# Copyright (c) Ansible Project
|
||||||
|
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||||
|
# SPDX-License-Identifier: GPL-3.0-or-later
|
||||||
|
|
||||||
|
# YAML reformatting
|
||||||
|
2b4882549908b5b1fafe5fa10efb47f613a71f94
|
||||||
|
8196cacff8e83dc5d7fb88b43ef3cab5d3751c39
|
||||||
|
bd4f1a3e5ca1af5afc53636c36767e81a4566978
|
||||||
|
a9e892952deef6f91977d7032dd95237a9867509
|
||||||
101
.github/BOTMETA.yml
vendored
101
.github/BOTMETA.yml
vendored
@@ -61,7 +61,6 @@ files:
|
|||||||
$callbacks/elastic.py:
|
$callbacks/elastic.py:
|
||||||
keywords: apm observability
|
keywords: apm observability
|
||||||
maintainers: v1v
|
maintainers: v1v
|
||||||
$callbacks/hipchat.py: {}
|
|
||||||
$callbacks/jabber.py: {}
|
$callbacks/jabber.py: {}
|
||||||
$callbacks/log_plays.py: {}
|
$callbacks/log_plays.py: {}
|
||||||
$callbacks/loganalytics.py:
|
$callbacks/loganalytics.py:
|
||||||
@@ -78,6 +77,8 @@ files:
|
|||||||
$callbacks/opentelemetry.py:
|
$callbacks/opentelemetry.py:
|
||||||
keywords: opentelemetry observability
|
keywords: opentelemetry observability
|
||||||
maintainers: v1v
|
maintainers: v1v
|
||||||
|
$callbacks/print_task.py:
|
||||||
|
maintainers: demonpig
|
||||||
$callbacks/say.py:
|
$callbacks/say.py:
|
||||||
keywords: brew cask darwin homebrew macosx macports osx
|
keywords: brew cask darwin homebrew macosx macports osx
|
||||||
labels: macos say
|
labels: macos say
|
||||||
@@ -112,15 +113,22 @@ files:
|
|||||||
$connections/lxd.py:
|
$connections/lxd.py:
|
||||||
labels: lxd
|
labels: lxd
|
||||||
maintainers: mattclay
|
maintainers: mattclay
|
||||||
|
$connections/proxmox_pct_remote.py:
|
||||||
|
labels: proxmox
|
||||||
|
maintainers: mietzen
|
||||||
$connections/qubes.py:
|
$connections/qubes.py:
|
||||||
maintainers: kushaldas
|
maintainers: kushaldas
|
||||||
$connections/saltstack.py:
|
$connections/saltstack.py:
|
||||||
labels: saltstack
|
labels: saltstack
|
||||||
maintainers: mscherer
|
maintainers: mscherer
|
||||||
|
$connections/wsl.py:
|
||||||
|
maintainers: rgl
|
||||||
$connections/zone.py:
|
$connections/zone.py:
|
||||||
maintainers: $team_ansible_core
|
maintainers: $team_ansible_core
|
||||||
$doc_fragments/:
|
$doc_fragments/:
|
||||||
labels: docs_fragments
|
labels: docs_fragments
|
||||||
|
$doc_fragments/clc.py:
|
||||||
|
maintainers: clc-runner russoz
|
||||||
$doc_fragments/django.py:
|
$doc_fragments/django.py:
|
||||||
maintainers: russoz
|
maintainers: russoz
|
||||||
$doc_fragments/hpe3par.py:
|
$doc_fragments/hpe3par.py:
|
||||||
@@ -131,9 +139,13 @@ files:
|
|||||||
maintainers: $team_huawei
|
maintainers: $team_huawei
|
||||||
$doc_fragments/nomad.py:
|
$doc_fragments/nomad.py:
|
||||||
maintainers: chris93111 apecnascimento
|
maintainers: chris93111 apecnascimento
|
||||||
|
$doc_fragments/pipx.py:
|
||||||
|
maintainers: russoz
|
||||||
$doc_fragments/xenserver.py:
|
$doc_fragments/xenserver.py:
|
||||||
labels: xenserver
|
labels: xenserver
|
||||||
maintainers: bvitnik
|
maintainers: bvitnik
|
||||||
|
$filters/accumulate.py:
|
||||||
|
maintainers: VannTen
|
||||||
$filters/counter.py:
|
$filters/counter.py:
|
||||||
maintainers: keilr
|
maintainers: keilr
|
||||||
$filters/crc32.py:
|
$filters/crc32.py:
|
||||||
@@ -156,6 +168,14 @@ files:
|
|||||||
maintainers: Ajpantuso
|
maintainers: Ajpantuso
|
||||||
$filters/jc.py:
|
$filters/jc.py:
|
||||||
maintainers: kellyjonbrazil
|
maintainers: kellyjonbrazil
|
||||||
|
$filters/json_diff.yml:
|
||||||
|
maintainers: numo68
|
||||||
|
$filters/json_patch.py:
|
||||||
|
maintainers: numo68
|
||||||
|
$filters/json_patch.yml:
|
||||||
|
maintainers: numo68
|
||||||
|
$filters/json_patch_recipe.yml:
|
||||||
|
maintainers: numo68
|
||||||
$filters/json_query.py: {}
|
$filters/json_query.py: {}
|
||||||
$filters/keep_keys.py:
|
$filters/keep_keys.py:
|
||||||
maintainers: vbotka
|
maintainers: vbotka
|
||||||
@@ -192,6 +212,8 @@ files:
|
|||||||
maintainers: resmo
|
maintainers: resmo
|
||||||
$filters/to_months.yml:
|
$filters/to_months.yml:
|
||||||
maintainers: resmo
|
maintainers: resmo
|
||||||
|
$filters/to_prettytable.py:
|
||||||
|
maintainers: tgadiev
|
||||||
$filters/to_seconds.yml:
|
$filters/to_seconds.yml:
|
||||||
maintainers: resmo
|
maintainers: resmo
|
||||||
$filters/to_time_unit.yml:
|
$filters/to_time_unit.yml:
|
||||||
@@ -210,6 +232,8 @@ files:
|
|||||||
maintainers: opoplawski
|
maintainers: opoplawski
|
||||||
$inventories/gitlab_runners.py:
|
$inventories/gitlab_runners.py:
|
||||||
maintainers: morph027
|
maintainers: morph027
|
||||||
|
$inventories/iocage.py:
|
||||||
|
maintainers: vbotka
|
||||||
$inventories/icinga2.py:
|
$inventories/icinga2.py:
|
||||||
maintainers: BongoEADGC6
|
maintainers: BongoEADGC6
|
||||||
$inventories/linode.py:
|
$inventories/linode.py:
|
||||||
@@ -289,6 +313,8 @@ files:
|
|||||||
$lookups/onepassword_raw.py:
|
$lookups/onepassword_raw.py:
|
||||||
ignore: scottsb
|
ignore: scottsb
|
||||||
maintainers: azenk
|
maintainers: azenk
|
||||||
|
$lookups/onepassword_ssh_key.py:
|
||||||
|
maintainers: mohammedbabelly20
|
||||||
$lookups/passwordstore.py: {}
|
$lookups/passwordstore.py: {}
|
||||||
$lookups/random_pet.py:
|
$lookups/random_pet.py:
|
||||||
maintainers: Akasurde
|
maintainers: Akasurde
|
||||||
@@ -306,8 +332,12 @@ files:
|
|||||||
maintainers: delineaKrehl tylerezimmerman
|
maintainers: delineaKrehl tylerezimmerman
|
||||||
$module_utils/:
|
$module_utils/:
|
||||||
labels: module_utils
|
labels: module_utils
|
||||||
|
$module_utils/android_sdkmanager.py:
|
||||||
|
maintainers: shamilovstas
|
||||||
$module_utils/btrfs.py:
|
$module_utils/btrfs.py:
|
||||||
maintainers: gnfzdz
|
maintainers: gnfzdz
|
||||||
|
$module_utils/cmd_runner_fmt.py:
|
||||||
|
maintainers: russoz
|
||||||
$module_utils/cmd_runner.py:
|
$module_utils/cmd_runner.py:
|
||||||
maintainers: russoz
|
maintainers: russoz
|
||||||
$module_utils/deps.py:
|
$module_utils/deps.py:
|
||||||
@@ -354,9 +384,13 @@ files:
|
|||||||
$module_utils/oracle/oci_utils.py:
|
$module_utils/oracle/oci_utils.py:
|
||||||
labels: cloud
|
labels: cloud
|
||||||
maintainers: $team_oracle
|
maintainers: $team_oracle
|
||||||
|
$module_utils/pacemaker.py:
|
||||||
|
maintainers: munchtoast
|
||||||
$module_utils/pipx.py:
|
$module_utils/pipx.py:
|
||||||
labels: pipx
|
labels: pipx
|
||||||
maintainers: russoz
|
maintainers: russoz
|
||||||
|
$module_utils/pkg_req.py:
|
||||||
|
maintainers: russoz
|
||||||
$module_utils/python_runner.py:
|
$module_utils/python_runner.py:
|
||||||
maintainers: russoz
|
maintainers: russoz
|
||||||
$module_utils/puppet.py:
|
$module_utils/puppet.py:
|
||||||
@@ -378,6 +412,8 @@ files:
|
|||||||
maintainers: russoz
|
maintainers: russoz
|
||||||
$module_utils/ssh.py:
|
$module_utils/ssh.py:
|
||||||
maintainers: russoz
|
maintainers: russoz
|
||||||
|
$module_utils/systemd.py:
|
||||||
|
maintainers: NomakCooper
|
||||||
$module_utils/storage/hpe3par/hpe3par.py:
|
$module_utils/storage/hpe3par/hpe3par.py:
|
||||||
maintainers: farhan7500 gautamphegde
|
maintainers: farhan7500 gautamphegde
|
||||||
$module_utils/utm_utils.py:
|
$module_utils/utm_utils.py:
|
||||||
@@ -389,6 +425,8 @@ files:
|
|||||||
$module_utils/wdc_redfish_utils.py:
|
$module_utils/wdc_redfish_utils.py:
|
||||||
labels: wdc_redfish_utils
|
labels: wdc_redfish_utils
|
||||||
maintainers: $team_wdc
|
maintainers: $team_wdc
|
||||||
|
$module_utils/xdg_mime.py:
|
||||||
|
maintainers: mhalano
|
||||||
$module_utils/xenserver.py:
|
$module_utils/xenserver.py:
|
||||||
labels: xenserver
|
labels: xenserver
|
||||||
maintainers: bvitnik
|
maintainers: bvitnik
|
||||||
@@ -415,6 +453,8 @@ files:
|
|||||||
ignore: DavidWittman jiuka
|
ignore: DavidWittman jiuka
|
||||||
labels: alternatives
|
labels: alternatives
|
||||||
maintainers: mulby
|
maintainers: mulby
|
||||||
|
$modules/android_sdk.py:
|
||||||
|
maintainers: shamilovstas
|
||||||
$modules/ansible_galaxy_install.py:
|
$modules/ansible_galaxy_install.py:
|
||||||
maintainers: russoz
|
maintainers: russoz
|
||||||
$modules/apache2_mod_proxy.py:
|
$modules/apache2_mod_proxy.py:
|
||||||
@@ -445,7 +485,7 @@ files:
|
|||||||
$modules/bearychat.py:
|
$modules/bearychat.py:
|
||||||
maintainers: tonyseek
|
maintainers: tonyseek
|
||||||
$modules/bigpanda.py:
|
$modules/bigpanda.py:
|
||||||
maintainers: hkariti
|
ignore: hkariti
|
||||||
$modules/bitbucket_:
|
$modules/bitbucket_:
|
||||||
maintainers: catcombo
|
maintainers: catcombo
|
||||||
$modules/bootc_manage.py:
|
$modules/bootc_manage.py:
|
||||||
@@ -503,6 +543,8 @@ files:
|
|||||||
ignore: skornehl
|
ignore: skornehl
|
||||||
$modules/dconf.py:
|
$modules/dconf.py:
|
||||||
maintainers: azaghal
|
maintainers: azaghal
|
||||||
|
$modules/decompress.py:
|
||||||
|
maintainers: shamilovstas
|
||||||
$modules/deploy_helper.py:
|
$modules/deploy_helper.py:
|
||||||
maintainers: ramondelafuente
|
maintainers: ramondelafuente
|
||||||
$modules/dimensiondata_network.py:
|
$modules/dimensiondata_network.py:
|
||||||
@@ -712,6 +754,8 @@ files:
|
|||||||
$modules/ipa_:
|
$modules/ipa_:
|
||||||
maintainers: $team_ipa
|
maintainers: $team_ipa
|
||||||
ignore: fxfitz
|
ignore: fxfitz
|
||||||
|
$modules/ipa_getkeytab.py:
|
||||||
|
maintainers: abakanovskii
|
||||||
$modules/ipa_dnsrecord.py:
|
$modules/ipa_dnsrecord.py:
|
||||||
maintainers: $team_ipa jwbernin
|
maintainers: $team_ipa jwbernin
|
||||||
$modules/ipbase_info.py:
|
$modules/ipbase_info.py:
|
||||||
@@ -757,6 +801,8 @@ files:
|
|||||||
maintainers: sermilrod
|
maintainers: sermilrod
|
||||||
$modules/jenkins_job_info.py:
|
$modules/jenkins_job_info.py:
|
||||||
maintainers: stpierre
|
maintainers: stpierre
|
||||||
|
$modules/jenkins_node.py:
|
||||||
|
maintainers: phyrwork
|
||||||
$modules/jenkins_plugin.py:
|
$modules/jenkins_plugin.py:
|
||||||
maintainers: jtyr
|
maintainers: jtyr
|
||||||
$modules/jenkins_script.py:
|
$modules/jenkins_script.py:
|
||||||
@@ -793,6 +839,8 @@ files:
|
|||||||
maintainers: fynncfchen johncant
|
maintainers: fynncfchen johncant
|
||||||
$modules/keycloak_clientsecret_regenerate.py:
|
$modules/keycloak_clientsecret_regenerate.py:
|
||||||
maintainers: fynncfchen johncant
|
maintainers: fynncfchen johncant
|
||||||
|
$modules/keycloak_component.py:
|
||||||
|
maintainers: fivetide
|
||||||
$modules/keycloak_group.py:
|
$modules/keycloak_group.py:
|
||||||
maintainers: adamgoossens
|
maintainers: adamgoossens
|
||||||
$modules/keycloak_identity_provider.py:
|
$modules/keycloak_identity_provider.py:
|
||||||
@@ -825,6 +873,8 @@ files:
|
|||||||
maintainers: ahussey-redhat
|
maintainers: ahussey-redhat
|
||||||
$modules/kibana_plugin.py:
|
$modules/kibana_plugin.py:
|
||||||
maintainers: barryib
|
maintainers: barryib
|
||||||
|
$modules/krb_ticket.py:
|
||||||
|
maintainers: abakanovskii
|
||||||
$modules/launchd.py:
|
$modules/launchd.py:
|
||||||
maintainers: martinm82
|
maintainers: martinm82
|
||||||
$modules/layman.py:
|
$modules/layman.py:
|
||||||
@@ -835,6 +885,8 @@ files:
|
|||||||
maintainers: drybjed jtyr noles
|
maintainers: drybjed jtyr noles
|
||||||
$modules/ldap_entry.py:
|
$modules/ldap_entry.py:
|
||||||
maintainers: jtyr
|
maintainers: jtyr
|
||||||
|
$modules/ldap_inc.py:
|
||||||
|
maintainers: pduveau
|
||||||
$modules/ldap_passwd.py:
|
$modules/ldap_passwd.py:
|
||||||
maintainers: KellerFuchs jtyr
|
maintainers: KellerFuchs jtyr
|
||||||
$modules/ldap_search.py:
|
$modules/ldap_search.py:
|
||||||
@@ -1016,6 +1068,8 @@ files:
|
|||||||
maintainers: fraff
|
maintainers: fraff
|
||||||
$modules/pacemaker_cluster.py:
|
$modules/pacemaker_cluster.py:
|
||||||
maintainers: matbu
|
maintainers: matbu
|
||||||
|
$modules/pacemaker_resource.py:
|
||||||
|
maintainers: munchtoast
|
||||||
$modules/packet_:
|
$modules/packet_:
|
||||||
maintainers: nurfet-becirevic t0mk
|
maintainers: nurfet-becirevic t0mk
|
||||||
$modules/packet_device.py:
|
$modules/packet_device.py:
|
||||||
@@ -1106,6 +1160,10 @@ files:
|
|||||||
$modules/proxmox_kvm.py:
|
$modules/proxmox_kvm.py:
|
||||||
ignore: skvidal
|
ignore: skvidal
|
||||||
maintainers: helldorado krauthosting
|
maintainers: helldorado krauthosting
|
||||||
|
$modules/proxmox_backup.py:
|
||||||
|
maintainers: IamLunchbox
|
||||||
|
$modules/proxmox_backup_info.py:
|
||||||
|
maintainers: raoufnezhad mmayabi
|
||||||
$modules/proxmox_nic.py:
|
$modules/proxmox_nic.py:
|
||||||
maintainers: Kogelvis krauthosting
|
maintainers: Kogelvis krauthosting
|
||||||
$modules/proxmox_node_info.py:
|
$modules/proxmox_node_info.py:
|
||||||
@@ -1155,12 +1213,6 @@ files:
|
|||||||
keywords: kvm libvirt proxmox qemu
|
keywords: kvm libvirt proxmox qemu
|
||||||
labels: rhevm virt
|
labels: rhevm virt
|
||||||
maintainers: $team_virt TimothyVandenbrande
|
maintainers: $team_virt TimothyVandenbrande
|
||||||
$modules/rhn_channel.py:
|
|
||||||
labels: rhn_channel
|
|
||||||
maintainers: vincentvdk alikins $team_rhn
|
|
||||||
$modules/rhn_register.py:
|
|
||||||
labels: rhn_register
|
|
||||||
maintainers: jlaska $team_rhn
|
|
||||||
$modules/rhsm_release.py:
|
$modules/rhsm_release.py:
|
||||||
maintainers: seandst $team_rhsm
|
maintainers: seandst $team_rhsm
|
||||||
$modules/rhsm_repository.py:
|
$modules/rhsm_repository.py:
|
||||||
@@ -1193,9 +1245,9 @@ files:
|
|||||||
$modules/scaleway_compute_private_network.py:
|
$modules/scaleway_compute_private_network.py:
|
||||||
maintainers: pastral
|
maintainers: pastral
|
||||||
$modules/scaleway_container.py:
|
$modules/scaleway_container.py:
|
||||||
maintainers: Lunik
|
maintainers: Lunik
|
||||||
$modules/scaleway_container_info.py:
|
$modules/scaleway_container_info.py:
|
||||||
maintainers: Lunik
|
maintainers: Lunik
|
||||||
$modules/scaleway_container_namespace.py:
|
$modules/scaleway_container_namespace.py:
|
||||||
maintainers: Lunik
|
maintainers: Lunik
|
||||||
$modules/scaleway_container_namespace_info.py:
|
$modules/scaleway_container_namespace_info.py:
|
||||||
@@ -1324,6 +1376,12 @@ files:
|
|||||||
maintainers: precurse
|
maintainers: precurse
|
||||||
$modules/sysrc.py:
|
$modules/sysrc.py:
|
||||||
maintainers: dlundgren
|
maintainers: dlundgren
|
||||||
|
$modules/systemd_creds_decrypt.py:
|
||||||
|
maintainers: konstruktoid
|
||||||
|
$modules/systemd_creds_encrypt.py:
|
||||||
|
maintainers: konstruktoid
|
||||||
|
$modules/systemd_info.py:
|
||||||
|
maintainers: NomakCooper
|
||||||
$modules/sysupgrade.py:
|
$modules/sysupgrade.py:
|
||||||
maintainers: precurse
|
maintainers: precurse
|
||||||
$modules/taiga_issue.py:
|
$modules/taiga_issue.py:
|
||||||
@@ -1355,16 +1413,19 @@ files:
|
|||||||
keywords: sophos utm
|
keywords: sophos utm
|
||||||
maintainers: $team_e_spirit
|
maintainers: $team_e_spirit
|
||||||
$modules/utm_ca_host_key_cert.py:
|
$modules/utm_ca_host_key_cert.py:
|
||||||
maintainers: stearz
|
ignore: stearz
|
||||||
|
maintainers: $team_e_spirit
|
||||||
$modules/utm_ca_host_key_cert_info.py:
|
$modules/utm_ca_host_key_cert_info.py:
|
||||||
maintainers: stearz
|
ignore: stearz
|
||||||
|
maintainers: $team_e_spirit
|
||||||
$modules/utm_network_interface_address.py:
|
$modules/utm_network_interface_address.py:
|
||||||
maintainers: steamx
|
maintainers: steamx
|
||||||
$modules/utm_network_interface_address_info.py:
|
$modules/utm_network_interface_address_info.py:
|
||||||
maintainers: steamx
|
maintainers: steamx
|
||||||
$modules/utm_proxy_auth_profile.py:
|
$modules/utm_proxy_auth_profile.py:
|
||||||
keywords: sophos utm
|
keywords: sophos utm
|
||||||
maintainers: $team_e_spirit stearz
|
ignore: stearz
|
||||||
|
maintainers: $team_e_spirit
|
||||||
$modules/utm_proxy_exception.py:
|
$modules/utm_proxy_exception.py:
|
||||||
keywords: sophos utm
|
keywords: sophos utm
|
||||||
maintainers: $team_e_spirit RickS-C137
|
maintainers: $team_e_spirit RickS-C137
|
||||||
@@ -1394,6 +1455,8 @@ files:
|
|||||||
maintainers: dinoocch the-maldridge
|
maintainers: dinoocch the-maldridge
|
||||||
$modules/xcc_:
|
$modules/xcc_:
|
||||||
maintainers: panyy3 renxulei
|
maintainers: panyy3 renxulei
|
||||||
|
$modules/xdg_mime.py:
|
||||||
|
maintainers: mhalano
|
||||||
$modules/xenserver_:
|
$modules/xenserver_:
|
||||||
maintainers: bvitnik
|
maintainers: bvitnik
|
||||||
$modules/xenserver_facts.py:
|
$modules/xenserver_facts.py:
|
||||||
@@ -1440,6 +1503,9 @@ files:
|
|||||||
maintainers: $team_suse
|
maintainers: $team_suse
|
||||||
$plugin_utils/ansible_type.py:
|
$plugin_utils/ansible_type.py:
|
||||||
maintainers: vbotka
|
maintainers: vbotka
|
||||||
|
$modules/zypper_repository_info.py:
|
||||||
|
labels: zypper
|
||||||
|
maintainers: $team_suse TobiasZeuch181
|
||||||
$plugin_utils/keys_filter.py:
|
$plugin_utils/keys_filter.py:
|
||||||
maintainers: vbotka
|
maintainers: vbotka
|
||||||
$plugin_utils/unsafe.py:
|
$plugin_utils/unsafe.py:
|
||||||
@@ -1497,6 +1563,8 @@ files:
|
|||||||
maintainers: baldwinSPC nurfet-becirevic t0mk teebes
|
maintainers: baldwinSPC nurfet-becirevic t0mk teebes
|
||||||
docs/docsite/rst/guide_scaleway.rst:
|
docs/docsite/rst/guide_scaleway.rst:
|
||||||
maintainers: $team_scaleway
|
maintainers: $team_scaleway
|
||||||
|
docs/docsite/rst/guide_uthelper.rst:
|
||||||
|
maintainers: russoz
|
||||||
docs/docsite/rst/guide_vardict.rst:
|
docs/docsite/rst/guide_vardict.rst:
|
||||||
maintainers: russoz
|
maintainers: russoz
|
||||||
docs/docsite/rst/test_guide.rst:
|
docs/docsite/rst/test_guide.rst:
|
||||||
@@ -1528,7 +1596,7 @@ macros:
|
|||||||
plugin_utils: plugins/plugin_utils
|
plugin_utils: plugins/plugin_utils
|
||||||
tests: plugins/test
|
tests: plugins/test
|
||||||
team_ansible_core:
|
team_ansible_core:
|
||||||
team_aix: MorrisA bcoca d-little flynn1973 gforster kairoaraujo marvin-sinister mator molekuul ramooncamacho wtcross
|
team_aix: MorrisA bcoca d-little flynn1973 gforster kairoaraujo marvin-sinister molekuul ramooncamacho wtcross
|
||||||
team_bsd: JoergFiedler MacLemon bcoca dch jasperla mekanix opoplawski overhacked tuxillo
|
team_bsd: JoergFiedler MacLemon bcoca dch jasperla mekanix opoplawski overhacked tuxillo
|
||||||
team_consul: sgargan apollo13 Ilgmi
|
team_consul: sgargan apollo13 Ilgmi
|
||||||
team_cyberark_conjur: jvanderhoof ryanprior
|
team_cyberark_conjur: jvanderhoof ryanprior
|
||||||
@@ -1548,10 +1616,9 @@ macros:
|
|||||||
team_oracle: manojmeda mross22 nalsaber
|
team_oracle: manojmeda mross22 nalsaber
|
||||||
team_purestorage: bannaych dnix101 genegr lionmax opslounge raekins sdodsley sile16
|
team_purestorage: bannaych dnix101 genegr lionmax opslounge raekins sdodsley sile16
|
||||||
team_redfish: mraineri tomasg2012 xmadsen renxulei rajeevkallur bhavya06 jyundt
|
team_redfish: mraineri tomasg2012 xmadsen renxulei rajeevkallur bhavya06 jyundt
|
||||||
team_rhn: FlossWare alikins barnabycourt vritant
|
|
||||||
team_rhsm: cnsnyder ptoscano
|
team_rhsm: cnsnyder ptoscano
|
||||||
team_scaleway: remyleone abarbare
|
team_scaleway: remyleone abarbare
|
||||||
team_solaris: bcoca fishman jasperla jpdasma mator scathatheworm troy2914 xen0l
|
team_solaris: bcoca fishman jasperla jpdasma scathatheworm troy2914 xen0l
|
||||||
team_suse: commel evrardjp lrupp toabctl AnderEnder alxgu andytom sealor
|
team_suse: commel evrardjp lrupp AnderEnder alxgu andytom sealor
|
||||||
team_virt: joshainglis karmab Thulium-Drake Ajpantuso
|
team_virt: joshainglis karmab Thulium-Drake Ajpantuso
|
||||||
team_wdc: mikemoerk
|
team_wdc: mikemoerk
|
||||||
|
|||||||
278
.github/ISSUE_TEMPLATE/bug_report.yml
vendored
278
.github/ISSUE_TEMPLATE/bug_report.yml
vendored
@@ -7,147 +7,147 @@ name: Bug report
|
|||||||
description: Create a report to help us improve
|
description: Create a report to help us improve
|
||||||
|
|
||||||
body:
|
body:
|
||||||
- type: markdown
|
- type: markdown
|
||||||
attributes:
|
attributes:
|
||||||
value: |
|
value: |
|
||||||
⚠
|
⚠
|
||||||
Verify first that your issue is not [already reported on GitHub][issue search].
|
Verify first that your issue is not [already reported on GitHub][issue search].
|
||||||
Also test if the latest release and devel branch are affected too.
|
Also test if the latest release and devel branch are affected too.
|
||||||
*Complete **all** sections as described, this form is processed automatically.*
|
*Complete **all** sections as described, this form is processed automatically.*
|
||||||
|
|
||||||
[issue search]: https://github.com/ansible-collections/community.general/search?q=is%3Aissue&type=issues
|
[issue search]: https://github.com/ansible-collections/community.general/search?q=is%3Aissue&type=issues
|
||||||
|
|
||||||
|
|
||||||
- type: textarea
|
- type: textarea
|
||||||
attributes:
|
attributes:
|
||||||
label: Summary
|
label: Summary
|
||||||
description: Explain the problem briefly below.
|
description: Explain the problem briefly below.
|
||||||
placeholder: >-
|
placeholder: >-
|
||||||
When I try to do X with the collection from the main branch on GitHub, Y
|
When I try to do X with the collection from the main branch on GitHub, Y
|
||||||
breaks in a way Z under the env E. Here are all the details I know
|
breaks in a way Z under the env E. Here are all the details I know
|
||||||
about this problem...
|
about this problem...
|
||||||
validations:
|
validations:
|
||||||
required: true
|
|
||||||
|
|
||||||
- type: dropdown
|
|
||||||
attributes:
|
|
||||||
label: Issue Type
|
|
||||||
# FIXME: Once GitHub allows defining the default choice, update this
|
|
||||||
options:
|
|
||||||
- Bug Report
|
|
||||||
validations:
|
|
||||||
required: true
|
|
||||||
|
|
||||||
- type: textarea
|
|
||||||
attributes:
|
|
||||||
# For smaller collections we could use a multi-select and hardcode the list
|
|
||||||
# May generate this list via GitHub action and walking files under https://github.com/ansible-collections/community.general/tree/main/plugins
|
|
||||||
# Select from list, filter as you type (`mysql` would only show the 3 mysql components)
|
|
||||||
# OR freeform - doesn't seem to be supported in adaptivecards
|
|
||||||
label: Component Name
|
|
||||||
description: >-
|
|
||||||
Write the short name of the module, plugin, task or feature below,
|
|
||||||
*use your best guess if unsure*. Do not include `community.general.`!
|
|
||||||
placeholder: dnf, apt, yum, pip, user etc.
|
|
||||||
validations:
|
|
||||||
required: true
|
|
||||||
|
|
||||||
- type: textarea
|
|
||||||
attributes:
|
|
||||||
label: Ansible Version
|
|
||||||
description: >-
|
|
||||||
Paste verbatim output from `ansible --version` between
|
|
||||||
tripple backticks.
|
|
||||||
value: |
|
|
||||||
```console (paste below)
|
|
||||||
$ ansible --version
|
|
||||||
|
|
||||||
```
|
|
||||||
validations:
|
|
||||||
required: true
|
|
||||||
|
|
||||||
- type: textarea
|
|
||||||
attributes:
|
|
||||||
label: Community.general Version
|
|
||||||
description: >-
|
|
||||||
Paste verbatim output from "ansible-galaxy collection list community.general"
|
|
||||||
between tripple backticks.
|
|
||||||
value: |
|
|
||||||
```console (paste below)
|
|
||||||
$ ansible-galaxy collection list community.general
|
|
||||||
|
|
||||||
```
|
|
||||||
validations:
|
|
||||||
required: true
|
|
||||||
|
|
||||||
- type: textarea
|
|
||||||
attributes:
|
|
||||||
label: Configuration
|
|
||||||
description: >-
|
|
||||||
If this issue has an example piece of YAML that can help to reproduce this problem, please provide it.
|
|
||||||
This can be a piece of YAML from, e.g., an automation, script, scene or configuration.
|
|
||||||
Paste verbatim output from `ansible-config dump --only-changed` between quotes
|
|
||||||
value: |
|
|
||||||
```console (paste below)
|
|
||||||
$ ansible-config dump --only-changed
|
|
||||||
|
|
||||||
```
|
|
||||||
|
|
||||||
|
|
||||||
- type: textarea
|
|
||||||
attributes:
|
|
||||||
label: OS / Environment
|
|
||||||
description: >-
|
|
||||||
Provide all relevant information below, e.g. target OS versions,
|
|
||||||
network device firmware, etc.
|
|
||||||
placeholder: RHEL 8, CentOS Stream etc.
|
|
||||||
validations:
|
|
||||||
required: false
|
|
||||||
|
|
||||||
|
|
||||||
- type: textarea
|
|
||||||
attributes:
|
|
||||||
label: Steps to Reproduce
|
|
||||||
description: |
|
|
||||||
Describe exactly how to reproduce the problem, using a minimal test-case. It would *really* help us understand your problem if you could also passed any playbooks, configs and commands you used.
|
|
||||||
|
|
||||||
**HINT:** You can paste https://gist.github.com links for larger files.
|
|
||||||
value: |
|
|
||||||
<!--- Paste example playbooks or commands between quotes below -->
|
|
||||||
```yaml (paste below)
|
|
||||||
|
|
||||||
```
|
|
||||||
validations:
|
|
||||||
required: true
|
|
||||||
|
|
||||||
- type: textarea
|
|
||||||
attributes:
|
|
||||||
label: Expected Results
|
|
||||||
description: >-
|
|
||||||
Describe what you expected to happen when running the steps above.
|
|
||||||
placeholder: >-
|
|
||||||
I expected X to happen because I assumed Y.
|
|
||||||
that it did not.
|
|
||||||
validations:
|
|
||||||
required: true
|
|
||||||
|
|
||||||
- type: textarea
|
|
||||||
attributes:
|
|
||||||
label: Actual Results
|
|
||||||
description: |
|
|
||||||
Describe what actually happened. If possible run with extra verbosity (`-vvvv`).
|
|
||||||
|
|
||||||
Paste verbatim command output between quotes.
|
|
||||||
value: |
|
|
||||||
```console (paste below)
|
|
||||||
|
|
||||||
```
|
|
||||||
- type: checkboxes
|
|
||||||
attributes:
|
|
||||||
label: Code of Conduct
|
|
||||||
description: |
|
|
||||||
Read the [Ansible Code of Conduct](https://docs.ansible.com/ansible/latest/community/code_of_conduct.html?utm_medium=github&utm_source=issue_form--ansible-collections) first.
|
|
||||||
options:
|
|
||||||
- label: I agree to follow the Ansible Code of Conduct
|
|
||||||
required: true
|
required: true
|
||||||
|
|
||||||
|
- type: dropdown
|
||||||
|
attributes:
|
||||||
|
label: Issue Type
|
||||||
|
# FIXME: Once GitHub allows defining the default choice, update this
|
||||||
|
options:
|
||||||
|
- Bug Report
|
||||||
|
validations:
|
||||||
|
required: true
|
||||||
|
|
||||||
|
- type: textarea
|
||||||
|
attributes:
|
||||||
|
# For smaller collections we could use a multi-select and hardcode the list
|
||||||
|
# May generate this list via GitHub action and walking files under https://github.com/ansible-collections/community.general/tree/main/plugins
|
||||||
|
# Select from list, filter as you type (`mysql` would only show the 3 mysql components)
|
||||||
|
# OR freeform - doesn't seem to be supported in adaptivecards
|
||||||
|
label: Component Name
|
||||||
|
description: >-
|
||||||
|
Write the short name of the module, plugin, task or feature below,
|
||||||
|
*use your best guess if unsure*. Do not include `community.general.`!
|
||||||
|
placeholder: dnf, apt, yum, pip, user etc.
|
||||||
|
validations:
|
||||||
|
required: true
|
||||||
|
|
||||||
|
- type: textarea
|
||||||
|
attributes:
|
||||||
|
label: Ansible Version
|
||||||
|
description: >-
|
||||||
|
Paste verbatim output from `ansible --version` between
|
||||||
|
tripple backticks.
|
||||||
|
value: |
|
||||||
|
```console (paste below)
|
||||||
|
$ ansible --version
|
||||||
|
|
||||||
|
```
|
||||||
|
validations:
|
||||||
|
required: true
|
||||||
|
|
||||||
|
- type: textarea
|
||||||
|
attributes:
|
||||||
|
label: Community.general Version
|
||||||
|
description: >-
|
||||||
|
Paste verbatim output from "ansible-galaxy collection list community.general"
|
||||||
|
between tripple backticks.
|
||||||
|
value: |
|
||||||
|
```console (paste below)
|
||||||
|
$ ansible-galaxy collection list community.general
|
||||||
|
|
||||||
|
```
|
||||||
|
validations:
|
||||||
|
required: true
|
||||||
|
|
||||||
|
- type: textarea
|
||||||
|
attributes:
|
||||||
|
label: Configuration
|
||||||
|
description: >-
|
||||||
|
If this issue has an example piece of YAML that can help to reproduce this problem, please provide it.
|
||||||
|
This can be a piece of YAML from, e.g., an automation, script, scene or configuration.
|
||||||
|
Paste verbatim output from `ansible-config dump --only-changed` between quotes
|
||||||
|
value: |
|
||||||
|
```console (paste below)
|
||||||
|
$ ansible-config dump --only-changed
|
||||||
|
|
||||||
|
```
|
||||||
|
|
||||||
|
|
||||||
|
- type: textarea
|
||||||
|
attributes:
|
||||||
|
label: OS / Environment
|
||||||
|
description: >-
|
||||||
|
Provide all relevant information below, e.g. target OS versions,
|
||||||
|
network device firmware, etc.
|
||||||
|
placeholder: RHEL 8, CentOS Stream etc.
|
||||||
|
validations:
|
||||||
|
required: false
|
||||||
|
|
||||||
|
|
||||||
|
- type: textarea
|
||||||
|
attributes:
|
||||||
|
label: Steps to Reproduce
|
||||||
|
description: |
|
||||||
|
Describe exactly how to reproduce the problem, using a minimal test-case. It would *really* help us understand your problem if you could also passed any playbooks, configs and commands you used.
|
||||||
|
|
||||||
|
**HINT:** You can paste https://gist.github.com links for larger files.
|
||||||
|
value: |
|
||||||
|
<!--- Paste example playbooks or commands between quotes below -->
|
||||||
|
```yaml (paste below)
|
||||||
|
|
||||||
|
```
|
||||||
|
validations:
|
||||||
|
required: true
|
||||||
|
|
||||||
|
- type: textarea
|
||||||
|
attributes:
|
||||||
|
label: Expected Results
|
||||||
|
description: >-
|
||||||
|
Describe what you expected to happen when running the steps above.
|
||||||
|
placeholder: >-
|
||||||
|
I expected X to happen because I assumed Y.
|
||||||
|
that it did not.
|
||||||
|
validations:
|
||||||
|
required: true
|
||||||
|
|
||||||
|
- type: textarea
|
||||||
|
attributes:
|
||||||
|
label: Actual Results
|
||||||
|
description: |
|
||||||
|
Describe what actually happened. If possible run with extra verbosity (`-vvvv`).
|
||||||
|
|
||||||
|
Paste verbatim command output between quotes.
|
||||||
|
value: |
|
||||||
|
```console (paste below)
|
||||||
|
|
||||||
|
```
|
||||||
|
- type: checkboxes
|
||||||
|
attributes:
|
||||||
|
label: Code of Conduct
|
||||||
|
description: |
|
||||||
|
Read the [Ansible Code of Conduct](https://docs.ansible.com/projects/ansible/latest/community/code_of_conduct.html?utm_medium=github&utm_source=issue_form--ansible-collections) first.
|
||||||
|
options:
|
||||||
|
- label: I agree to follow the Ansible Code of Conduct
|
||||||
|
required: true
|
||||||
...
|
...
|
||||||
|
|||||||
42
.github/ISSUE_TEMPLATE/config.yml
vendored
42
.github/ISSUE_TEMPLATE/config.yml
vendored
@@ -6,26 +6,26 @@
|
|||||||
# Ref: https://help.github.com/en/github/building-a-strong-community/configuring-issue-templates-for-your-repository#configuring-the-template-chooser
|
# Ref: https://help.github.com/en/github/building-a-strong-community/configuring-issue-templates-for-your-repository#configuring-the-template-chooser
|
||||||
blank_issues_enabled: false # default: true
|
blank_issues_enabled: false # default: true
|
||||||
contact_links:
|
contact_links:
|
||||||
- name: Security bug report
|
- name: Security bug report
|
||||||
url: https://docs.ansible.com/ansible-core/devel/community/reporting_bugs_and_features.html?utm_medium=github&utm_source=issue_template_chooser_ansible_collections
|
url: https://docs.ansible.com/projects/ansible-core/devel/community/reporting_bugs_and_features.html?utm_medium=github&utm_source=issue_template_chooser_ansible_collections
|
||||||
about: |
|
about: |
|
||||||
Please learn how to report security vulnerabilities here.
|
Please learn how to report security vulnerabilities here.
|
||||||
|
|
||||||
For all security related bugs, email security@ansible.com
|
For all security related bugs, email security@ansible.com
|
||||||
instead of using this issue tracker and you will receive
|
instead of using this issue tracker and you will receive
|
||||||
a prompt response.
|
a prompt response.
|
||||||
|
|
||||||
For more information, see
|
For more information, see
|
||||||
https://docs.ansible.com/ansible/latest/community/reporting_bugs_and_features.html
|
https://docs.ansible.com/projects/ansible/latest/community/reporting_bugs_and_features.html
|
||||||
- name: Ansible Code of Conduct
|
- name: Ansible Code of Conduct
|
||||||
url: https://docs.ansible.com/ansible/latest/community/code_of_conduct.html?utm_medium=github&utm_source=issue_template_chooser_ansible_collections
|
url: https://docs.ansible.com/projects/ansible/latest/community/code_of_conduct.html?utm_medium=github&utm_source=issue_template_chooser_ansible_collections
|
||||||
about: Be nice to other members of the community.
|
about: Be nice to other members of the community.
|
||||||
- name: Talks to the community
|
- name: Talks to the community
|
||||||
url: https://docs.ansible.com/ansible/latest/community/communication.html?utm_medium=github&utm_source=issue_template_chooser#mailing-list-information
|
url: https://docs.ansible.com/projects/ansible/latest/community/communication.html?utm_medium=github&utm_source=issue_template_chooser#mailing-list-information
|
||||||
about: Please ask and answer usage questions here
|
about: Please ask and answer usage questions here
|
||||||
- name: Working groups
|
- name: Working groups
|
||||||
url: https://github.com/ansible/community/wiki
|
url: https://github.com/ansible/community/wiki
|
||||||
about: Interested in improving a specific area? Become a part of a working group!
|
about: Interested in improving a specific area? Become a part of a working group!
|
||||||
- name: For Enterprise
|
- name: For Enterprise
|
||||||
url: https://www.ansible.com/products/engine?utm_medium=github&utm_source=issue_template_chooser_ansible_collections
|
url: https://www.ansible.com/products/engine?utm_medium=github&utm_source=issue_template_chooser_ansible_collections
|
||||||
about: Red Hat offers support for the Ansible Automation Platform
|
about: Red Hat offers support for the Ansible Automation Platform
|
||||||
|
|||||||
226
.github/ISSUE_TEMPLATE/documentation_report.yml
vendored
226
.github/ISSUE_TEMPLATE/documentation_report.yml
vendored
@@ -8,122 +8,122 @@ description: Ask us about docs
|
|||||||
# NOTE: issue body is enabled to allow screenshots
|
# NOTE: issue body is enabled to allow screenshots
|
||||||
|
|
||||||
body:
|
body:
|
||||||
- type: markdown
|
- type: markdown
|
||||||
attributes:
|
attributes:
|
||||||
value: |
|
value: |
|
||||||
⚠
|
⚠
|
||||||
Verify first that your issue is not [already reported on GitHub][issue search].
|
Verify first that your issue is not [already reported on GitHub][issue search].
|
||||||
Also test if the latest release and devel branch are affected too.
|
Also test if the latest release and devel branch are affected too.
|
||||||
*Complete **all** sections as described, this form is processed automatically.*
|
*Complete **all** sections as described, this form is processed automatically.*
|
||||||
|
|
||||||
[issue search]: https://github.com/ansible-collections/community.general/search?q=is%3Aissue&type=issues
|
[issue search]: https://github.com/ansible-collections/community.general/search?q=is%3Aissue&type=issues
|
||||||
|
|
||||||
|
|
||||||
- type: textarea
|
- type: textarea
|
||||||
attributes:
|
attributes:
|
||||||
label: Summary
|
label: Summary
|
||||||
description: |
|
description: |
|
||||||
Explain the problem briefly below, add suggestions to wording or structure.
|
Explain the problem briefly below, add suggestions to wording or structure.
|
||||||
|
|
||||||
**HINT:** Did you know the documentation has an `Edit on GitHub` link on every page?
|
**HINT:** Did you know the documentation has an `Edit on GitHub` link on every page?
|
||||||
placeholder: >-
|
placeholder: >-
|
||||||
I was reading the Collection documentation of version X and I'm having
|
I was reading the Collection documentation of version X and I'm having
|
||||||
problems understanding Y. It would be very helpful if that got
|
problems understanding Y. It would be very helpful if that got
|
||||||
rephrased as Z.
|
rephrased as Z.
|
||||||
validations:
|
validations:
|
||||||
required: true
|
|
||||||
|
|
||||||
- type: dropdown
|
|
||||||
attributes:
|
|
||||||
label: Issue Type
|
|
||||||
# FIXME: Once GitHub allows defining the default choice, update this
|
|
||||||
options:
|
|
||||||
- Documentation Report
|
|
||||||
validations:
|
|
||||||
required: true
|
|
||||||
|
|
||||||
- type: input
|
|
||||||
attributes:
|
|
||||||
label: Component Name
|
|
||||||
description: >-
|
|
||||||
Write the short name of the file, module, plugin, task or feature below,
|
|
||||||
*use your best guess if unsure*. Do not include `community.general.`!
|
|
||||||
placeholder: mysql_user
|
|
||||||
validations:
|
|
||||||
required: true
|
|
||||||
|
|
||||||
- type: textarea
|
|
||||||
attributes:
|
|
||||||
label: Ansible Version
|
|
||||||
description: >-
|
|
||||||
Paste verbatim output from `ansible --version` between
|
|
||||||
tripple backticks.
|
|
||||||
value: |
|
|
||||||
```console (paste below)
|
|
||||||
$ ansible --version
|
|
||||||
|
|
||||||
```
|
|
||||||
validations:
|
|
||||||
required: false
|
|
||||||
|
|
||||||
- type: textarea
|
|
||||||
attributes:
|
|
||||||
label: Community.general Version
|
|
||||||
description: >-
|
|
||||||
Paste verbatim output from "ansible-galaxy collection list community.general"
|
|
||||||
between tripple backticks.
|
|
||||||
value: |
|
|
||||||
```console (paste below)
|
|
||||||
$ ansible-galaxy collection list community.general
|
|
||||||
|
|
||||||
```
|
|
||||||
validations:
|
|
||||||
required: true
|
|
||||||
|
|
||||||
- type: textarea
|
|
||||||
attributes:
|
|
||||||
label: Configuration
|
|
||||||
description: >-
|
|
||||||
Paste verbatim output from `ansible-config dump --only-changed` between quotes.
|
|
||||||
value: |
|
|
||||||
```console (paste below)
|
|
||||||
$ ansible-config dump --only-changed
|
|
||||||
|
|
||||||
```
|
|
||||||
validations:
|
|
||||||
required: false
|
|
||||||
|
|
||||||
- type: textarea
|
|
||||||
attributes:
|
|
||||||
label: OS / Environment
|
|
||||||
description: >-
|
|
||||||
Provide all relevant information below, e.g. OS version,
|
|
||||||
browser, etc.
|
|
||||||
placeholder: Fedora 33, Firefox etc.
|
|
||||||
validations:
|
|
||||||
required: false
|
|
||||||
|
|
||||||
- type: textarea
|
|
||||||
attributes:
|
|
||||||
label: Additional Information
|
|
||||||
description: |
|
|
||||||
Describe how this improves the documentation, e.g. before/after situation or screenshots.
|
|
||||||
|
|
||||||
**Tip:** It's not possible to upload the screenshot via this field directly but you can use the last textarea in this form to attach them.
|
|
||||||
|
|
||||||
**HINT:** You can paste https://gist.github.com links for larger files.
|
|
||||||
placeholder: >-
|
|
||||||
When the improvement is applied, it makes it more straightforward
|
|
||||||
to understand X.
|
|
||||||
validations:
|
|
||||||
required: false
|
|
||||||
|
|
||||||
- type: checkboxes
|
|
||||||
attributes:
|
|
||||||
label: Code of Conduct
|
|
||||||
description: |
|
|
||||||
Read the [Ansible Code of Conduct](https://docs.ansible.com/ansible/latest/community/code_of_conduct.html?utm_medium=github&utm_source=issue_form--ansible-collections) first.
|
|
||||||
options:
|
|
||||||
- label: I agree to follow the Ansible Code of Conduct
|
|
||||||
required: true
|
required: true
|
||||||
|
|
||||||
|
- type: dropdown
|
||||||
|
attributes:
|
||||||
|
label: Issue Type
|
||||||
|
# FIXME: Once GitHub allows defining the default choice, update this
|
||||||
|
options:
|
||||||
|
- Documentation Report
|
||||||
|
validations:
|
||||||
|
required: true
|
||||||
|
|
||||||
|
- type: input
|
||||||
|
attributes:
|
||||||
|
label: Component Name
|
||||||
|
description: >-
|
||||||
|
Write the short name of the file, module, plugin, task or feature below,
|
||||||
|
*use your best guess if unsure*. Do not include `community.general.`!
|
||||||
|
placeholder: mysql_user
|
||||||
|
validations:
|
||||||
|
required: true
|
||||||
|
|
||||||
|
- type: textarea
|
||||||
|
attributes:
|
||||||
|
label: Ansible Version
|
||||||
|
description: >-
|
||||||
|
Paste verbatim output from `ansible --version` between
|
||||||
|
tripple backticks.
|
||||||
|
value: |
|
||||||
|
```console (paste below)
|
||||||
|
$ ansible --version
|
||||||
|
|
||||||
|
```
|
||||||
|
validations:
|
||||||
|
required: false
|
||||||
|
|
||||||
|
- type: textarea
|
||||||
|
attributes:
|
||||||
|
label: Community.general Version
|
||||||
|
description: >-
|
||||||
|
Paste verbatim output from "ansible-galaxy collection list community.general"
|
||||||
|
between tripple backticks.
|
||||||
|
value: |
|
||||||
|
```console (paste below)
|
||||||
|
$ ansible-galaxy collection list community.general
|
||||||
|
|
||||||
|
```
|
||||||
|
validations:
|
||||||
|
required: true
|
||||||
|
|
||||||
|
- type: textarea
|
||||||
|
attributes:
|
||||||
|
label: Configuration
|
||||||
|
description: >-
|
||||||
|
Paste verbatim output from `ansible-config dump --only-changed` between quotes.
|
||||||
|
value: |
|
||||||
|
```console (paste below)
|
||||||
|
$ ansible-config dump --only-changed
|
||||||
|
|
||||||
|
```
|
||||||
|
validations:
|
||||||
|
required: false
|
||||||
|
|
||||||
|
- type: textarea
|
||||||
|
attributes:
|
||||||
|
label: OS / Environment
|
||||||
|
description: >-
|
||||||
|
Provide all relevant information below, e.g. OS version,
|
||||||
|
browser, etc.
|
||||||
|
placeholder: Fedora 33, Firefox etc.
|
||||||
|
validations:
|
||||||
|
required: false
|
||||||
|
|
||||||
|
- type: textarea
|
||||||
|
attributes:
|
||||||
|
label: Additional Information
|
||||||
|
description: |
|
||||||
|
Describe how this improves the documentation, e.g. before/after situation or screenshots.
|
||||||
|
|
||||||
|
**Tip:** It's not possible to upload the screenshot via this field directly but you can use the last textarea in this form to attach them.
|
||||||
|
|
||||||
|
**HINT:** You can paste https://gist.github.com links for larger files.
|
||||||
|
placeholder: >-
|
||||||
|
When the improvement is applied, it makes it more straightforward
|
||||||
|
to understand X.
|
||||||
|
validations:
|
||||||
|
required: false
|
||||||
|
|
||||||
|
- type: checkboxes
|
||||||
|
attributes:
|
||||||
|
label: Code of Conduct
|
||||||
|
description: |
|
||||||
|
Read the [Ansible Code of Conduct](https://docs.ansible.com/projects/ansible/latest/community/code_of_conduct.html?utm_medium=github&utm_source=issue_form--ansible-collections) first.
|
||||||
|
options:
|
||||||
|
- label: I agree to follow the Ansible Code of Conduct
|
||||||
|
required: true
|
||||||
...
|
...
|
||||||
|
|||||||
118
.github/ISSUE_TEMPLATE/feature_request.yml
vendored
118
.github/ISSUE_TEMPLATE/feature_request.yml
vendored
@@ -7,67 +7,67 @@ name: Feature request
|
|||||||
description: Suggest an idea for this project
|
description: Suggest an idea for this project
|
||||||
|
|
||||||
body:
|
body:
|
||||||
- type: markdown
|
- type: markdown
|
||||||
attributes:
|
attributes:
|
||||||
value: |
|
value: |
|
||||||
⚠
|
⚠
|
||||||
Verify first that your issue is not [already reported on GitHub][issue search].
|
Verify first that your issue is not [already reported on GitHub][issue search].
|
||||||
Also test if the latest release and devel branch are affected too.
|
Also test if the latest release and devel branch are affected too.
|
||||||
*Complete **all** sections as described, this form is processed automatically.*
|
*Complete **all** sections as described, this form is processed automatically.*
|
||||||
|
|
||||||
[issue search]: https://github.com/ansible-collections/community.general/search?q=is%3Aissue&type=issues
|
[issue search]: https://github.com/ansible-collections/community.general/search?q=is%3Aissue&type=issues
|
||||||
|
|
||||||
|
|
||||||
- type: textarea
|
- type: textarea
|
||||||
attributes:
|
attributes:
|
||||||
label: Summary
|
label: Summary
|
||||||
description: Describe the new feature/improvement briefly below.
|
description: Describe the new feature/improvement briefly below.
|
||||||
placeholder: >-
|
placeholder: >-
|
||||||
I am trying to do X with the collection from the main branch on GitHub and
|
I am trying to do X with the collection from the main branch on GitHub and
|
||||||
I think that implementing a feature Y would be very helpful for me and
|
I think that implementing a feature Y would be very helpful for me and
|
||||||
every other user of community.general because of Z.
|
every other user of community.general because of Z.
|
||||||
validations:
|
validations:
|
||||||
required: true
|
|
||||||
|
|
||||||
- type: dropdown
|
|
||||||
attributes:
|
|
||||||
label: Issue Type
|
|
||||||
# FIXME: Once GitHub allows defining the default choice, update this
|
|
||||||
options:
|
|
||||||
- Feature Idea
|
|
||||||
validations:
|
|
||||||
required: true
|
|
||||||
|
|
||||||
- type: input
|
|
||||||
attributes:
|
|
||||||
label: Component Name
|
|
||||||
description: >-
|
|
||||||
Write the short name of the module or plugin, or which other part(s) of the collection this feature affects.
|
|
||||||
*use your best guess if unsure*. Do not include `community.general.`!
|
|
||||||
placeholder: dnf, apt, yum, pip, user etc.
|
|
||||||
validations:
|
|
||||||
required: true
|
|
||||||
|
|
||||||
- type: textarea
|
|
||||||
attributes:
|
|
||||||
label: Additional Information
|
|
||||||
description: |
|
|
||||||
Describe how the feature would be used, why it is needed and what it would solve.
|
|
||||||
|
|
||||||
**HINT:** You can paste https://gist.github.com links for larger files.
|
|
||||||
value: |
|
|
||||||
<!--- Paste example playbooks or commands between quotes below -->
|
|
||||||
```yaml (paste below)
|
|
||||||
|
|
||||||
```
|
|
||||||
validations:
|
|
||||||
required: false
|
|
||||||
- type: checkboxes
|
|
||||||
attributes:
|
|
||||||
label: Code of Conduct
|
|
||||||
description: |
|
|
||||||
Read the [Ansible Code of Conduct](https://docs.ansible.com/ansible/latest/community/code_of_conduct.html?utm_medium=github&utm_source=issue_form--ansible-collections) first.
|
|
||||||
options:
|
|
||||||
- label: I agree to follow the Ansible Code of Conduct
|
|
||||||
required: true
|
required: true
|
||||||
|
|
||||||
|
- type: dropdown
|
||||||
|
attributes:
|
||||||
|
label: Issue Type
|
||||||
|
# FIXME: Once GitHub allows defining the default choice, update this
|
||||||
|
options:
|
||||||
|
- Feature Idea
|
||||||
|
validations:
|
||||||
|
required: true
|
||||||
|
|
||||||
|
- type: input
|
||||||
|
attributes:
|
||||||
|
label: Component Name
|
||||||
|
description: >-
|
||||||
|
Write the short name of the module or plugin, or which other part(s) of the collection this feature affects.
|
||||||
|
*use your best guess if unsure*. Do not include `community.general.`!
|
||||||
|
placeholder: dnf, apt, yum, pip, user etc.
|
||||||
|
validations:
|
||||||
|
required: true
|
||||||
|
|
||||||
|
- type: textarea
|
||||||
|
attributes:
|
||||||
|
label: Additional Information
|
||||||
|
description: |
|
||||||
|
Describe how the feature would be used, why it is needed and what it would solve.
|
||||||
|
|
||||||
|
**HINT:** You can paste https://gist.github.com links for larger files.
|
||||||
|
value: |
|
||||||
|
<!--- Paste example playbooks or commands between quotes below -->
|
||||||
|
```yaml (paste below)
|
||||||
|
|
||||||
|
```
|
||||||
|
validations:
|
||||||
|
required: false
|
||||||
|
- type: checkboxes
|
||||||
|
attributes:
|
||||||
|
label: Code of Conduct
|
||||||
|
description: |
|
||||||
|
Read the [Ansible Code of Conduct](https://docs.ansible.com/projects/ansible/latest/community/code_of_conduct.html?utm_medium=github&utm_source=issue_form--ansible-collections) first.
|
||||||
|
options:
|
||||||
|
- label: I agree to follow the Ansible Code of Conduct
|
||||||
|
required: true
|
||||||
...
|
...
|
||||||
|
|||||||
2
.github/pull_request_template.md
vendored
2
.github/pull_request_template.md
vendored
@@ -4,7 +4,7 @@
|
|||||||
<!--- HINT: Include "Fixes #nnn" if you are fixing an existing issue -->
|
<!--- HINT: Include "Fixes #nnn" if you are fixing an existing issue -->
|
||||||
|
|
||||||
<!--- Please do not forget to include a changelog fragment:
|
<!--- Please do not forget to include a changelog fragment:
|
||||||
https://docs.ansible.com/ansible/devel/community/collection_development_process.html#creating-changelog-fragments
|
https://docs.ansible.com/projects/ansible/devel/community/collection_development_process.html#creating-changelog-fragments
|
||||||
No need to include one for docs-only or test-only PR, and for new plugin/module PRs.
|
No need to include one for docs-only or test-only PR, and for new plugin/module PRs.
|
||||||
Read about more details in CONTRIBUTING.md.
|
Read about more details in CONTRIBUTING.md.
|
||||||
-->
|
-->
|
||||||
|
|||||||
161
.github/workflows/ansible-test.yml
vendored
161
.github/workflows/ansible-test.yml
vendored
@@ -7,7 +7,7 @@
|
|||||||
# https://github.com/marketplace/actions/ansible-test
|
# https://github.com/marketplace/actions/ansible-test
|
||||||
|
|
||||||
name: EOL CI
|
name: EOL CI
|
||||||
on:
|
"on":
|
||||||
# Run EOL CI against all pushes (direct commits, also merged PRs), Pull Requests
|
# Run EOL CI against all pushes (direct commits, also merged PRs), Pull Requests
|
||||||
push:
|
push:
|
||||||
branches:
|
branches:
|
||||||
@@ -29,8 +29,9 @@ jobs:
|
|||||||
strategy:
|
strategy:
|
||||||
matrix:
|
matrix:
|
||||||
ansible:
|
ansible:
|
||||||
- '2.13'
|
- '2.15'
|
||||||
- '2.14'
|
- '2.16'
|
||||||
|
- '2.17'
|
||||||
# Ansible-test on various stable branches does not yet work well with cgroups v2.
|
# Ansible-test on various stable branches does not yet work well with cgroups v2.
|
||||||
# Since ubuntu-latest now uses Ubuntu 22.04, we need to fall back to the ubuntu-20.04
|
# Since ubuntu-latest now uses Ubuntu 22.04, we need to fall back to the ubuntu-20.04
|
||||||
# image for these stable branches. The list of branches where this is necessary will
|
# image for these stable branches. The list of branches where this is necessary will
|
||||||
@@ -46,6 +47,8 @@ jobs:
|
|||||||
coverage: ${{ github.event_name == 'schedule' && 'always' || 'never' }}
|
coverage: ${{ github.event_name == 'schedule' && 'always' || 'never' }}
|
||||||
pull-request-change-detection: 'true'
|
pull-request-change-detection: 'true'
|
||||||
testing-type: sanity
|
testing-type: sanity
|
||||||
|
pre-test-cmd: >-
|
||||||
|
git clone --depth=1 --single-branch https://github.com/ansible-collections/community.internal_test_tools.git ../../community/internal_test_tools
|
||||||
|
|
||||||
units:
|
units:
|
||||||
# Ansible-test on various stable branches does not yet work well with cgroups v2.
|
# Ansible-test on various stable branches does not yet work well with cgroups v2.
|
||||||
@@ -66,16 +69,24 @@ jobs:
|
|||||||
exclude:
|
exclude:
|
||||||
- ansible: ''
|
- ansible: ''
|
||||||
include:
|
include:
|
||||||
- ansible: '2.13'
|
- ansible: '2.15'
|
||||||
python: '2.7'
|
python: '2.7'
|
||||||
- ansible: '2.13'
|
- ansible: '2.15'
|
||||||
python: '3.8'
|
python: '3.5'
|
||||||
- ansible: '2.13'
|
- ansible: '2.15'
|
||||||
|
python: '3.10'
|
||||||
|
- ansible: '2.16'
|
||||||
python: '2.7'
|
python: '2.7'
|
||||||
- ansible: '2.13'
|
- ansible: '2.16'
|
||||||
python: '3.8'
|
python: '3.6'
|
||||||
- ansible: '2.14'
|
- ansible: '2.16'
|
||||||
python: '3.9'
|
python: '3.11'
|
||||||
|
- ansible: '2.17'
|
||||||
|
python: '3.7'
|
||||||
|
- ansible: '2.17'
|
||||||
|
python: '3.10'
|
||||||
|
- ansible: '2.17'
|
||||||
|
python: '3.12'
|
||||||
|
|
||||||
steps:
|
steps:
|
||||||
- name: >-
|
- name: >-
|
||||||
@@ -116,66 +127,105 @@ jobs:
|
|||||||
exclude:
|
exclude:
|
||||||
- ansible: ''
|
- ansible: ''
|
||||||
include:
|
include:
|
||||||
# 2.13
|
# 2.15
|
||||||
- ansible: '2.13'
|
- ansible: '2.15'
|
||||||
docker: fedora35
|
|
||||||
python: ''
|
|
||||||
target: azp/posix/1/
|
|
||||||
- ansible: '2.13'
|
|
||||||
docker: fedora35
|
|
||||||
python: ''
|
|
||||||
target: azp/posix/2/
|
|
||||||
- ansible: '2.13'
|
|
||||||
docker: fedora35
|
|
||||||
python: ''
|
|
||||||
target: azp/posix/3/
|
|
||||||
- ansible: '2.13'
|
|
||||||
docker: opensuse15py2
|
|
||||||
python: ''
|
|
||||||
target: azp/posix/1/
|
|
||||||
- ansible: '2.13'
|
|
||||||
docker: opensuse15py2
|
|
||||||
python: ''
|
|
||||||
target: azp/posix/2/
|
|
||||||
- ansible: '2.13'
|
|
||||||
docker: opensuse15py2
|
|
||||||
python: ''
|
|
||||||
target: azp/posix/3/
|
|
||||||
- ansible: '2.13'
|
|
||||||
docker: alpine3
|
docker: alpine3
|
||||||
python: ''
|
python: ''
|
||||||
target: azp/posix/1/
|
target: azp/posix/1/
|
||||||
- ansible: '2.13'
|
- ansible: '2.15'
|
||||||
docker: alpine3
|
docker: alpine3
|
||||||
python: ''
|
python: ''
|
||||||
target: azp/posix/2/
|
target: azp/posix/2/
|
||||||
- ansible: '2.13'
|
- ansible: '2.15'
|
||||||
docker: alpine3
|
docker: alpine3
|
||||||
python: ''
|
python: ''
|
||||||
target: azp/posix/3/
|
target: azp/posix/3/
|
||||||
# 2.14
|
- ansible: '2.15'
|
||||||
- ansible: '2.14'
|
docker: fedora37
|
||||||
|
python: ''
|
||||||
|
target: azp/posix/1/
|
||||||
|
- ansible: '2.15'
|
||||||
|
docker: fedora37
|
||||||
|
python: ''
|
||||||
|
target: azp/posix/2/
|
||||||
|
- ansible: '2.15'
|
||||||
|
docker: fedora37
|
||||||
|
python: ''
|
||||||
|
target: azp/posix/3/
|
||||||
|
# 2.16
|
||||||
|
- ansible: '2.16'
|
||||||
docker: alpine3
|
docker: alpine3
|
||||||
python: ''
|
python: ''
|
||||||
target: azp/posix/1/
|
target: azp/posix/1/
|
||||||
- ansible: '2.14'
|
- ansible: '2.16'
|
||||||
docker: alpine3
|
docker: alpine3
|
||||||
python: ''
|
python: ''
|
||||||
target: azp/posix/2/
|
target: azp/posix/2/
|
||||||
- ansible: '2.14'
|
- ansible: '2.16'
|
||||||
docker: alpine3
|
docker: alpine3
|
||||||
python: ''
|
python: ''
|
||||||
target: azp/posix/3/
|
target: azp/posix/3/
|
||||||
# Right now all generic tests are disabled. Uncomment when at least one of them is re-enabled.
|
- ansible: '2.16'
|
||||||
# - ansible: '2.13'
|
docker: fedora38
|
||||||
# docker: default
|
python: ''
|
||||||
# python: '3.9'
|
target: azp/posix/1/
|
||||||
# target: azp/generic/1/
|
- ansible: '2.16'
|
||||||
# Right now all generic tests are disabled. Uncomment when at least one of them is re-enabled.
|
docker: fedora38
|
||||||
# - ansible: '2.14'
|
python: ''
|
||||||
# docker: default
|
target: azp/posix/2/
|
||||||
# python: '3.10'
|
- ansible: '2.16'
|
||||||
# target: azp/generic/1/
|
docker: fedora38
|
||||||
|
python: ''
|
||||||
|
target: azp/posix/3/
|
||||||
|
- ansible: '2.16'
|
||||||
|
docker: opensuse15
|
||||||
|
python: ''
|
||||||
|
target: azp/posix/1/
|
||||||
|
- ansible: '2.16'
|
||||||
|
docker: opensuse15
|
||||||
|
python: ''
|
||||||
|
target: azp/posix/2/
|
||||||
|
- ansible: '2.16'
|
||||||
|
docker: opensuse15
|
||||||
|
python: ''
|
||||||
|
target: azp/posix/3/
|
||||||
|
# 2.17
|
||||||
|
- ansible: '2.17'
|
||||||
|
docker: fedora39
|
||||||
|
python: ''
|
||||||
|
target: azp/posix/1/
|
||||||
|
- ansible: '2.17'
|
||||||
|
docker: fedora39
|
||||||
|
python: ''
|
||||||
|
target: azp/posix/2/
|
||||||
|
- ansible: '2.17'
|
||||||
|
docker: fedora39
|
||||||
|
python: ''
|
||||||
|
target: azp/posix/3/
|
||||||
|
- ansible: '2.17'
|
||||||
|
docker: alpine319
|
||||||
|
python: ''
|
||||||
|
target: azp/posix/1/
|
||||||
|
- ansible: '2.17'
|
||||||
|
docker: alpine319
|
||||||
|
python: ''
|
||||||
|
target: azp/posix/2/
|
||||||
|
- ansible: '2.17'
|
||||||
|
docker: alpine319
|
||||||
|
python: ''
|
||||||
|
target: azp/posix/3/
|
||||||
|
- ansible: '2.17'
|
||||||
|
docker: ubuntu2004
|
||||||
|
python: ''
|
||||||
|
target: azp/posix/1/
|
||||||
|
- ansible: '2.17'
|
||||||
|
docker: ubuntu2004
|
||||||
|
python: ''
|
||||||
|
target: azp/posix/2/
|
||||||
|
- ansible: '2.17'
|
||||||
|
docker: ubuntu2004
|
||||||
|
python: ''
|
||||||
|
target: azp/posix/3/
|
||||||
|
|
||||||
steps:
|
steps:
|
||||||
- name: >-
|
- name: >-
|
||||||
@@ -191,12 +241,15 @@ jobs:
|
|||||||
integration-continue-on-error: 'false'
|
integration-continue-on-error: 'false'
|
||||||
integration-diff: 'false'
|
integration-diff: 'false'
|
||||||
integration-retry-on-error: 'true'
|
integration-retry-on-error: 'true'
|
||||||
|
# TODO: remove "--branch stable-2" from community.crypto install once we're only using ansible-core 2.17 or newer!
|
||||||
pre-test-cmd: >-
|
pre-test-cmd: >-
|
||||||
mkdir -p ../../ansible
|
mkdir -p ../../ansible
|
||||||
;
|
;
|
||||||
git clone --depth=1 --single-branch https://github.com/ansible-collections/ansible.posix.git ../../ansible/posix
|
git clone --depth=1 --single-branch https://github.com/ansible-collections/ansible.posix.git ../../ansible/posix
|
||||||
;
|
;
|
||||||
git clone --depth=1 --single-branch https://github.com/ansible-collections/community.crypto.git ../../community/crypto
|
git clone --depth=1 --single-branch --branch stable-2 https://github.com/ansible-collections/community.crypto.git ../../community/crypto
|
||||||
|
;
|
||||||
|
git clone --depth=1 --single-branch https://github.com/ansible-collections/community.docker.git ../../community/docker
|
||||||
;
|
;
|
||||||
git clone --depth=1 --single-branch https://github.com/ansible-collections/community.internal_test_tools.git ../../community/internal_test_tools
|
git clone --depth=1 --single-branch https://github.com/ansible-collections/community.internal_test_tools.git ../../community/internal_test_tools
|
||||||
pull-request-change-detection: 'true'
|
pull-request-change-detection: 'true'
|
||||||
|
|||||||
22
.github/workflows/codeql-analysis.yml
vendored
22
.github/workflows/codeql-analysis.yml
vendored
@@ -5,7 +5,7 @@
|
|||||||
|
|
||||||
name: "Code scanning - action"
|
name: "Code scanning - action"
|
||||||
|
|
||||||
on:
|
"on":
|
||||||
schedule:
|
schedule:
|
||||||
- cron: '26 19 * * 1'
|
- cron: '26 19 * * 1'
|
||||||
workflow_dispatch:
|
workflow_dispatch:
|
||||||
@@ -23,14 +23,16 @@ jobs:
|
|||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
|
|
||||||
steps:
|
steps:
|
||||||
- name: Checkout repository
|
- name: Checkout repository
|
||||||
uses: actions/checkout@v4
|
uses: actions/checkout@v6
|
||||||
|
with:
|
||||||
|
persist-credentials: false
|
||||||
|
|
||||||
# Initializes the CodeQL tools for scanning.
|
# Initializes the CodeQL tools for scanning.
|
||||||
- name: Initialize CodeQL
|
- name: Initialize CodeQL
|
||||||
uses: github/codeql-action/init@v3
|
uses: github/codeql-action/init@v4
|
||||||
with:
|
with:
|
||||||
languages: python
|
languages: python
|
||||||
|
|
||||||
- name: Perform CodeQL Analysis
|
- name: Perform CodeQL Analysis
|
||||||
uses: github/codeql-action/analyze@v3
|
uses: github/codeql-action/analyze@v4
|
||||||
|
|||||||
20
.github/workflows/import-galaxy.yml
vendored
20
.github/workflows/import-galaxy.yml
vendored
@@ -1,20 +0,0 @@
|
|||||||
---
|
|
||||||
# Copyright (c) Ansible Project
|
|
||||||
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
|
|
||||||
# SPDX-License-Identifier: GPL-3.0-or-later
|
|
||||||
|
|
||||||
name: import-galaxy
|
|
||||||
'on':
|
|
||||||
# Run CI against all pushes (direct commits, also merged PRs) to main, and all Pull Requests
|
|
||||||
push:
|
|
||||||
branches:
|
|
||||||
- main
|
|
||||||
- stable-*
|
|
||||||
pull_request:
|
|
||||||
|
|
||||||
jobs:
|
|
||||||
import-galaxy:
|
|
||||||
permissions:
|
|
||||||
contents: read
|
|
||||||
name: Test to import built collection artifact with Galaxy importer
|
|
||||||
uses: ansible-community/github-action-test-galaxy-import/.github/workflows/test-galaxy-import.yml@main
|
|
||||||
28
.github/workflows/nox.yml
vendored
Normal file
28
.github/workflows/nox.yml
vendored
Normal file
@@ -0,0 +1,28 @@
|
|||||||
|
---
|
||||||
|
# Copyright (c) Ansible Project
|
||||||
|
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||||
|
# SPDX-License-Identifier: GPL-3.0-or-later
|
||||||
|
|
||||||
|
name: nox
|
||||||
|
'on':
|
||||||
|
push:
|
||||||
|
branches:
|
||||||
|
- main
|
||||||
|
- stable-*
|
||||||
|
pull_request:
|
||||||
|
# Run CI once per day (at 08:00 UTC)
|
||||||
|
schedule:
|
||||||
|
- cron: '0 8 * * *'
|
||||||
|
workflow_dispatch:
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
nox:
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
name: "Run extra sanity tests"
|
||||||
|
steps:
|
||||||
|
- name: Check out collection
|
||||||
|
uses: actions/checkout@v6
|
||||||
|
with:
|
||||||
|
persist-credentials: false
|
||||||
|
- name: Run nox
|
||||||
|
uses: ansible-community/antsibull-nox@main
|
||||||
30
.github/workflows/reuse.yml
vendored
30
.github/workflows/reuse.yml
vendored
@@ -1,30 +0,0 @@
|
|||||||
---
|
|
||||||
# Copyright (c) Ansible Project
|
|
||||||
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
|
|
||||||
# SPDX-License-Identifier: GPL-3.0-or-later
|
|
||||||
|
|
||||||
name: Verify REUSE
|
|
||||||
|
|
||||||
on:
|
|
||||||
push:
|
|
||||||
branches: [main]
|
|
||||||
pull_request_target:
|
|
||||||
types: [opened, synchronize, reopened]
|
|
||||||
branches: [main]
|
|
||||||
# Run CI once per day (at 07:30 UTC)
|
|
||||||
schedule:
|
|
||||||
- cron: '30 7 * * *'
|
|
||||||
|
|
||||||
jobs:
|
|
||||||
check:
|
|
||||||
permissions:
|
|
||||||
contents: read
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
|
|
||||||
steps:
|
|
||||||
- uses: actions/checkout@v4
|
|
||||||
with:
|
|
||||||
ref: ${{ github.event.pull_request.head.sha || '' }}
|
|
||||||
|
|
||||||
- name: REUSE Compliance Check
|
|
||||||
uses: fsfe/reuse-action@v4
|
|
||||||
14
.gitignore
vendored
14
.gitignore
vendored
@@ -383,6 +383,16 @@ cython_debug/
|
|||||||
# option (not recommended) you can uncomment the following to ignore the entire idea folder.
|
# option (not recommended) you can uncomment the following to ignore the entire idea folder.
|
||||||
#.idea/
|
#.idea/
|
||||||
|
|
||||||
|
### Python Patch ###
|
||||||
|
# Poetry local configuration file - https://python-poetry.org/docs/configuration/#local-configuration
|
||||||
|
poetry.toml
|
||||||
|
|
||||||
|
# ruff
|
||||||
|
.ruff_cache/
|
||||||
|
|
||||||
|
# LSP config files
|
||||||
|
pyrightconfig.json
|
||||||
|
|
||||||
### Vim ###
|
### Vim ###
|
||||||
# Swap
|
# Swap
|
||||||
[._]*.s[a-v][a-z]
|
[._]*.s[a-v][a-z]
|
||||||
@@ -482,6 +492,10 @@ tags
|
|||||||
# https://plugins.jetbrains.com/plugin/12206-codestream
|
# https://plugins.jetbrains.com/plugin/12206-codestream
|
||||||
.idea/codestream.xml
|
.idea/codestream.xml
|
||||||
|
|
||||||
|
# Azure Toolkit for IntelliJ plugin
|
||||||
|
# https://plugins.jetbrains.com/plugin/8053-azure-toolkit-for-intellij
|
||||||
|
.idea/**/azureSettings.xml
|
||||||
|
|
||||||
### Windows ###
|
### Windows ###
|
||||||
# Windows thumbnail cache files
|
# Windows thumbnail cache files
|
||||||
Thumbs.db
|
Thumbs.db
|
||||||
|
|||||||
@@ -1,5 +0,0 @@
|
|||||||
Format: https://www.debian.org/doc/packaging-manuals/copyright-format/1.0/
|
|
||||||
|
|
||||||
Files: changelogs/fragments/*
|
|
||||||
Copyright: Ansible Project
|
|
||||||
License: GPL-3.0-or-later
|
|
||||||
1852
CHANGELOG.md
1852
CHANGELOG.md
File diff suppressed because it is too large
Load Diff
1599
CHANGELOG.rst
1599
CHANGELOG.rst
File diff suppressed because it is too large
Load Diff
100
CONTRIBUTING.md
100
CONTRIBUTING.md
@@ -6,7 +6,7 @@ SPDX-License-Identifier: GPL-3.0-or-later
|
|||||||
|
|
||||||
# Contributing
|
# Contributing
|
||||||
|
|
||||||
We follow [Ansible Code of Conduct](https://docs.ansible.com/ansible/latest/community/code_of_conduct.html) in all our contributions and interactions within this repository.
|
We follow [Ansible Code of Conduct](https://docs.ansible.com/projects/ansible/latest/community/code_of_conduct.html) in all our contributions and interactions within this repository.
|
||||||
|
|
||||||
If you are a committer, also refer to the [collection's committer guidelines](https://github.com/ansible-collections/community.general/blob/main/commit-rights.md).
|
If you are a committer, also refer to the [collection's committer guidelines](https://github.com/ansible-collections/community.general/blob/main/commit-rights.md).
|
||||||
|
|
||||||
@@ -20,33 +20,80 @@ so you can cooperate to create a better solution together.
|
|||||||
* If you are interested in starting with an easy issue, look for [issues with an `easyfix` label](https://github.com/ansible-collections/community.general/labels/easyfix).
|
* If you are interested in starting with an easy issue, look for [issues with an `easyfix` label](https://github.com/ansible-collections/community.general/labels/easyfix).
|
||||||
* Often issues that are waiting for contributors to pick up have [the `waiting_on_contributor` label](https://github.com/ansible-collections/community.general/labels/waiting_on_contributor).
|
* Often issues that are waiting for contributors to pick up have [the `waiting_on_contributor` label](https://github.com/ansible-collections/community.general/labels/waiting_on_contributor).
|
||||||
|
|
||||||
## Open pull requests
|
## Review pull requests
|
||||||
|
|
||||||
Look through currently [open pull requests](https://github.com/ansible-collections/community.general/pulls).
|
Look through currently [open pull requests](https://github.com/ansible-collections/community.general/pulls).
|
||||||
|
|
||||||
You can help by reviewing them. Reviews help move pull requests to merge state. Some good pull requests cannot be merged only due to a lack of reviews. And it is always worth saying that good reviews are often more valuable than pull requests themselves.
|
You can help by reviewing them. Reviews help move pull requests to merge state. Some good pull requests cannot be merged only due to a lack of reviews. And it is always worth saying that good reviews are often more valuable than pull requests themselves.
|
||||||
Note that reviewing does not only mean code review, but also offering comments on new interfaces added to existing plugins/modules, interfaces of new plugins/modules, improving language (not everyone is a native english speaker), or testing bugfixes and new features!
|
Note that reviewing does not only mean code review, but also offering comments on new interfaces added to existing plugins/modules, interfaces of new plugins/modules, improving language (not everyone is a native English speaker), or testing bugfixes and new features!
|
||||||
|
|
||||||
Also, consider taking up a valuable, reviewed, but abandoned pull request which you could politely ask the original authors to complete yourself.
|
Also, consider taking up a valuable, reviewed, but abandoned pull request which you could politely ask the original authors to complete yourself.
|
||||||
|
|
||||||
|
## Open pull requests
|
||||||
|
|
||||||
|
Please read our ['Contributing to collections'](https://docs.ansible.com/projects/ansible/devel/dev_guide/developing_collections_contributing.html#contributing-to-a-collection-community-general) guide.
|
||||||
|
|
||||||
* Try committing your changes with an informative but short commit message.
|
* Try committing your changes with an informative but short commit message.
|
||||||
* Do not squash your commits and force-push to your branch if not needed. Reviews of your pull request are much easier with individual commits to comprehend the pull request history. All commits of your pull request branch will be squashed into one commit by GitHub upon merge.
|
* Do not squash your commits and force-push to your branch if not needed. Reviews of your pull request are much easier with individual commits to comprehend the pull request history. All commits of your pull request branch will be squashed into one commit by GitHub upon merge.
|
||||||
* Do not add merge commits to your PR. The bot will complain and you will have to rebase ([instructions for rebasing](https://docs.ansible.com/ansible/latest/dev_guide/developing_rebasing.html)) to remove them before your PR can be merged. To avoid that git automatically does merges during pulls, you can configure it to do rebases instead by running `git config pull.rebase true` inside the repository checkout.
|
* Do not add merge commits to your PR. The bot will complain and you will have to rebase ([instructions for rebasing](https://docs.ansible.com/projects/ansible/latest/dev_guide/developing_rebasing.html)) to remove them before your PR can be merged. To avoid that git automatically does merges during pulls, you can configure it to do rebases instead by running `git config pull.rebase true` inside the repository checkout.
|
||||||
* Make sure your PR includes a [changelog fragment](https://docs.ansible.com/ansible/devel/community/collection_development_process.html#creating-a-changelog-fragment).
|
* Make sure your PR includes a [changelog fragment](https://docs.ansible.com/projects/ansible/devel/community/collection_development_process.html#creating-a-changelog-fragment).
|
||||||
* You must not include a fragment for new modules or new plugins. Also you shouldn't include one for docs-only changes. (If you're not sure, simply don't include one, we'll tell you whether one is needed or not :) )
|
* You must not include a fragment for new modules or new plugins. Also you shouldn't include one for docs-only changes. (If you're not sure, simply don't include one, we'll tell you whether one is needed or not :) )
|
||||||
* Please always include a link to the pull request itself, and if the PR is about an issue, also a link to the issue. Also make sure the fragment ends with a period, and begins with a lower-case letter after `-`. (Again, if you don't do this, we'll add suggestions to fix it, so don't worry too much :) )
|
* Please always include a link to the pull request itself, and if the PR is about an issue, also a link to the issue. Also make sure the fragment ends with a period, and begins with a lower-case letter after `-`. (Again, if you don't do this, we'll add suggestions to fix it, so don't worry too much :) )
|
||||||
* Avoid reformatting unrelated parts of the codebase in your PR. These types of changes will likely be requested for reversion, create additional work for reviewers, and may cause approval to be delayed.
|
* Avoid reformatting unrelated parts of the codebase in your PR. These types of changes will likely be requested for reversion, create additional work for reviewers, and may cause approval to be delayed.
|
||||||
|
|
||||||
You can also read [our Quick-start development guide](https://github.com/ansible/community-docs/blob/main/create_pr_quick_start_guide.rst).
|
You can also read the Ansible community's [Quick-start development guide](https://docs.ansible.com/projects/ansible/devel/community/create_pr_quick_start.html).
|
||||||
|
|
||||||
## Test pull requests
|
## Test pull requests
|
||||||
|
|
||||||
If you want to test a PR locally, refer to [our testing guide](https://github.com/ansible/community-docs/blob/main/test_pr_locally_guide.rst) for instructions on how do it quickly.
|
If you want to test a PR locally, refer to [our testing guide](https://docs.ansible.com/projects/ansible/devel/community/collection_contributors/collection_test_pr_locally.html) for instructions on how do it quickly.
|
||||||
|
|
||||||
If you find any inconsistencies or places in this document which can be improved, feel free to raise an issue or pull request to fix it.
|
If you find any inconsistencies or places in this document which can be improved, feel free to raise an issue or pull request to fix it.
|
||||||
|
|
||||||
## Run sanity, unit or integration tests locally
|
## Run sanity or unit locally (with antsibull-nox)
|
||||||
|
|
||||||
You have to check out the repository into a specific path structure to be able to run `ansible-test`. The path to the git checkout must end with `.../ansible_collections/community/general`. Please see [our testing guide](https://github.com/ansible/community-docs/blob/main/test_pr_locally_guide.rst) for instructions on how to check out the repository into a correct path structure. The short version of these instructions is:
|
The easiest way to run sanity and unit tests locally is to use [antsibull-nox](https://docs.ansible.com/projects/antsibull-nox/).
|
||||||
|
(If you have [nox](https://nox.thea.codes/en/stable/) installed, it will automatically install antsibull-nox in a virtual environment for you.)
|
||||||
|
|
||||||
|
### Sanity tests
|
||||||
|
|
||||||
|
The following commands show how to run ansible-test sanity tests:
|
||||||
|
|
||||||
|
```.bash
|
||||||
|
# Run basic sanity tests for all files in the collection:
|
||||||
|
nox -Re ansible-test-sanity-devel
|
||||||
|
|
||||||
|
# Run basic sanity tests for the given files and directories:
|
||||||
|
nox -Re ansible-test-sanity-devel -- plugins/modules/system/pids.py tests/integration/targets/pids/
|
||||||
|
|
||||||
|
# Run all other sanity tests for all files in the collection:
|
||||||
|
nox -R
|
||||||
|
```
|
||||||
|
|
||||||
|
If you replace `-Re` with `-e`, respectively. If you leave `-R` away, then the virtual environments will be re-created. The `-R` re-uses them (if they already exist).
|
||||||
|
|
||||||
|
### Unit tests
|
||||||
|
|
||||||
|
The following commands show how to run unit tests:
|
||||||
|
|
||||||
|
```.bash
|
||||||
|
# Run all unit tests:
|
||||||
|
nox -Re ansible-test-units-devel
|
||||||
|
|
||||||
|
# Run all unit tests for one Python version (a lot faster):
|
||||||
|
nox -Re ansible-test-units-devel -- --python 3.13
|
||||||
|
|
||||||
|
# Run a specific unit test (for the nmcli module) for one Python version:
|
||||||
|
nox -Re ansible-test-units-devel -- --python 3.13 tests/unit/plugins/modules/net_tools/test_nmcli.py
|
||||||
|
```
|
||||||
|
|
||||||
|
If you replace `-Re` with `-e`, then the virtual environments will be re-created. The `-R` re-uses them (if they already exist).
|
||||||
|
|
||||||
|
## Run basic sanity, unit or integration tests locally (with ansible-test)
|
||||||
|
|
||||||
|
Instead of using antsibull-nox, you can also run sanity and unit tests with ansible-test directly.
|
||||||
|
This also allows you to run integration tests.
|
||||||
|
|
||||||
|
You have to check out the repository into a specific path structure to be able to run `ansible-test`. The path to the git checkout must end with `.../ansible_collections/community/general`. Please see [our testing guide](https://docs.ansible.com/projects/ansible/devel/community/collection_contributors/collection_test_pr_locally.html) for instructions on how to check out the repository into a correct path structure. The short version of these instructions is:
|
||||||
|
|
||||||
```.bash
|
```.bash
|
||||||
mkdir -p ~/dev/ansible_collections/community
|
mkdir -p ~/dev/ansible_collections/community
|
||||||
@@ -56,20 +103,27 @@ cd ~/dev/ansible_collections/community/general
|
|||||||
|
|
||||||
Then you can run `ansible-test` (which is a part of [ansible-core](https://pypi.org/project/ansible-core/)) inside the checkout. The following example commands expect that you have installed Docker or Podman. Note that Podman has only been supported by more recent ansible-core releases. If you are using Docker, the following will work with Ansible 2.9+.
|
Then you can run `ansible-test` (which is a part of [ansible-core](https://pypi.org/project/ansible-core/)) inside the checkout. The following example commands expect that you have installed Docker or Podman. Note that Podman has only been supported by more recent ansible-core releases. If you are using Docker, the following will work with Ansible 2.9+.
|
||||||
|
|
||||||
### Sanity tests
|
### Basic sanity tests
|
||||||
|
|
||||||
The following commands show how to run sanity tests:
|
The following commands show how to run basic sanity tests:
|
||||||
|
|
||||||
```.bash
|
```.bash
|
||||||
# Run sanity tests for all files in the collection:
|
# Run basic sanity tests for all files in the collection:
|
||||||
ansible-test sanity --docker -v
|
ansible-test sanity --docker -v
|
||||||
|
|
||||||
# Run sanity tests for the given files and directories:
|
# Run basic sanity tests for the given files and directories:
|
||||||
ansible-test sanity --docker -v plugins/modules/system/pids.py tests/integration/targets/pids/
|
ansible-test sanity --docker -v plugins/modules/system/pids.py tests/integration/targets/pids/
|
||||||
```
|
```
|
||||||
|
|
||||||
### Unit tests
|
### Unit tests
|
||||||
|
|
||||||
|
Note that for running unit tests, you need to install required collections in the same folder structure that `community.general` is checked out in.
|
||||||
|
Right now, you need to install [`community.internal_test_tools`](https://github.com/ansible-collections/community.internal_test_tools).
|
||||||
|
If you want to use the latest version from GitHub, you can run:
|
||||||
|
```
|
||||||
|
git clone https://github.com/ansible-collections/community.internal_test_tools.git ~/dev/ansible_collections/community/internal_test_tools
|
||||||
|
```
|
||||||
|
|
||||||
The following commands show how to run unit tests:
|
The following commands show how to run unit tests:
|
||||||
|
|
||||||
```.bash
|
```.bash
|
||||||
@@ -85,6 +139,16 @@ ansible-test units --docker -v --python 3.8 tests/unit/plugins/modules/net_tools
|
|||||||
|
|
||||||
### Integration tests
|
### Integration tests
|
||||||
|
|
||||||
|
Note that for running integration tests, you need to install required collections in the same folder structure that `community.general` is checked out in.
|
||||||
|
Right now, depending on the test, you need to install [`ansible.posix`](https://github.com/ansible-collections/ansible.posix), [`community.crypto`](https://github.com/ansible-collections/community.crypto), and [`community.docker`](https://github.com/ansible-collections/community.docker):
|
||||||
|
If you want to use the latest versions from GitHub, you can run:
|
||||||
|
```
|
||||||
|
mkdir -p ~/dev/ansible_collections/ansible
|
||||||
|
git clone https://github.com/ansible-collections/ansible.posix.git ~/dev/ansible_collections/ansible/posix
|
||||||
|
git clone https://github.com/ansible-collections/community.crypto.git ~/dev/ansible_collections/community/crypto
|
||||||
|
git clone https://github.com/ansible-collections/community.docker.git ~/dev/ansible_collections/community/docker
|
||||||
|
```
|
||||||
|
|
||||||
The following commands show how to run integration tests:
|
The following commands show how to run integration tests:
|
||||||
|
|
||||||
#### In Docker
|
#### In Docker
|
||||||
@@ -92,8 +156,8 @@ The following commands show how to run integration tests:
|
|||||||
Integration tests on Docker have the following parameters:
|
Integration tests on Docker have the following parameters:
|
||||||
- `image_name` (required): The name of the Docker image. To get the list of supported Docker images, run
|
- `image_name` (required): The name of the Docker image. To get the list of supported Docker images, run
|
||||||
`ansible-test integration --help` and look for _target docker images_.
|
`ansible-test integration --help` and look for _target docker images_.
|
||||||
- `test_name` (optional): The name of the integration test.
|
- `test_name` (optional): The name of the integration test.
|
||||||
For modules, this equals the short name of the module; for example, `pacman` in case of `community.general.pacman`.
|
For modules, this equals the short name of the module; for example, `pacman` in case of `community.general.pacman`.
|
||||||
For plugins, the plugin type is added before the plugin's short name, for example `callback_yaml` for the `community.general.yaml` callback.
|
For plugins, the plugin type is added before the plugin's short name, for example `callback_yaml` for the `community.general.yaml` callback.
|
||||||
```.bash
|
```.bash
|
||||||
# Test all plugins/modules on fedora40
|
# Test all plugins/modules on fedora40
|
||||||
@@ -129,9 +193,9 @@ Creating new modules and plugins requires a bit more work than other Pull Reques
|
|||||||
|
|
||||||
3. When creating a new module or plugin, please make sure that you follow various guidelines:
|
3. When creating a new module or plugin, please make sure that you follow various guidelines:
|
||||||
|
|
||||||
- Follow [development conventions](https://docs.ansible.com/ansible/devel/dev_guide/developing_modules_best_practices.html);
|
- Follow [development conventions](https://docs.ansible.com/projects/ansible/devel/dev_guide/developing_modules_best_practices.html);
|
||||||
- Follow [documentation standards](https://docs.ansible.com/ansible/devel/dev_guide/developing_modules_documenting.html) and
|
- Follow [documentation standards](https://docs.ansible.com/projects/ansible/devel/dev_guide/developing_modules_documenting.html) and
|
||||||
the [Ansible style guide](https://docs.ansible.com/ansible/devel/dev_guide/style_guide/index.html#style-guide);
|
the [Ansible style guide](https://docs.ansible.com/projects/ansible/devel/dev_guide/style_guide/index.html#style-guide);
|
||||||
- Make sure your modules and plugins are [GPL-3.0-or-later](https://www.gnu.org/licenses/gpl-3.0-standalone.html) licensed
|
- Make sure your modules and plugins are [GPL-3.0-or-later](https://www.gnu.org/licenses/gpl-3.0-standalone.html) licensed
|
||||||
(new module_utils can also be [BSD-2-clause](https://opensource.org/licenses/BSD-2-Clause) licensed);
|
(new module_utils can also be [BSD-2-clause](https://opensource.org/licenses/BSD-2-Clause) licensed);
|
||||||
- Make sure that new plugins and modules have tests (unit tests, integration tests, or both); it is preferable to have some tests
|
- Make sure that new plugins and modules have tests (unit tests, integration tests, or both); it is preferable to have some tests
|
||||||
|
|||||||
42
README.md
42
README.md
@@ -6,22 +6,24 @@ SPDX-License-Identifier: GPL-3.0-or-later
|
|||||||
|
|
||||||
# Community General Collection
|
# Community General Collection
|
||||||
|
|
||||||
[](https://dev.azure.com/ansible/community.general/_build?definitionId=31)
|
[](https://docs.ansible.com/projects/ansible/latest/collections/community/general/)
|
||||||
[](https://github.com/ansible-collections/community.general/actions)
|
[](https://dev.azure.com/ansible/community.general/_build?definitionId=31)
|
||||||
|
[](https://github.com/ansible-collections/community.general/actions)
|
||||||
|
[](https://github.com/ansible-collections/community.general/actions)
|
||||||
[](https://codecov.io/gh/ansible-collections/community.general)
|
[](https://codecov.io/gh/ansible-collections/community.general)
|
||||||
[](https://api.reuse.software/info/github.com/ansible-collections/community.general)
|
[](https://api.reuse.software/info/github.com/ansible-collections/community.general)
|
||||||
|
|
||||||
This repository contains the `community.general` Ansible Collection. The collection is a part of the Ansible package and includes many modules and plugins supported by Ansible community which are not part of more specialized community collections.
|
This repository contains the `community.general` Ansible Collection. The collection is a part of the Ansible package and includes many modules and plugins supported by Ansible community which are not part of more specialized community collections.
|
||||||
|
|
||||||
You can find [documentation for this collection on the Ansible docs site](https://docs.ansible.com/ansible/latest/collections/community/general/).
|
You can find [documentation for this collection on the Ansible docs site](https://docs.ansible.com/projects/ansible/latest/collections/community/general/).
|
||||||
|
|
||||||
Please note that this collection does **not** support Windows targets. Only connection plugins included in this collection might support Windows targets, and will explicitly mention that in their documentation if they do so.
|
Please note that this collection does **not** support Windows targets. Only connection plugins included in this collection might support Windows targets, and will explicitly mention that in their documentation if they do so.
|
||||||
|
|
||||||
## Code of Conduct
|
## Code of Conduct
|
||||||
|
|
||||||
We follow [Ansible Code of Conduct](https://docs.ansible.com/ansible/latest/community/code_of_conduct.html) in all our interactions within this project.
|
We follow [Ansible Code of Conduct](https://docs.ansible.com/projects/ansible/latest/community/code_of_conduct.html) in all our interactions within this project.
|
||||||
|
|
||||||
If you encounter abusive behavior violating the [Ansible Code of Conduct](https://docs.ansible.com/ansible/latest/community/code_of_conduct.html), please refer to the [policy violations](https://docs.ansible.com/ansible/latest/community/code_of_conduct.html#policy-violations) section of the Code of Conduct for information on how to raise a complaint.
|
If you encounter abusive behavior violating the [Ansible Code of Conduct](https://docs.ansible.com/projects/ansible/latest/community/code_of_conduct.html), please refer to the [policy violations](https://docs.ansible.com/projects/ansible/latest/community/code_of_conduct.html#policy-violations) section of the Code of Conduct for information on how to raise a complaint.
|
||||||
|
|
||||||
## Communication
|
## Communication
|
||||||
|
|
||||||
@@ -31,13 +33,13 @@ If you encounter abusive behavior violating the [Ansible Code of Conduct](https:
|
|||||||
* [Social Spaces](https://forum.ansible.com/c/chat/4): gather and interact with fellow enthusiasts.
|
* [Social Spaces](https://forum.ansible.com/c/chat/4): gather and interact with fellow enthusiasts.
|
||||||
* [News & Announcements](https://forum.ansible.com/c/news/5): track project-wide announcements including social events.
|
* [News & Announcements](https://forum.ansible.com/c/news/5): track project-wide announcements including social events.
|
||||||
|
|
||||||
* The Ansible [Bullhorn newsletter](https://docs.ansible.com/ansible/devel/community/communication.html#the-bullhorn): used to announce releases and important changes.
|
* The Ansible [Bullhorn newsletter](https://docs.ansible.com/projects/ansible/devel/community/communication.html#the-bullhorn): used to announce releases and important changes.
|
||||||
|
|
||||||
For more information about communication, see the [Ansible communication guide](https://docs.ansible.com/ansible/devel/community/communication.html).
|
For more information about communication, see the [Ansible communication guide](https://docs.ansible.com/projects/ansible/devel/community/communication.html).
|
||||||
|
|
||||||
## Tested with Ansible
|
## Tested with Ansible
|
||||||
|
|
||||||
Tested with the current ansible-core 2.13, ansible-core 2.14, ansible-core 2.15, ansible-core 2.16, ansible-core 2.17 releases and the current development version of ansible-core. Ansible-core versions before 2.13.0 are not supported. This includes all ansible-base 2.10 and Ansible 2.9 releases.
|
Tested with the current ansible-core 2.15, ansible-core 2.16, ansible-core 2.17, ansible-core 2.18, ansible-core 2.19, and ansible-core 2.20 releases. Ansible-core versions before 2.15.0 are not supported. This includes all ansible-base 2.10 and Ansible 2.9 releases.
|
||||||
|
|
||||||
## External requirements
|
## External requirements
|
||||||
|
|
||||||
@@ -45,7 +47,7 @@ Some modules and plugins require external libraries. Please check the requiremen
|
|||||||
|
|
||||||
## Included content
|
## Included content
|
||||||
|
|
||||||
Please check the included content on the [Ansible Galaxy page for this collection](https://galaxy.ansible.com/ui/repo/published/community/general/) or the [documentation on the Ansible docs site](https://docs.ansible.com/ansible/latest/collections/community/general/).
|
Please check the included content on the [Ansible Galaxy page for this collection](https://galaxy.ansible.com/ui/repo/published/community/general/) or the [documentation on the Ansible docs site](https://docs.ansible.com/projects/ansible/latest/collections/community/general/).
|
||||||
|
|
||||||
## Using this collection
|
## Using this collection
|
||||||
|
|
||||||
@@ -74,7 +76,7 @@ You can also install a specific version of the collection, for example, if you n
|
|||||||
ansible-galaxy collection install community.general:==X.Y.Z
|
ansible-galaxy collection install community.general:==X.Y.Z
|
||||||
```
|
```
|
||||||
|
|
||||||
See [Ansible Using collections](https://docs.ansible.com/ansible/latest/user_guide/collections_using.html) for more details.
|
See [Ansible Using collections](https://docs.ansible.com/projects/ansible/latest/user_guide/collections_using.html) for more details.
|
||||||
|
|
||||||
## Contributing to this collection
|
## Contributing to this collection
|
||||||
|
|
||||||
@@ -88,13 +90,13 @@ You don't know how to start? Refer to our [contribution guide](https://github.co
|
|||||||
|
|
||||||
The current maintainers are listed in the [commit-rights.md](https://github.com/ansible-collections/community.general/blob/main/commit-rights.md#people) file. If you have questions or need help, feel free to mention them in the proposals.
|
The current maintainers are listed in the [commit-rights.md](https://github.com/ansible-collections/community.general/blob/main/commit-rights.md#people) file. If you have questions or need help, feel free to mention them in the proposals.
|
||||||
|
|
||||||
You can find more information in the [developer guide for collections](https://docs.ansible.com/ansible/devel/dev_guide/developing_collections.html#contributing-to-collections), and in the [Ansible Community Guide](https://docs.ansible.com/ansible/latest/community/index.html).
|
You can find more information in the [developer guide for collections](https://docs.ansible.com/projects/ansible/devel/dev_guide/developing_collections.html#contributing-to-collections), and in the [Ansible Community Guide](https://docs.ansible.com/projects/ansible/latest/community/index.html).
|
||||||
|
|
||||||
Also for some notes specific to this collection see [our CONTRIBUTING documentation](https://github.com/ansible-collections/community.general/blob/main/CONTRIBUTING.md).
|
Also for some notes specific to this collection see [our CONTRIBUTING documentation](https://github.com/ansible-collections/community.general/blob/main/CONTRIBUTING.md).
|
||||||
|
|
||||||
### Running tests
|
### Running tests
|
||||||
|
|
||||||
See [here](https://docs.ansible.com/ansible/devel/dev_guide/developing_collections.html#testing-collections).
|
See [here](https://docs.ansible.com/projects/ansible/devel/dev_guide/developing_collections.html#testing-collections).
|
||||||
|
|
||||||
## Collection maintenance
|
## Collection maintenance
|
||||||
|
|
||||||
@@ -108,7 +110,7 @@ It is necessary for maintainers of this collection to be subscribed to:
|
|||||||
* The collection itself (the `Watch` button → `All Activity` in the upper right corner of the repository's homepage).
|
* The collection itself (the `Watch` button → `All Activity` in the upper right corner of the repository's homepage).
|
||||||
* The "Changes Impacting Collection Contributors and Maintainers" [issue](https://github.com/ansible-collections/overview/issues/45).
|
* The "Changes Impacting Collection Contributors and Maintainers" [issue](https://github.com/ansible-collections/overview/issues/45).
|
||||||
|
|
||||||
They also should be subscribed to Ansible's [The Bullhorn newsletter](https://docs.ansible.com/ansible/devel/community/communication.html#the-bullhorn).
|
They also should be subscribed to Ansible's [The Bullhorn newsletter](https://docs.ansible.com/projects/ansible/devel/community/communication.html#the-bullhorn).
|
||||||
|
|
||||||
## Publishing New Version
|
## Publishing New Version
|
||||||
|
|
||||||
@@ -116,7 +118,7 @@ See the [Releasing guidelines](https://github.com/ansible/community-docs/blob/ma
|
|||||||
|
|
||||||
## Release notes
|
## Release notes
|
||||||
|
|
||||||
See the [changelog](https://github.com/ansible-collections/community.general/blob/stable-9/CHANGELOG.md).
|
See the [changelog](https://github.com/ansible-collections/community.general/blob/stable-10/CHANGELOG.md).
|
||||||
|
|
||||||
## Roadmap
|
## Roadmap
|
||||||
|
|
||||||
@@ -127,16 +129,16 @@ See [this issue](https://github.com/ansible-collections/community.general/issues
|
|||||||
## More information
|
## More information
|
||||||
|
|
||||||
- [Ansible Collection overview](https://github.com/ansible-collections/overview)
|
- [Ansible Collection overview](https://github.com/ansible-collections/overview)
|
||||||
- [Ansible User guide](https://docs.ansible.com/ansible/latest/user_guide/index.html)
|
- [Ansible User guide](https://docs.ansible.com/projects/ansible/latest/user_guide/index.html)
|
||||||
- [Ansible Developer guide](https://docs.ansible.com/ansible/latest/dev_guide/index.html)
|
- [Ansible Developer guide](https://docs.ansible.com/projects/ansible/latest/dev_guide/index.html)
|
||||||
- [Ansible Community code of conduct](https://docs.ansible.com/ansible/latest/community/code_of_conduct.html)
|
- [Ansible Community code of conduct](https://docs.ansible.com/projects/ansible/latest/community/code_of_conduct.html)
|
||||||
|
|
||||||
## Licensing
|
## Licensing
|
||||||
|
|
||||||
This collection is primarily licensed and distributed as a whole under the GNU General Public License v3.0 or later.
|
This collection is primarily licensed and distributed as a whole under the GNU General Public License v3.0 or later.
|
||||||
|
|
||||||
See [LICENSES/GPL-3.0-or-later.txt](https://github.com/ansible-collections/community.general/blob/stable-9/COPYING) for the full text.
|
See [LICENSES/GPL-3.0-or-later.txt](https://github.com/ansible-collections/community.general/blob/stable-10/COPYING) for the full text.
|
||||||
|
|
||||||
Parts of the collection are licensed under the [BSD 2-Clause license](https://github.com/ansible-collections/community.general/blob/stable-9/LICENSES/BSD-2-Clause.txt), the [MIT license](https://github.com/ansible-collections/community.general/blob/stable-9/LICENSES/MIT.txt), and the [PSF 2.0 license](https://github.com/ansible-collections/community.general/blob/stable-9/LICENSES/PSF-2.0.txt).
|
Parts of the collection are licensed under the [BSD 2-Clause license](https://github.com/ansible-collections/community.general/blob/stable-10/LICENSES/BSD-2-Clause.txt), the [MIT license](https://github.com/ansible-collections/community.general/blob/stable-10/LICENSES/MIT.txt), and the [PSF 2.0 license](https://github.com/ansible-collections/community.general/blob/stable-10/LICENSES/PSF-2.0.txt).
|
||||||
|
|
||||||
All files have a machine readable `SDPX-License-Identifier:` comment denoting its respective license(s) or an equivalent entry in an accompanying `.license` file. Only changelog fragments (which will not be part of a release) are covered by a blanket statement in `.reuse/dep5`. This conforms to the [REUSE specification](https://reuse.software/spec/).
|
All files have a machine readable `SDPX-License-Identifier:` comment denoting its respective license(s) or an equivalent entry in an accompanying `.license` file. Only changelog fragments (which will not be part of a release) are covered by a blanket statement in `REUSE.toml`. This conforms to the [REUSE specification](https://reuse.software/spec/).
|
||||||
|
|||||||
11
REUSE.toml
Normal file
11
REUSE.toml
Normal file
@@ -0,0 +1,11 @@
|
|||||||
|
# Copyright (c) Ansible Project
|
||||||
|
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||||
|
# SPDX-License-Identifier: GPL-3.0-or-later
|
||||||
|
|
||||||
|
version = 1
|
||||||
|
|
||||||
|
[[annotations]]
|
||||||
|
path = "changelogs/fragments/**"
|
||||||
|
precedence = "aggregate"
|
||||||
|
SPDX-FileCopyrightText = "Ansible Project"
|
||||||
|
SPDX-License-Identifier = "GPL-3.0-or-later"
|
||||||
67
antsibull-nox.toml
Normal file
67
antsibull-nox.toml
Normal file
@@ -0,0 +1,67 @@
|
|||||||
|
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||||
|
# SPDX-License-Identifier: GPL-3.0-or-later
|
||||||
|
# SPDX-FileCopyrightText: 2025 Felix Fontein <felix@fontein.de>
|
||||||
|
|
||||||
|
[collection_sources]
|
||||||
|
"ansible.posix" = "git+https://github.com/ansible-collections/ansible.posix.git,main"
|
||||||
|
"community.crypto" = "git+https://github.com/ansible-collections/community.crypto.git,main"
|
||||||
|
"community.docker" = "git+https://github.com/ansible-collections/community.docker.git,main"
|
||||||
|
"community.internal_test_tools" = "git+https://github.com/ansible-collections/community.internal_test_tools.git,main"
|
||||||
|
|
||||||
|
[collection_sources_per_ansible.'2.15']
|
||||||
|
# community.crypto's main branch needs ansible-core >= 2.17
|
||||||
|
"community.crypto" = "git+https://github.com/ansible-collections/community.crypto.git,stable-2"
|
||||||
|
|
||||||
|
[collection_sources_per_ansible.'2.16']
|
||||||
|
# community.crypto's main branch needs ansible-core >= 2.17
|
||||||
|
"community.crypto" = "git+https://github.com/ansible-collections/community.crypto.git,stable-2"
|
||||||
|
|
||||||
|
[vcs]
|
||||||
|
vcs = "git"
|
||||||
|
development_branch = "main"
|
||||||
|
stable_branches = [ "stable-*" ]
|
||||||
|
|
||||||
|
[sessions]
|
||||||
|
|
||||||
|
[sessions.docs_check]
|
||||||
|
validate_collection_refs="all"
|
||||||
|
|
||||||
|
[sessions.license_check]
|
||||||
|
|
||||||
|
[sessions.extra_checks]
|
||||||
|
run_no_unwanted_files = true
|
||||||
|
no_unwanted_files_module_extensions = [".py"]
|
||||||
|
no_unwanted_files_yaml_extensions = [".yml"]
|
||||||
|
run_action_groups = true
|
||||||
|
|
||||||
|
[[sessions.extra_checks.action_groups_config]]
|
||||||
|
name = "consul"
|
||||||
|
pattern = "^consul_.*$"
|
||||||
|
exclusions = [
|
||||||
|
"consul_acl_bootstrap",
|
||||||
|
"consul_kv",
|
||||||
|
]
|
||||||
|
doc_fragment = "community.general.consul.actiongroup_consul"
|
||||||
|
|
||||||
|
[[sessions.extra_checks.action_groups_config]]
|
||||||
|
name = "keycloak"
|
||||||
|
pattern = "^keycloak_.*$"
|
||||||
|
exclusions = [
|
||||||
|
"keycloak_realm_info",
|
||||||
|
]
|
||||||
|
doc_fragment = "community.general.keycloak.actiongroup_keycloak"
|
||||||
|
|
||||||
|
[[sessions.extra_checks.action_groups_config]]
|
||||||
|
name = "proxmox"
|
||||||
|
pattern = "^proxmox(_.*)?$"
|
||||||
|
exclusions = []
|
||||||
|
doc_fragment = "community.general.proxmox.actiongroup_proxmox"
|
||||||
|
|
||||||
|
[sessions.build_import_check]
|
||||||
|
run_galaxy_importer = true
|
||||||
|
|
||||||
|
[sessions.ansible_test_sanity]
|
||||||
|
include_devel = true
|
||||||
|
|
||||||
|
[sessions.ansible_test_units]
|
||||||
|
include_devel = true
|
||||||
File diff suppressed because it is too large
Load Diff
@@ -7,9 +7,9 @@ changelog_filename_template: ../CHANGELOG.rst
|
|||||||
changelog_filename_version_depth: 0
|
changelog_filename_version_depth: 0
|
||||||
changes_file: changelog.yaml
|
changes_file: changelog.yaml
|
||||||
changes_format: combined
|
changes_format: combined
|
||||||
|
ignore_other_fragment_extensions: true
|
||||||
keep_fragments: false
|
keep_fragments: false
|
||||||
mention_ancestor: true
|
mention_ancestor: true
|
||||||
flatmap: true
|
|
||||||
new_plugins_after_name: removed_features
|
new_plugins_after_name: removed_features
|
||||||
notesdir: fragments
|
notesdir: fragments
|
||||||
output_formats:
|
output_formats:
|
||||||
@@ -40,3 +40,4 @@ use_fqcn: true
|
|||||||
add_plugin_period: true
|
add_plugin_period: true
|
||||||
changelog_nice_yaml: true
|
changelog_nice_yaml: true
|
||||||
changelog_sort: version
|
changelog_sort: version
|
||||||
|
vcs: auto
|
||||||
|
|||||||
2
changelogs/fragments/11909-fix-favicon-url.yml
Normal file
2
changelogs/fragments/11909-fix-favicon-url.yml
Normal file
@@ -0,0 +1,2 @@
|
|||||||
|
minor_changes:
|
||||||
|
- "mattermost, rocketchat, slack - update default ``icon_url`` to ansible favicon (https://github.com/ansible-collections/community.general/pull/11909)."
|
||||||
@@ -7,7 +7,7 @@ SPDX-License-Identifier: GPL-3.0-or-later
|
|||||||
Committers Guidelines for community.general
|
Committers Guidelines for community.general
|
||||||
===========================================
|
===========================================
|
||||||
|
|
||||||
This document is based on the [Ansible committer guidelines](https://github.com/ansible/ansible/blob/b57444af14062ec96e0af75fdfc2098c74fe2d9a/docs/docsite/rst/community/committer_guidelines.rst) ([latest version](https://docs.ansible.com/ansible/devel/community/committer_guidelines.html)).
|
This document is based on the [Ansible committer guidelines](https://github.com/ansible/ansible/blob/b57444af14062ec96e0af75fdfc2098c74fe2d9a/docs/docsite/rst/community/committer_guidelines.rst) ([latest version](https://docs.ansible.com/projects/ansible/devel/community/committer_guidelines.html)).
|
||||||
|
|
||||||
These are the guidelines for people with commit privileges on the Ansible Community General Collection GitHub repository. Please read the guidelines before you commit.
|
These are the guidelines for people with commit privileges on the Ansible Community General Collection GitHub repository. Please read the guidelines before you commit.
|
||||||
|
|
||||||
@@ -45,7 +45,7 @@ Individuals with direct commit access to this collection repository are entruste
|
|||||||
- Do not commit directly.
|
- Do not commit directly.
|
||||||
- Do not merge your own PRs. Someone else should have a chance to review and approve the PR merge. You have a small amount of leeway here for very minor changes.
|
- Do not merge your own PRs. Someone else should have a chance to review and approve the PR merge. You have a small amount of leeway here for very minor changes.
|
||||||
- Do not forget about non-standard / alternate environments. Consider the alternatives. Yes, people have bad/unusual/strange environments (like binaries from multiple init systems installed), but they are the ones who need us the most.
|
- Do not forget about non-standard / alternate environments. Consider the alternatives. Yes, people have bad/unusual/strange environments (like binaries from multiple init systems installed), but they are the ones who need us the most.
|
||||||
- Do not drag your community team members down. Discuss the technical merits of any pull requests you review. Avoid negativity and personal comments. For more guidance on being a good community member, read the [Ansible Community Code of Conduct](https://docs.ansible.com/ansible/latest/community/code_of_conduct.html).
|
- Do not drag your community team members down. Discuss the technical merits of any pull requests you review. Avoid negativity and personal comments. For more guidance on being a good community member, read the [Ansible Community Code of Conduct](https://docs.ansible.com/projects/ansible/latest/community/code_of_conduct.html).
|
||||||
- Do not forget about the maintenance burden. High-maintenance features may not be worth adding.
|
- Do not forget about the maintenance burden. High-maintenance features may not be worth adding.
|
||||||
- Do not break playbooks. Always keep backwards compatibility in mind.
|
- Do not break playbooks. Always keep backwards compatibility in mind.
|
||||||
- Do not forget to keep it simple. Complexity breeds all kinds of problems.
|
- Do not forget to keep it simple. Complexity breeds all kinds of problems.
|
||||||
|
|||||||
@@ -20,3 +20,4 @@ sections:
|
|||||||
- guide_vardict
|
- guide_vardict
|
||||||
- guide_cmdrunner
|
- guide_cmdrunner
|
||||||
- guide_modulehelper
|
- guide_modulehelper
|
||||||
|
- guide_uthelper
|
||||||
|
|||||||
@@ -8,7 +8,7 @@
|
|||||||
community.general Filter Guide
|
community.general Filter Guide
|
||||||
==============================
|
==============================
|
||||||
|
|
||||||
The :ref:`community.general collection <plugins_in_community.general>` offers several useful filter plugins.
|
The :anscollection:`community.general collection <community.general#collection>` offers several useful filter plugins.
|
||||||
|
|
||||||
.. toctree::
|
.. toctree::
|
||||||
:maxdepth: 2
|
:maxdepth: 2
|
||||||
|
|||||||
@@ -26,8 +26,8 @@ You can use the :ansplugin:`community.general.dict_kv filter <community.general.
|
|||||||
type: host
|
type: host
|
||||||
database: all
|
database: all
|
||||||
myservers:
|
myservers:
|
||||||
- server1
|
- server1
|
||||||
- server2
|
- server2
|
||||||
|
|
||||||
This produces:
|
This produces:
|
||||||
|
|
||||||
|
|||||||
@@ -65,7 +65,7 @@ All three statements are equivalent and give:
|
|||||||
|
|
||||||
.. note:: Be aware that in most cases, filter calls without any argument require ``flatten=true``, otherwise the input is returned as result. The reason for this is, that the input is considered as a variable argument and is wrapped by an additional outer list. ``flatten=true`` ensures that this list is removed before the input is processed by the filter logic.
|
.. note:: Be aware that in most cases, filter calls without any argument require ``flatten=true``, otherwise the input is returned as result. The reason for this is, that the input is considered as a variable argument and is wrapped by an additional outer list. ``flatten=true`` ensures that this list is removed before the input is processed by the filter logic.
|
||||||
|
|
||||||
The filters ansplugin:`community.general.lists_difference#filter` or :ansplugin:`community.general.lists_symmetric_difference#filter` can be used in the same way as the filters in the examples above. They calculate the difference or the symmetric difference between two or more lists and preserve the item order.
|
The filters :ansplugin:`community.general.lists_difference#filter` or :ansplugin:`community.general.lists_symmetric_difference#filter` can be used in the same way as the filters in the examples above. They calculate the difference or the symmetric difference between two or more lists and preserve the item order.
|
||||||
|
|
||||||
For example, the symmetric difference of ``A``, ``B`` and ``C`` may be written as:
|
For example, the symmetric difference of ``A``, ``B`` and ``C`` may be written as:
|
||||||
|
|
||||||
|
|||||||
@@ -17,50 +17,50 @@ Consider this data structure:
|
|||||||
.. code-block:: yaml+jinja
|
.. code-block:: yaml+jinja
|
||||||
|
|
||||||
{
|
{
|
||||||
"domain_definition": {
|
"domain_definition": {
|
||||||
"domain": {
|
"domain": {
|
||||||
"cluster": [
|
"cluster": [
|
||||||
{
|
{
|
||||||
"name": "cluster1"
|
"name": "cluster1"
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
"name": "cluster2"
|
"name": "cluster2"
|
||||||
}
|
|
||||||
],
|
|
||||||
"server": [
|
|
||||||
{
|
|
||||||
"name": "server11",
|
|
||||||
"cluster": "cluster1",
|
|
||||||
"port": "8080"
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"name": "server12",
|
|
||||||
"cluster": "cluster1",
|
|
||||||
"port": "8090"
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"name": "server21",
|
|
||||||
"cluster": "cluster2",
|
|
||||||
"port": "9080"
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"name": "server22",
|
|
||||||
"cluster": "cluster2",
|
|
||||||
"port": "9090"
|
|
||||||
}
|
|
||||||
],
|
|
||||||
"library": [
|
|
||||||
{
|
|
||||||
"name": "lib1",
|
|
||||||
"target": "cluster1"
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"name": "lib2",
|
|
||||||
"target": "cluster2"
|
|
||||||
}
|
|
||||||
]
|
|
||||||
}
|
}
|
||||||
|
],
|
||||||
|
"server": [
|
||||||
|
{
|
||||||
|
"name": "server11",
|
||||||
|
"cluster": "cluster1",
|
||||||
|
"port": "8080"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "server12",
|
||||||
|
"cluster": "cluster1",
|
||||||
|
"port": "8090"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "server21",
|
||||||
|
"cluster": "cluster2",
|
||||||
|
"port": "9080"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "server22",
|
||||||
|
"cluster": "cluster2",
|
||||||
|
"port": "9090"
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"library": [
|
||||||
|
{
|
||||||
|
"name": "lib1",
|
||||||
|
"target": "cluster1"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "lib2",
|
||||||
|
"target": "cluster2"
|
||||||
|
}
|
||||||
|
]
|
||||||
}
|
}
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
To extract all clusters from this structure, you can use the following query:
|
To extract all clusters from this structure, you can use the following query:
|
||||||
@@ -124,7 +124,7 @@ To get a hash map with all ports and names of a cluster:
|
|||||||
var: item
|
var: item
|
||||||
loop: "{{ domain_definition | community.general.json_query(server_name_cluster1_query) }}"
|
loop: "{{ domain_definition | community.general.json_query(server_name_cluster1_query) }}"
|
||||||
vars:
|
vars:
|
||||||
server_name_cluster1_query: "domain.server[?cluster=='cluster2'].{name: name, port: port}"
|
server_name_cluster1_query: "domain.server[?cluster=='cluster1'].{name: name, port: port}"
|
||||||
|
|
||||||
To extract ports from all clusters with name starting with 'server1':
|
To extract ports from all clusters with name starting with 'server1':
|
||||||
|
|
||||||
|
|||||||
@@ -78,17 +78,17 @@ If you do not specify a ``count_tag``, the task creates the number of instances
|
|||||||
tasks:
|
tasks:
|
||||||
- name: Create a set of instances
|
- name: Create a set of instances
|
||||||
community.general.ali_instance:
|
community.general.ali_instance:
|
||||||
instance_type: ecs.n4.small
|
instance_type: ecs.n4.small
|
||||||
image_id: "{{ ami_id }}"
|
image_id: "{{ ami_id }}"
|
||||||
instance_name: "My-new-instance"
|
instance_name: "My-new-instance"
|
||||||
instance_tags:
|
instance_tags:
|
||||||
Name: NewECS
|
Name: NewECS
|
||||||
Version: 0.0.1
|
Version: 0.0.1
|
||||||
count: 5
|
count: 5
|
||||||
count_tag:
|
count_tag:
|
||||||
Name: NewECS
|
Name: NewECS
|
||||||
allocate_public_ip: true
|
allocate_public_ip: true
|
||||||
max_bandwidth_out: 50
|
max_bandwidth_out: 50
|
||||||
register: create_instance
|
register: create_instance
|
||||||
|
|
||||||
In the example playbook above, data about the instances created by this playbook is saved in the variable defined by the ``register`` keyword in the task.
|
In the example playbook above, data about the instances created by this playbook is saved in the variable defined by the ``register`` keyword in the task.
|
||||||
|
|||||||
@@ -68,20 +68,27 @@ This is meant to be done once, then every time you need to execute the command y
|
|||||||
with runner("version") as ctx:
|
with runner("version") as ctx:
|
||||||
dummy, stdout, dummy = ctx.run()
|
dummy, stdout, dummy = ctx.run()
|
||||||
|
|
||||||
|
# passes arg 'data' to AnsibleModule.run_command()
|
||||||
|
with runner("type name", data=stdin_data) as ctx:
|
||||||
|
dummy, stdout, dummy = ctx.run()
|
||||||
|
|
||||||
# Another way of expressing it
|
# Another way of expressing it
|
||||||
dummy, stdout, dummy = runner("version").run()
|
dummy, stdout, dummy = runner("version").run()
|
||||||
|
|
||||||
Note that you can pass values for the arguments when calling ``run()``,
|
Note that you can pass values for the arguments when calling ``run()``, otherwise ``CmdRunner``
|
||||||
otherwise ``CmdRunner`` uses the module options with the exact same names to
|
uses the module options with the exact same names to provide values for the runner arguments.
|
||||||
provide values for the runner arguments. If no value is passed and no module option
|
If no value is passed and no module option is found for the name specified, then an exception is raised, unless
|
||||||
is found for the name specified, then an exception is raised, unless the
|
the argument is using ``cmd_runner_fmt.as_fixed`` as format function like the ``version`` in the example above.
|
||||||
argument is using ``cmd_runner_fmt.as_fixed`` as format function like the
|
See more about it below.
|
||||||
``version`` in the example above. See more about it below.
|
|
||||||
|
|
||||||
In the first example, values of ``type``, ``force``, ``no_deps`` and others
|
In the first example, values of ``type``, ``force``, ``no_deps`` and others
|
||||||
are taken straight from the module, whilst ``galaxy_cmd`` and ``upgrade`` are
|
are taken straight from the module, whilst ``galaxy_cmd`` and ``upgrade`` are
|
||||||
passed explicitly.
|
passed explicitly.
|
||||||
|
|
||||||
|
.. note::
|
||||||
|
|
||||||
|
It is not possible to automatically retrieve values of suboptions.
|
||||||
|
|
||||||
That generates a resulting command line similar to (example taken from the
|
That generates a resulting command line similar to (example taken from the
|
||||||
output of an integration test):
|
output of an integration test):
|
||||||
|
|
||||||
@@ -110,7 +117,7 @@ into something formatted for the command line.
|
|||||||
Argument format function
|
Argument format function
|
||||||
""""""""""""""""""""""""
|
""""""""""""""""""""""""
|
||||||
|
|
||||||
An ``arg_format`` function should be of the form:
|
An ``arg_format`` function is defined in the form similar to:
|
||||||
|
|
||||||
.. code-block:: python
|
.. code-block:: python
|
||||||
|
|
||||||
@@ -155,7 +162,7 @@ In these descriptions ``value`` refers to the single parameter passed to the for
|
|||||||
|
|
||||||
- Creation:
|
- Creation:
|
||||||
``cmd_runner_fmt.as_list()``
|
``cmd_runner_fmt.as_list()``
|
||||||
- Example:
|
- Examples:
|
||||||
+----------------------+---------------------+
|
+----------------------+---------------------+
|
||||||
| Value | Outcome |
|
| Value | Outcome |
|
||||||
+======================+=====================+
|
+======================+=====================+
|
||||||
@@ -167,12 +174,11 @@ In these descriptions ``value`` refers to the single parameter passed to the for
|
|||||||
- ``cmd_runner_fmt.as_bool()``
|
- ``cmd_runner_fmt.as_bool()``
|
||||||
This method receives two different parameters: ``args_true`` and ``args_false``, latter being optional.
|
This method receives two different parameters: ``args_true`` and ``args_false``, latter being optional.
|
||||||
If the boolean evaluation of ``value`` is ``True``, the format function returns ``args_true``.
|
If the boolean evaluation of ``value`` is ``True``, the format function returns ``args_true``.
|
||||||
If the boolean evaluation is ``False``, then the function returns ``args_false``
|
If the boolean evaluation is ``False``, then the function returns ``args_false`` if it was provided, or ``[]`` otherwise.
|
||||||
if it was provided, or ``[]`` otherwise.
|
|
||||||
|
|
||||||
- Creation:
|
- Creation (one arg):
|
||||||
``cmd_runner_fmt.as_bool("--force")``
|
``cmd_runner_fmt.as_bool("--force")``
|
||||||
- Example:
|
- Examples:
|
||||||
+------------+--------------------+
|
+------------+--------------------+
|
||||||
| Value | Outcome |
|
| Value | Outcome |
|
||||||
+============+====================+
|
+============+====================+
|
||||||
@@ -180,6 +186,30 @@ In these descriptions ``value`` refers to the single parameter passed to the for
|
|||||||
+------------+--------------------+
|
+------------+--------------------+
|
||||||
| ``False`` | ``[]`` |
|
| ``False`` | ``[]`` |
|
||||||
+------------+--------------------+
|
+------------+--------------------+
|
||||||
|
- Creation (two args, ``None`` treated as ``False``):
|
||||||
|
``cmd_runner_fmt.as_bool("--relax", "--dont-do-it")``
|
||||||
|
- Examples:
|
||||||
|
+------------+----------------------+
|
||||||
|
| Value | Outcome |
|
||||||
|
+============+======================+
|
||||||
|
| ``True`` | ``["--relax"]`` |
|
||||||
|
+------------+----------------------+
|
||||||
|
| ``False`` | ``["--dont-do-it"]`` |
|
||||||
|
+------------+----------------------+
|
||||||
|
| | ``["--dont-do-it"]`` |
|
||||||
|
+------------+----------------------+
|
||||||
|
- Creation (two args, ``None`` is ignored):
|
||||||
|
``cmd_runner_fmt.as_bool("--relax", "--dont-do-it", ignore_none=True)``
|
||||||
|
- Examples:
|
||||||
|
+------------+----------------------+
|
||||||
|
| Value | Outcome |
|
||||||
|
+============+======================+
|
||||||
|
| ``True`` | ``["--relax"]`` |
|
||||||
|
+------------+----------------------+
|
||||||
|
| ``False`` | ``["--dont-do-it"]`` |
|
||||||
|
+------------+----------------------+
|
||||||
|
| | ``[]`` |
|
||||||
|
+------------+----------------------+
|
||||||
|
|
||||||
- ``cmd_runner_fmt.as_bool_not()``
|
- ``cmd_runner_fmt.as_bool_not()``
|
||||||
This method receives one parameter, which is returned by the function when the boolean evaluation
|
This method receives one parameter, which is returned by the function when the boolean evaluation
|
||||||
@@ -187,7 +217,7 @@ In these descriptions ``value`` refers to the single parameter passed to the for
|
|||||||
|
|
||||||
- Creation:
|
- Creation:
|
||||||
``cmd_runner_fmt.as_bool_not("--no-deps")``
|
``cmd_runner_fmt.as_bool_not("--no-deps")``
|
||||||
- Example:
|
- Examples:
|
||||||
+-------------+---------------------+
|
+-------------+---------------------+
|
||||||
| Value | Outcome |
|
| Value | Outcome |
|
||||||
+=============+=====================+
|
+=============+=====================+
|
||||||
@@ -202,7 +232,7 @@ In these descriptions ``value`` refers to the single parameter passed to the for
|
|||||||
|
|
||||||
- Creation:
|
- Creation:
|
||||||
``cmd_runner_fmt.as_optval("-i")``
|
``cmd_runner_fmt.as_optval("-i")``
|
||||||
- Example:
|
- Examples:
|
||||||
+---------------+---------------------+
|
+---------------+---------------------+
|
||||||
| Value | Outcome |
|
| Value | Outcome |
|
||||||
+===============+=====================+
|
+===============+=====================+
|
||||||
@@ -216,7 +246,7 @@ In these descriptions ``value`` refers to the single parameter passed to the for
|
|||||||
|
|
||||||
- Creation:
|
- Creation:
|
||||||
``cmd_runner_fmt.as_opt_val("--name")``
|
``cmd_runner_fmt.as_opt_val("--name")``
|
||||||
- Example:
|
- Examples:
|
||||||
+--------------+--------------------------+
|
+--------------+--------------------------+
|
||||||
| Value | Outcome |
|
| Value | Outcome |
|
||||||
+==============+==========================+
|
+==============+==========================+
|
||||||
@@ -229,7 +259,7 @@ In these descriptions ``value`` refers to the single parameter passed to the for
|
|||||||
|
|
||||||
- Creation:
|
- Creation:
|
||||||
``cmd_runner_fmt.as_opt_eq_val("--num-cpus")``
|
``cmd_runner_fmt.as_opt_eq_val("--num-cpus")``
|
||||||
- Example:
|
- Examples:
|
||||||
+------------+-------------------------+
|
+------------+-------------------------+
|
||||||
| Value | Outcome |
|
| Value | Outcome |
|
||||||
+============+=========================+
|
+============+=========================+
|
||||||
@@ -237,24 +267,54 @@ In these descriptions ``value`` refers to the single parameter passed to the for
|
|||||||
+------------+-------------------------+
|
+------------+-------------------------+
|
||||||
|
|
||||||
- ``cmd_runner_fmt.as_fixed()``
|
- ``cmd_runner_fmt.as_fixed()``
|
||||||
This method receives one parameter ``arg``, the function expects no ``value`` - if one
|
This method defines one or more fixed arguments that are returned by the generated function
|
||||||
is provided then it is ignored.
|
regardless whether ``value`` is passed to it or not.
|
||||||
The function returns ``arg`` as-is.
|
|
||||||
|
|
||||||
- Creation:
|
This method accepts these arguments in one of three forms:
|
||||||
``cmd_runner_fmt.as_fixed("--version")``
|
|
||||||
- Example:
|
* one scalar parameter ``arg``, which will be returned as ``[arg]`` by the function, or
|
||||||
+---------+-----------------------+
|
* one sequence parameter, such as a list, ``arg``, which will be returned by the function as ``arg[0]``, or
|
||||||
| Value | Outcome |
|
* multiple parameters ``args``, which will be returned as ``args`` directly by the function.
|
||||||
+=========+=======================+
|
|
||||||
| | ``["--version"]`` |
|
See the examples below for each one of those forms. And, stressing that the generated function expects no ``value`` - if one
|
||||||
+---------+-----------------------+
|
is provided then it is ignored.
|
||||||
| 57 | ``["--version"]`` |
|
|
||||||
+---------+-----------------------+
|
- Creation (one scalar argument):
|
||||||
|
* ``cmd_runner_fmt.as_fixed("--version")``
|
||||||
|
- Examples:
|
||||||
|
+---------+--------------------------------------+
|
||||||
|
| Value | Outcome |
|
||||||
|
+=========+======================================+
|
||||||
|
| | * ``["--version"]`` |
|
||||||
|
+---------+--------------------------------------+
|
||||||
|
| 57 | * ``["--version"]`` |
|
||||||
|
+---------+--------------------------------------+
|
||||||
|
|
||||||
|
- Creation (one sequence argument):
|
||||||
|
* ``cmd_runner_fmt.as_fixed(["--list", "--json"])``
|
||||||
|
- Examples:
|
||||||
|
+---------+--------------------------------------+
|
||||||
|
| Value | Outcome |
|
||||||
|
+=========+======================================+
|
||||||
|
| | * ``["--list", "--json"]`` |
|
||||||
|
+---------+--------------------------------------+
|
||||||
|
| True | * ``["--list", "--json"]`` |
|
||||||
|
+---------+--------------------------------------+
|
||||||
|
|
||||||
|
- Creation (multiple arguments):
|
||||||
|
* ``cmd_runner_fmt.as_fixed("--one", "--two", "--three")``
|
||||||
|
- Examples:
|
||||||
|
+---------+--------------------------------------+
|
||||||
|
| Value | Outcome |
|
||||||
|
+=========+======================================+
|
||||||
|
| | * ``["--one", "--two", "--three"]`` |
|
||||||
|
+---------+--------------------------------------+
|
||||||
|
| False | * ``["--one", "--two", "--three"]`` |
|
||||||
|
+---------+--------------------------------------+
|
||||||
|
|
||||||
- Note:
|
- Note:
|
||||||
This is the only special case in which a value can be missing for the formatting function.
|
This is the only special case in which a value can be missing for the formatting function.
|
||||||
The example also comes from the code in `Quickstart`_.
|
The first example here comes from the code in `Quickstart`_.
|
||||||
In that case, the module has code to determine the command's version so that it can assert compatibility.
|
In that case, the module has code to determine the command's version so that it can assert compatibility.
|
||||||
There is no *value* to be passed for that CLI argument.
|
There is no *value* to be passed for that CLI argument.
|
||||||
|
|
||||||
@@ -265,7 +325,7 @@ In these descriptions ``value`` refers to the single parameter passed to the for
|
|||||||
|
|
||||||
- Creation:
|
- Creation:
|
||||||
``cmd_runner_fmt.as_map(dict(a=1, b=2, c=3), default=42)``
|
``cmd_runner_fmt.as_map(dict(a=1, b=2, c=3), default=42)``
|
||||||
- Example:
|
- Examples:
|
||||||
+---------------------+---------------+
|
+---------------------+---------------+
|
||||||
| Value | Outcome |
|
| Value | Outcome |
|
||||||
+=====================+===============+
|
+=====================+===============+
|
||||||
@@ -359,6 +419,8 @@ Settings that can be passed to the ``CmdRunner`` constructor are:
|
|||||||
Command to be executed. It can be a single string, the executable name, or a list
|
Command to be executed. It can be a single string, the executable name, or a list
|
||||||
of strings containing the executable name as the first element and, optionally, fixed parameters.
|
of strings containing the executable name as the first element and, optionally, fixed parameters.
|
||||||
Those parameters are used in all executions of the runner.
|
Those parameters are used in all executions of the runner.
|
||||||
|
The *executable* pointed by this parameter (whether itself when ``str`` or its first element when ``list``) is
|
||||||
|
processed using ``AnsibleModule.get_bin_path()`` *unless* it is an absolute path or contains the character ``/``.
|
||||||
- ``arg_formats: dict``
|
- ``arg_formats: dict``
|
||||||
Mapping of argument names to formatting functions.
|
Mapping of argument names to formatting functions.
|
||||||
- ``default_args_order: str``
|
- ``default_args_order: str``
|
||||||
@@ -394,6 +456,10 @@ When creating a context, the additional settings that can be passed to the call
|
|||||||
Defaults to ``False``.
|
Defaults to ``False``.
|
||||||
- ``check_mode_return: any``
|
- ``check_mode_return: any``
|
||||||
If ``check_mode_skip=True``, then return this value instead.
|
If ``check_mode_skip=True``, then return this value instead.
|
||||||
|
- valid named arguments to ``AnsibleModule.run_command()``
|
||||||
|
Other than ``args``, any valid argument to ``run_command()`` can be passed when setting up the run context.
|
||||||
|
For example, ``data`` can be used to send information to the command's standard input.
|
||||||
|
Or ``cwd`` can be used to run the command inside a specific working directory.
|
||||||
|
|
||||||
Additionally, any other valid parameters for ``AnsibleModule.run_command()`` may be passed, but unexpected behavior
|
Additionally, any other valid parameters for ``AnsibleModule.run_command()`` may be passed, but unexpected behavior
|
||||||
might occur if redefining options already present in the runner or its context creation. Use with caution.
|
might occur if redefining options already present in the runner or its context creation. Use with caution.
|
||||||
|
|||||||
@@ -22,6 +22,7 @@ The same example from the Developer Guide would become:
|
|||||||
|
|
||||||
from ansible_collections.community.general.plugins.module_utils import deps
|
from ansible_collections.community.general.plugins.module_utils import deps
|
||||||
|
|
||||||
|
|
||||||
with deps.declare("foo"):
|
with deps.declare("foo"):
|
||||||
import foo
|
import foo
|
||||||
|
|
||||||
|
|||||||
@@ -21,7 +21,7 @@ That is where ``ModuleHelper`` comes to assistance: a lot of that boilerplate co
|
|||||||
Quickstart
|
Quickstart
|
||||||
""""""""""
|
""""""""""
|
||||||
|
|
||||||
See the `example from Ansible documentation <https://docs.ansible.com/ansible/latest/dev_guide/developing_modules_general.html#creating-a-module>`_
|
See the `example from Ansible documentation <https://docs.ansible.com/projects/ansible/latest/dev_guide/developing_modules_general.html#creating-a-module>`_
|
||||||
written with ``ModuleHelper``.
|
written with ``ModuleHelper``.
|
||||||
But bear in mind that it does not showcase all of MH's features:
|
But bear in mind that it does not showcase all of MH's features:
|
||||||
|
|
||||||
@@ -75,14 +75,20 @@ section above, but there are more elements that will take part in it.
|
|||||||
|
|
||||||
from ansible_collections.community.general.plugins.module_utils.module_helper import ModuleHelper
|
from ansible_collections.community.general.plugins.module_utils.module_helper import ModuleHelper
|
||||||
|
|
||||||
|
|
||||||
class MyTest(ModuleHelper):
|
class MyTest(ModuleHelper):
|
||||||
|
    # behavior for module parameters ONLY, see below for further information
|
||||||
output_params = ()
|
output_params = ()
|
||||||
change_params = ()
|
change_params = ()
|
||||||
diff_params = ()
|
diff_params = ()
|
||||||
facts_name = None
|
|
||||||
facts_params = ()
|
facts_params = ()
|
||||||
|
|
||||||
|
facts_name = None # used if generating facts, from parameters or otherwise
|
||||||
|
|
||||||
|
# transitional variables for the new VarDict implementation, see information below
|
||||||
use_old_vardict = True
|
use_old_vardict = True
|
||||||
mute_vardict_deprecation = False
|
mute_vardict_deprecation = False
|
||||||
|
|
||||||
module = dict(
|
module = dict(
|
||||||
argument_spec=dict(...),
|
argument_spec=dict(...),
|
||||||
# ...
|
# ...
|
||||||
@@ -211,9 +217,10 @@ One of the attributes in that metadata marks the variable for output, and MH mak
|
|||||||
There are two ways to prevent that from happening:
|
There are two ways to prevent that from happening:
|
||||||
|
|
||||||
#. Set ``mute_vardict_deprecation = True`` and the deprecation will be silenced. If the module still uses the old ``VarDict``,
|
#. Set ``mute_vardict_deprecation = True`` and the deprecation will be silenced. If the module still uses the old ``VarDict``,
|
||||||
it will not be able to update to community.general 11.0.0 (Spring 2026) upon its release.
|
it will not be able to update to community.general 11.0.0 (Spring 2025) upon its release.
|
||||||
#. Set ``use_old_vardict = False`` to make the MH module use the new ``VarDict`` immediatelly.
|
#. Set ``use_old_vardict = False`` to make the MH module use the new ``VarDict`` immediately.
|
||||||
The new ``VarDict`` and its use is documented and this is the recommended way to handle this.
|
We strongly recommend you use the new ``VarDict``, for that you make sure to consult its documentation at
|
||||||
|
:ref:`ansible_collections.community.general.docsite.guide_vardict`.
|
||||||
|
|
||||||
.. code-block:: python
|
.. code-block:: python
|
||||||
|
|
||||||
@@ -233,6 +240,11 @@ If you want to include some module parameters in the output, list them in the ``
|
|||||||
output_params = ('state', 'name')
|
output_params = ('state', 'name')
|
||||||
...
|
...
|
||||||
|
|
||||||
|
.. important::
|
||||||
|
|
||||||
|
The variable names listed in ``output_params`` **must be module parameters**, as in parameters listed in the module's ``argument_spec``.
|
||||||
|
Names not found in ``argument_spec`` are silently ignored.
|
||||||
|
|
||||||
Another neat feature provided by MH by using ``VarDict`` is the automatic tracking of changes when setting the metadata ``change=True``.
|
Another neat feature provided by MH by using ``VarDict`` is the automatic tracking of changes when setting the metadata ``change=True``.
|
||||||
Again, to enable this feature for module parameters, you must list them in the ``change_params`` class variable.
|
Again, to enable this feature for module parameters, you must list them in the ``change_params`` class variable.
|
||||||
|
|
||||||
@@ -243,6 +255,11 @@ Again, to enable this feature for module parameters, you must list them in the `
|
|||||||
change_params = ('value', )
|
change_params = ('value', )
|
||||||
...
|
...
|
||||||
|
|
||||||
|
.. important::
|
||||||
|
|
||||||
|
The variable names listed in ``change_params`` **must be module parameters**, as in parameters listed in the module's ``argument_spec``.
|
||||||
|
Names not found in ``argument_spec`` are silently ignored.
|
||||||
|
|
||||||
.. seealso::
|
.. seealso::
|
||||||
|
|
||||||
See more about this in
|
See more about this in
|
||||||
@@ -256,9 +273,14 @@ With that, MH will automatically generate the diff output for variables that hav
|
|||||||
class MyTest(ModuleHelper):
|
class MyTest(ModuleHelper):
|
||||||
diff_params = ('value', )
|
diff_params = ('value', )
|
||||||
|
|
||||||
def __run__(self):
|
def __run__(self):
|
||||||
# example from community.general.gio_mime
|
# example from community.general.gio_mime
|
||||||
self.vars.set_meta("handler", initial_value=gio_mime_get(self.runner, self.vars.mime_type), diff=True, change=True)
|
self.vars.set_meta("handler", initial_value=gio_mime_get(self.runner, self.vars.mime_type), diff=True, change=True)
|
||||||
|
|
||||||
|
.. important::
|
||||||
|
|
||||||
|
The variable names listed in ``diff_params`` **must be module parameters**, as in parameters listed in the module's ``argument_spec``.
|
||||||
|
Names not found in ``argument_spec`` are silently ignored.
|
||||||
|
|
||||||
Moreover, if a module is set to return *facts* instead of return values, then again use the metadata ``fact=True`` and ``fact_params`` for module parameters.
|
Moreover, if a module is set to return *facts* instead of return values, then again use the metadata ``fact=True`` and ``fact_params`` for module parameters.
|
||||||
Additionally, you must specify ``facts_name``, as in:
|
Additionally, you must specify ``facts_name``, as in:
|
||||||
@@ -283,6 +305,11 @@ That generates an Ansible fact like:
|
|||||||
debug:
|
debug:
|
||||||
msg: Volume fact is {{ ansible_facts.volume_facts.volume }}
|
msg: Volume fact is {{ ansible_facts.volume_facts.volume }}
|
||||||
|
|
||||||
|
.. important::
|
||||||
|
|
||||||
|
The variable names listed in ``fact_params`` **must be module parameters**, as in parameters listed in the module's ``argument_spec``.
|
||||||
|
Names not found in ``argument_spec`` are silently ignored.
|
||||||
|
|
||||||
.. important::
|
.. important::
|
||||||
|
|
||||||
If ``facts_name`` is not set, the module does not generate any facts.
|
If ``facts_name`` is not set, the module does not generate any facts.
|
||||||
@@ -346,6 +373,8 @@ However, you can set output variables specifically for that exception, if you so
|
|||||||
|
|
||||||
.. code-block:: python
|
.. code-block:: python
|
||||||
|
|
||||||
|
from ansible_collections.community.general.plugins.module_utils.module_helper import ModuleHelperException
|
||||||
|
|
||||||
def __init_module__(self):
|
def __init_module__(self):
|
||||||
if not complex_validation():
|
if not complex_validation():
|
||||||
self.do_raise("Validation failed!")
|
self.do_raise("Validation failed!")
|
||||||
@@ -354,11 +383,16 @@ However, you can set output variables specifically for that exception, if you so
|
|||||||
awesomeness = calculate_awesomeness()
|
awesomeness = calculate_awesomeness()
|
||||||
if awesomeness > 1000:
|
if awesomeness > 1000:
|
||||||
self.do_raise("Over awesome, I cannot handle it!", update_output={"awesomeness": awesomeness})
|
self.do_raise("Over awesome, I cannot handle it!", update_output={"awesomeness": awesomeness})
|
||||||
|
# which is just a convenience shortcut for
|
||||||
|
raise ModuleHelperException("...", update_output={...})
|
||||||
|
|
||||||
All exceptions derived from ``Exception`` are captured and translated into a ``fail_json()`` call.
|
All exceptions derived from ``Exception`` are captured and translated into a ``fail_json()`` call.
|
||||||
However, if you do want to call ``self.module.fail_json()`` yourself it will work,
|
However, if you do want to call ``self.module.fail_json()`` yourself it will work,
|
||||||
just keep in mind that there will be no automatic handling of output variables in that case.
|
just keep in mind that there will be no automatic handling of output variables in that case.
|
||||||
|
|
||||||
|
Behind the curtains, all ``do_raise()`` does is to raise a ``ModuleHelperException``.
|
||||||
|
If you want to create specialized error handling for your code, the best way is to extend that class and raise it when needed.
|
||||||
|
|
||||||
.. _ansible_collections.community.general.docsite.guide_modulehelper.statemh:
|
.. _ansible_collections.community.general.docsite.guide_modulehelper.statemh:
|
||||||
|
|
||||||
StateModuleHelper
|
StateModuleHelper
|
||||||
@@ -461,6 +495,11 @@ Additionally, MH will also delegate:
|
|||||||
- ``diff_mode`` to ``self.module._diff``
|
- ``diff_mode`` to ``self.module._diff``
|
||||||
- ``verbosity`` to ``self.module._verbosity``
|
- ``verbosity`` to ``self.module._verbosity``
|
||||||
|
|
||||||
|
Starting in community.general 10.3.0, MH will also delegate the method ``debug`` to ``self.module``.
|
||||||
|
If any existing module already has a ``debug`` attribute defined, a warning message will be generated,
|
||||||
|
requesting it to be renamed. Upon the release of community.general 12.0.0, the delegation will be
|
||||||
|
preemptive and will override any existing method or property in the subclasses.
|
||||||
|
|
||||||
Decorators
|
Decorators
|
||||||
""""""""""
|
""""""""""
|
||||||
|
|
||||||
@@ -531,9 +570,9 @@ The other option is to use the parameter ``value``, in which case the method wil
|
|||||||
References
|
References
|
||||||
^^^^^^^^^^
|
^^^^^^^^^^
|
||||||
|
|
||||||
- `Ansible Developer Guide <https://docs.ansible.com/ansible/latest/dev_guide/index.html>`_
|
- `Ansible Developer Guide <https://docs.ansible.com/projects/ansible/latest/dev_guide/index.html>`_
|
||||||
- `Creating a module <https://docs.ansible.com/ansible/latest/dev_guide/developing_modules_general.html#creating-a-module>`_
|
- `Creating a module <https://docs.ansible.com/projects/ansible/latest/dev_guide/developing_modules_general.html#creating-a-module>`_
|
||||||
- `Returning ansible facts <https://docs.ansible.com/ansible/latest/reference_appendices/common_return_values.html#ansible-facts>`_
|
- `Returning ansible facts <https://docs.ansible.com/projects/ansible/latest/reference_appendices/common_return_values.html#ansible-facts>`_
|
||||||
- :ref:`ansible_collections.community.general.docsite.guide_vardict`
|
- :ref:`ansible_collections.community.general.docsite.guide_vardict`
|
||||||
|
|
||||||
|
|
||||||
|
|||||||
@@ -67,16 +67,16 @@ The following code block is a simple playbook that creates one `Type 0 <https://
|
|||||||
hosts: localhost
|
hosts: localhost
|
||||||
tasks:
|
tasks:
|
||||||
|
|
||||||
- community.general.packet_sshkey:
|
- community.general.packet_sshkey:
|
||||||
key_file: ./id_rsa.pub
|
key_file: ./id_rsa.pub
|
||||||
label: tutorial key
|
label: tutorial key
|
||||||
|
|
||||||
- community.general.packet_device:
|
- community.general.packet_device:
|
||||||
project_id: <your_project_id>
|
project_id: <your_project_id>
|
||||||
hostnames: myserver
|
hostnames: myserver
|
||||||
operating_system: ubuntu_16_04
|
operating_system: ubuntu_16_04
|
||||||
plan: baremetal_0
|
plan: baremetal_0
|
||||||
facility: sjc1
|
facility: sjc1
|
||||||
|
|
||||||
After running ``ansible-playbook playbook_create.yml``, you should have a server provisioned on Packet. You can verify through a CLI or in the `Packet portal <https://app.packet.net/portal#/projects/list/table>`__.
|
After running ``ansible-playbook playbook_create.yml``, you should have a server provisioned on Packet. You can verify through a CLI or in the `Packet portal <https://app.packet.net/portal#/projects/list/table>`__.
|
||||||
|
|
||||||
@@ -110,10 +110,10 @@ If your playbook acts on existing Packet devices, you can only pass the ``hostna
|
|||||||
hosts: localhost
|
hosts: localhost
|
||||||
tasks:
|
tasks:
|
||||||
|
|
||||||
- community.general.packet_device:
|
- community.general.packet_device:
|
||||||
project_id: <your_project_id>
|
project_id: <your_project_id>
|
||||||
hostnames: myserver
|
hostnames: myserver
|
||||||
state: rebooted
|
state: rebooted
|
||||||
|
|
||||||
You can also identify specific Packet devices with the ``device_ids`` parameter. The device's UUID can be found in the `Packet Portal <https://app.packet.net/portal>`_ or by using a `CLI <https://www.packet.net/developers/integrations/>`_. The following playbook removes a Packet device using the ``device_ids`` field:
|
You can also identify specific Packet devices with the ``device_ids`` parameter. The device's UUID can be found in the `Packet Portal <https://app.packet.net/portal>`_ or by using a `CLI <https://www.packet.net/developers/integrations/>`_. The following playbook removes a Packet device using the ``device_ids`` field:
|
||||||
|
|
||||||
@@ -125,10 +125,10 @@ You can also identify specific Packet devices with the ``device_ids`` parameter.
|
|||||||
hosts: localhost
|
hosts: localhost
|
||||||
tasks:
|
tasks:
|
||||||
|
|
||||||
- community.general.packet_device:
|
- community.general.packet_device:
|
||||||
project_id: <your_project_id>
|
project_id: <your_project_id>
|
||||||
device_ids: <myserver_device_id>
|
device_ids: <myserver_device_id>
|
||||||
state: absent
|
state: absent
|
||||||
|
|
||||||
|
|
||||||
More Complex Playbooks
|
More Complex Playbooks
|
||||||
@@ -153,43 +153,43 @@ The following playbook will create an SSH key, 3 Packet servers, and then wait u
|
|||||||
hosts: localhost
|
hosts: localhost
|
||||||
tasks:
|
tasks:
|
||||||
|
|
||||||
- community.general.packet_sshkey:
|
- community.general.packet_sshkey:
|
||||||
key_file: ./id_rsa.pub
|
key_file: ./id_rsa.pub
|
||||||
label: new
|
label: new
|
||||||
|
|
||||||
- community.general.packet_device:
|
- community.general.packet_device:
|
||||||
hostnames: [coreos-one, coreos-two, coreos-three]
|
hostnames: [coreos-one, coreos-two, coreos-three]
|
||||||
operating_system: coreos_beta
|
operating_system: coreos_beta
|
||||||
plan: baremetal_0
|
plan: baremetal_0
|
||||||
facility: ewr1
|
facility: ewr1
|
||||||
project_id: <your_project_id>
|
project_id: <your_project_id>
|
||||||
wait_for_public_IPv: 4
|
wait_for_public_IPv: 4
|
||||||
user_data: |
|
user_data: |
|
||||||
#cloud-config
|
# cloud-config
|
||||||
coreos:
|
coreos:
|
||||||
etcd2:
|
etcd2:
|
||||||
discovery: https://discovery.etcd.io/<token>
|
discovery: https://discovery.etcd.io/<token>
|
||||||
advertise-client-urls: http://$private_ipv4:2379,http://$private_ipv4:4001
|
advertise-client-urls: http://$private_ipv4:2379,http://$private_ipv4:4001
|
||||||
initial-advertise-peer-urls: http://$private_ipv4:2380
|
initial-advertise-peer-urls: http://$private_ipv4:2380
|
||||||
listen-client-urls: http://0.0.0.0:2379,http://0.0.0.0:4001
|
listen-client-urls: http://0.0.0.0:2379,http://0.0.0.0:4001
|
||||||
listen-peer-urls: http://$private_ipv4:2380
|
listen-peer-urls: http://$private_ipv4:2380
|
||||||
fleet:
|
fleet:
|
||||||
public-ip: $private_ipv4
|
public-ip: $private_ipv4
|
||||||
units:
|
units:
|
||||||
- name: etcd2.service
|
- name: etcd2.service
|
||||||
command: start
|
command: start
|
||||||
- name: fleet.service
|
- name: fleet.service
|
||||||
command: start
|
command: start
|
||||||
register: newhosts
|
register: newhosts
|
||||||
|
|
||||||
- name: wait for ssh
|
- name: wait for ssh
|
||||||
ansible.builtin.wait_for:
|
ansible.builtin.wait_for:
|
||||||
delay: 1
|
delay: 1
|
||||||
host: "{{ item.public_ipv4 }}"
|
host: "{{ item.public_ipv4 }}"
|
||||||
port: 22
|
port: 22
|
||||||
state: started
|
state: started
|
||||||
timeout: 500
|
timeout: 500
|
||||||
loop: "{{ newhosts.results[0].devices }}"
|
loop: "{{ newhosts.results[0].devices }}"
|
||||||
|
|
||||||
|
|
||||||
As with most Ansible modules, the default states of the Packet modules are idempotent, meaning the resources in your project will remain the same after re-runs of a playbook. Thus, we can keep the ``packet_sshkey`` module call in our playbook. If the public key is already in your Packet account, the call will have no effect.
|
As with most Ansible modules, the default states of the Packet modules are idempotent, meaning the resources in your project will remain the same after re-runs of a playbook. Thus, we can keep the ``packet_sshkey`` module call in our playbook. If the public key is already in your Packet account, the call will have no effect.
|
||||||
|
|||||||
394
docs/docsite/rst/guide_uthelper.rst
Normal file
394
docs/docsite/rst/guide_uthelper.rst
Normal file
@@ -0,0 +1,394 @@
|
|||||||
|
..
|
||||||
|
Copyright (c) Ansible Project
|
||||||
|
GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||||
|
SPDX-License-Identifier: GPL-3.0-or-later
|
||||||
|
|
||||||
|
.. _ansible_collections.community.general.docsite.guide_uthelper:
|
||||||
|
|
||||||
|
UTHelper Guide
|
||||||
|
==============
|
||||||
|
|
||||||
|
Introduction
|
||||||
|
^^^^^^^^^^^^
|
||||||
|
|
||||||
|
``UTHelper`` was written to reduce the boilerplate code used in unit tests for modules.
|
||||||
|
It was originally written to handle tests of modules that run external commands using ``AnsibleModule.run_command()``.
|
||||||
|
At the time of writing (Feb 2025) that remains the only type of tests you can use
|
||||||
|
``UTHelper`` for, but it aims to provide support for other types of interactions.
|
||||||
|
|
||||||
|
Until now, there are many different ways to implement unit tests that validate a module based on the execution of external commands. See some examples:
|
||||||
|
|
||||||
|
* `test_apk.py <https://github.com/ansible-collections/community.general/blob/10.3.0/tests/unit/plugins/modules/test_apk.py>`_ - A very simple one
|
||||||
|
* `test_bootc_manage.py <https://github.com/ansible-collections/community.general/blob/10.3.0/tests/unit/plugins/modules/test_bootc_manage.py>`_ -
|
||||||
|
This one has more test cases, but do notice how the code is repeated amongst them.
|
||||||
|
* `test_modprobe.py <https://github.com/ansible-collections/community.general/blob/10.3.0/tests/unit/plugins/modules/test_modprobe.py>`_ -
|
||||||
|
This one has 15 tests in it, but to achieve that it declares 8 classes repeating quite a lot of code.
|
||||||
|
|
||||||
|
As you can notice, there is no consistency in the way these tests are executed -
|
||||||
|
they all do the same thing eventually, but each one is written in a very distinct way.
|
||||||
|
|
||||||
|
``UTHelper`` aims to:
|
||||||
|
|
||||||
|
* provide a consistent idiom to define unit tests
|
||||||
|
* reduce the code to a bare minimal, and
|
||||||
|
* define tests as data instead
|
||||||
|
* allow the test cases definition to be expressed not only as a Python data structure but also as YAML content
|
||||||
|
|
||||||
|
Quickstart
|
||||||
|
""""""""""
|
||||||
|
|
||||||
|
To use UTHelper, your test module will need only a bare minimal of code:
|
||||||
|
|
||||||
|
.. code-block:: python
|
||||||
|
|
||||||
|
# tests/unit/plugin/modules/test_ansible_module.py
|
||||||
|
from ansible_collections.community.general.plugins.modules import ansible_module
|
||||||
|
from .uthelper import UTHelper, RunCommandMock
|
||||||
|
|
||||||
|
|
||||||
|
UTHelper.from_module(ansible_module, __name__, mocks=[RunCommandMock])
|
||||||
|
|
||||||
|
Then, in the test specification file, you have:
|
||||||
|
|
||||||
|
.. code-block:: yaml
|
||||||
|
|
||||||
|
# tests/unit/plugin/modules/test_ansible_module.yaml
|
||||||
|
test_cases:
|
||||||
|
- id: test_ansible_module
|
||||||
|
flags:
|
||||||
|
diff: true
|
||||||
|
input:
|
||||||
|
state: present
|
||||||
|
name: Roger the Shrubber
|
||||||
|
output:
|
||||||
|
shrubbery:
|
||||||
|
looks: nice
|
||||||
|
price: not too expensive
|
||||||
|
changed: true
|
||||||
|
diff:
|
||||||
|
before:
|
||||||
|
shrubbery: null
|
||||||
|
after:
|
||||||
|
shrubbery:
|
||||||
|
looks: nice
|
||||||
|
price: not too expensive
|
||||||
|
mocks:
|
||||||
|
run_command:
|
||||||
|
- command: [/testbin/shrubber, --version]
|
||||||
|
rc: 0
|
||||||
|
out: "2.80.0\n"
|
||||||
|
err: ''
|
||||||
|
- command: [/testbin/shrubber, --make-shrubbery]
|
||||||
|
rc: 0
|
||||||
|
out: 'Shrubbery created'
|
||||||
|
err: ''
|
||||||
|
|
||||||
|
.. note::
|
||||||
|
|
||||||
|
If you prefer to pick a different YAML file for the test cases, or if you prefer to define them in plain Python,
|
||||||
|
you can use the convenience methods ``UTHelper.from_file()`` and ``UTHelper.from_spec()``, respectively.
|
||||||
|
See more details below.
|
||||||
|
|
||||||
|
|
||||||
|
Using ``UTHelper``
|
||||||
|
^^^^^^^^^^^^^^^^^^
|
||||||
|
|
||||||
|
Test Module
|
||||||
|
"""""""""""
|
||||||
|
|
||||||
|
``UTHelper`` is **strictly for unit tests**. To use it, you import the ``.uthelper.UTHelper`` class.
|
||||||
|
As mentioned in different parts of this guide, there are three different mechanisms to load the test cases.
|
||||||
|
|
||||||
|
.. seealso::
|
||||||
|
|
||||||
|
See the UTHelper class reference below for API details on the three different mechanisms.
|
||||||
|
|
||||||
|
|
||||||
|
The easiest and most recommended way of using ``UTHelper`` is literally the example shown.
|
||||||
|
See a real world example at
|
||||||
|
`test_gconftool2.py <https://github.com/ansible-collections/community.general/blob/10.3.0/tests/unit/plugins/modules/test_gconftool2.py>`_.
|
||||||
|
|
||||||
|
The ``from_module()`` method will pick the filename of the test module up (in the example above, ``tests/unit/plugins/modules/test_gconftool2.py``)
|
||||||
|
and it will search for ``tests/unit/plugins/modules/test_gconftool2.yaml`` (or ``.yml`` if that is not found).
|
||||||
|
In that file it will expect to find the test specification expressed in YAML format, conforming to the structure described below.
|
||||||
|
|
||||||
|
If you prefer to read the test specifications from a different file path, use ``from_file()`` passing the file handle for the YAML file.
|
||||||
|
|
||||||
|
And, if for any reason you prefer or need to pass the data structure rather than dealing with YAML files, use the ``from_spec()`` method.
|
||||||
|
A real world example for that can be found at
|
||||||
|
`test_snap.py <https://github.com/ansible-collections/community.general/blob/main/tests/unit/plugins/modules/test_snap.py>`_.
|
||||||
|
|
||||||
|
|
||||||
|
Test Specification
|
||||||
|
""""""""""""""""""
|
||||||
|
|
||||||
|
The structure of the test specification data is described below.
|
||||||
|
|
||||||
|
Top level
|
||||||
|
---------
|
||||||
|
|
||||||
|
At the top level there are two accepted keys:
|
||||||
|
|
||||||
|
- ``anchors: dict``
|
||||||
|
Optional. Placeholder for you to define YAML anchors that can be repeated in the test cases.
|
||||||
|
Its contents are never accessed directly by ``UTHelper``.
|
||||||
|
- ``test_cases: list``
|
||||||
|
Mandatory. List of test cases, see below for definition.
|
||||||
|
|
||||||
|
Test cases
|
||||||
|
----------
|
||||||
|
|
||||||
|
You write the test cases with five elements:
|
||||||
|
|
||||||
|
- ``id: str``
|
||||||
|
Mandatory. Used to identify the test case.
|
||||||
|
|
||||||
|
- ``flags: dict``
|
||||||
|
Optional. Flags controlling the behavior of the test case. All flags are optional. Accepted flags:
|
||||||
|
|
||||||
|
* ``check: bool``: set to ``true`` if the module is to be executed in **check mode**.
|
||||||
|
* ``diff: bool``: set to ``true`` if the module is to be executed in **diff mode**.
|
||||||
|
* ``skip: str``: set the test case to be skipped, providing the message for ``pytest.skip()``.
|
||||||
|
* ``xfail: str``: set the test case to expect failure, providing the message for ``pytest.xfail()``.
|
||||||
|
|
||||||
|
- ``input: dict``
|
||||||
|
Optional. Parameters for the Ansible module, it can be empty.
|
||||||
|
|
||||||
|
- ``output: dict``
|
||||||
|
Optional. Expected return values from the Ansible module.
|
||||||
|
All RV names used here are expected to be found in the module output, but not all RVs in the output must be here.
|
||||||
|
It can include special RVs such as ``changed`` and ``diff``.
|
||||||
|
It can be empty.
|
||||||
|
|
||||||
|
- ``mocks: dict``
|
||||||
|
Optional. Mocked interactions, ``run_command`` being the only one supported for now.
|
||||||
|
Each key in this dictionary refers to one subclass of ``TestCaseMock`` and its
|
||||||
|
structure is dictated by the ``TestCaseMock`` subclass implementation.
|
||||||
|
All keys are expected to be named using snake case, as in ``run_command``.
|
||||||
|
The ``TestCaseMock`` subclass is responsible for defining the name used in the test specification.
|
||||||
|
The structure for that specification is dependent on the implementing class.
|
||||||
|
See more details below for the implementation of ``RunCommandMock``
|
||||||
|
|
||||||
|
Example using YAML
|
||||||
|
------------------
|
||||||
|
|
||||||
|
We recommend you use ``UTHelper`` reading the test specifications from a YAML file.
|
||||||
|
See an example below of how one actually looks like (excerpt from ``test_opkg.yaml``):
|
||||||
|
|
||||||
|
.. code-block:: yaml
|
||||||
|
|
||||||
|
---
|
||||||
|
anchors:
|
||||||
|
environ: &env-def {environ_update: {LANGUAGE: C, LC_ALL: C}, check_rc: false}
|
||||||
|
test_cases:
|
||||||
|
- id: install_zlibdev
|
||||||
|
input:
|
||||||
|
name: zlib-dev
|
||||||
|
state: present
|
||||||
|
output:
|
||||||
|
msg: installed 1 package(s)
|
||||||
|
mocks:
|
||||||
|
run_command:
|
||||||
|
- command: [/testbin/opkg, --version]
|
||||||
|
environ: *env-def
|
||||||
|
rc: 0
|
||||||
|
out: ''
|
||||||
|
err: ''
|
||||||
|
- command: [/testbin/opkg, list-installed, zlib-dev]
|
||||||
|
environ: *env-def
|
||||||
|
rc: 0
|
||||||
|
out: ''
|
||||||
|
err: ''
|
||||||
|
- command: [/testbin/opkg, install, zlib-dev]
|
||||||
|
environ: *env-def
|
||||||
|
rc: 0
|
||||||
|
out: |
|
||||||
|
Installing zlib-dev (1.2.11-6) to root...
|
||||||
|
Downloading https://downloads.openwrt.org/releases/22.03.0/packages/mips_24kc/base/zlib-dev_1.2.11-6_mips_24kc.ipk
|
||||||
|
Installing zlib (1.2.11-6) to root...
|
||||||
|
Downloading https://downloads.openwrt.org/releases/22.03.0/packages/mips_24kc/base/zlib_1.2.11-6_mips_24kc.ipk
|
||||||
|
Configuring zlib.
|
||||||
|
Configuring zlib-dev.
|
||||||
|
err: ''
|
||||||
|
- command: [/testbin/opkg, list-installed, zlib-dev]
|
||||||
|
environ: *env-def
|
||||||
|
rc: 0
|
||||||
|
out: |
|
||||||
|
zlib-dev - 1.2.11-6
|
||||||
|
err: ''
|
||||||
|
- id: install_zlibdev_present
|
||||||
|
input:
|
||||||
|
name: zlib-dev
|
||||||
|
state: present
|
||||||
|
output:
|
||||||
|
msg: package(s) already present
|
||||||
|
mocks:
|
||||||
|
run_command:
|
||||||
|
- command: [/testbin/opkg, --version]
|
||||||
|
environ: *env-def
|
||||||
|
rc: 0
|
||||||
|
out: ''
|
||||||
|
err: ''
|
||||||
|
- command: [/testbin/opkg, list-installed, zlib-dev]
|
||||||
|
environ: *env-def
|
||||||
|
rc: 0
|
||||||
|
out: |
|
||||||
|
zlib-dev - 1.2.11-6
|
||||||
|
err: ''
|
||||||
|
|
||||||
|
TestCaseMocks Specifications
|
||||||
|
^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||||
|
|
||||||
|
The ``TestCaseMock`` subclass is free to define the expected data structure.
|
||||||
|
|
||||||
|
RunCommandMock Specification
|
||||||
|
""""""""""""""""""""""""""""
|
||||||
|
|
||||||
|
``RunCommandMock`` mocks can be specified with the key ``run_command`` and it expects a ``list`` in which elements follow the structure:
|
||||||
|
|
||||||
|
- ``command: Union[list, str]``
|
||||||
|
Mandatory. The command that is expected to be executed by the module. It corresponds to the parameter ``args`` of the ``AnsibleModule.run_command()`` call.
|
||||||
|
It can be either a list or a string, though the list form is generally recommended.
|
||||||
|
- ``environ: dict``
|
||||||
|
Mandatory. All other parameters passed to the ``AnsibleModule.run_command()`` call.
|
||||||
|
Most commonly used are ``environ_update`` and ``check_rc``.
|
||||||
|
Must include all parameters the Ansible module uses in the ``AnsibleModule.run_command()`` call, otherwise the test will fail.
|
||||||
|
- ``rc: int``
|
||||||
|
Mandatory. The return code for the command execution.
|
||||||
|
As per usual in bash scripting, a value of ``0`` means success, whereas any other number is an error code.
|
||||||
|
- ``out: str``
|
||||||
|
Mandatory. The *stdout* result of the command execution, as one single string containing zero or more lines.
|
||||||
|
- ``err: str``
|
||||||
|
Mandatory. The *stderr* result of the command execution, as one single string containing zero or more lines.
|
||||||
|
|
||||||
|
|
||||||
|
``UTHelper`` Reference
|
||||||
|
^^^^^^^^^^^^^^^^^^^^^^
|
||||||
|
|
||||||
|
.. py:module:: .uthelper
|
||||||
|
|
||||||
|
.. py:class:: UTHelper
|
||||||
|
|
||||||
|
A class to encapsulate unit tests.
|
||||||
|
|
||||||
|
.. py:staticmethod:: from_spec(ansible_module, test_module, test_spec, mocks=None)
|
||||||
|
|
||||||
|
Creates an ``UTHelper`` instance from a given test specification.
|
||||||
|
|
||||||
|
:param ansible_module: The Ansible module to be tested.
|
||||||
|
:type ansible_module: module
|
||||||
|
:param test_module: The test module.
|
||||||
|
:type test_module: module
|
||||||
|
:param test_spec: The test specification.
|
||||||
|
:type test_spec: dict
|
||||||
|
:param mocks: List of ``TestCaseMocks`` to be used during testing. Currently only ``RunCommandMock`` exists.
|
||||||
|
:type mocks: list or None
|
||||||
|
:return: An ``UTHelper`` instance.
|
||||||
|
:rtype: UTHelper
|
||||||
|
|
||||||
|
Example usage of ``from_spec()``:
|
||||||
|
|
||||||
|
.. code-block:: python
|
||||||
|
|
||||||
|
import sys
|
||||||
|
|
||||||
|
from ansible_collections.community.general.plugins.modules import ansible_module
|
||||||
|
from .uthelper import UTHelper, RunCommandMock
|
||||||
|
|
||||||
|
TEST_SPEC = dict(
|
||||||
|
test_cases=[
|
||||||
|
...
|
||||||
|
]
|
||||||
|
)
|
||||||
|
|
||||||
|
helper = UTHelper.from_spec(ansible_module, sys.modules[__name__], TEST_SPEC, mocks=[RunCommandMock])
|
||||||
|
|
||||||
|
.. py:staticmethod:: from_file(ansible_module, test_module, test_spec_filehandle, mocks=None)
|
||||||
|
|
||||||
|
Creates an ``UTHelper`` instance from a test specification file.
|
||||||
|
|
||||||
|
:param ansible_module: The Ansible module to be tested.
|
||||||
|
:type ansible_module: module
|
||||||
|
:param test_module: The test module.
|
||||||
|
:type test_module: module
|
||||||
|
:param test_spec_filehandle: A file handle providing the test specification in YAML format.
|
||||||
|
:type test_spec_filehandle: file
|
||||||
|
:param mocks: List of ``TestCaseMocks`` to be used during testing. Currently only ``RunCommandMock`` exists.
|
||||||
|
:type mocks: list or None
|
||||||
|
:return: An ``UTHelper`` instance.
|
||||||
|
:rtype: UTHelper
|
||||||
|
|
||||||
|
Example usage of ``from_file()``:
|
||||||
|
|
||||||
|
.. code-block:: python
|
||||||
|
|
||||||
|
import sys
|
||||||
|
|
||||||
|
from ansible_collections.community.general.plugins.modules import ansible_module
|
||||||
|
from .uthelper import UTHelper, RunCommandMock
|
||||||
|
|
||||||
|
with open("test_spec.yaml", "r") as test_spec_filehandle:
|
||||||
|
helper = UTHelper.from_file(ansible_module, sys.modules[__name__], test_spec_filehandle, mocks=[RunCommandMock])
|
||||||
|
|
||||||
|
.. py:staticmethod:: from_module(ansible_module, test_module_name, mocks=None)
|
||||||
|
|
||||||
|
Creates an ``UTHelper`` instance from a given Ansible module and test module.
|
||||||
|
|
||||||
|
:param ansible_module: The Ansible module to be tested.
|
||||||
|
:type ansible_module: module
|
||||||
|
:param test_module_name: The name of the test module. It works if passed ``__name__``.
|
||||||
|
:type test_module_name: str
|
||||||
|
:param mocks: List of ``TestCaseMocks`` to be used during testing. Currently only ``RunCommandMock`` exists.
|
||||||
|
:type mocks: list or None
|
||||||
|
:return: An ``UTHelper`` instance.
|
||||||
|
:rtype: UTHelper
|
||||||
|
|
||||||
|
Example usage of ``from_module()``:
|
||||||
|
|
||||||
|
.. code-block:: python
|
||||||
|
|
||||||
|
from ansible_collections.community.general.plugins.modules import ansible_module
|
||||||
|
from .uthelper import UTHelper, RunCommandMock
|
||||||
|
|
||||||
|
# Example usage
|
||||||
|
helper = UTHelper.from_module(ansible_module, __name__, mocks=[RunCommandMock])
|
||||||
|
|
||||||
|
|
||||||
|
Creating TestCaseMocks
|
||||||
|
^^^^^^^^^^^^^^^^^^^^^^
|
||||||
|
|
||||||
|
To create a new ``TestCaseMock`` you must extend that class and implement the relevant parts:
|
||||||
|
|
||||||
|
.. code-block:: python
|
||||||
|
|
||||||
|
class ShrubberyMock(TestCaseMock):
|
||||||
|
# this name is mandatory, it is the name used in the test specification
|
||||||
|
name = "shrubbery"
|
||||||
|
|
||||||
|
def setup(self, mocker):
|
||||||
|
# perform setup, commonly using mocker to patch some other piece of code
|
||||||
|
...
|
||||||
|
|
||||||
|
def check(self, test_case, results):
|
||||||
|
# verify the test execution met the expectations of the test case
|
||||||
|
# for example the function was called as many times as it should
|
||||||
|
...
|
||||||
|
|
||||||
|
def fixtures(self):
|
||||||
|
# returns a dict mapping names to pytest fixtures that should be used for the test case
|
||||||
|
# for example, in RunCommandMock it creates a fixture that patches AnsibleModule.get_bin_path
|
||||||
|
...
|
||||||
|
|
||||||
|
Caveats
|
||||||
|
^^^^^^^
|
||||||
|
|
||||||
|
Known issues/opportunities for improvement:
|
||||||
|
|
||||||
|
* Only one ``UTHelper`` per test module: UTHelper injects a test function with a fixed name into the module's namespace,
|
||||||
|
so placing a second ``UTHelper`` instance is going to overwrite the function created by the first one.
|
||||||
|
* Order of elements in module's namespace is not consistent across executions in Python 3.5, so adding more tests to the test module
|
||||||
|
might make ``UTHelper`` add its function before or after the other test functions.
|
||||||
|
In the community.general collection the CI process uses ``pytest-xdist`` to parallelize and distribute the tests,
|
||||||
|
and it requires the order of the tests to be consistent.
|
||||||
|
|
||||||
|
.. versionadded:: 7.5.0
|
||||||
@@ -51,7 +51,7 @@ And by the time the module is about to exit:
|
|||||||
|
|
||||||
That makes the return value of the module:
|
That makes the return value of the module:
|
||||||
|
|
||||||
.. code-block:: javascript
|
.. code-block:: json
|
||||||
|
|
||||||
{
|
{
|
||||||
"abc": 123,
|
"abc": 123,
|
||||||
|
|||||||
@@ -8,7 +8,7 @@
|
|||||||
community.general Test (Plugin) Guide
|
community.general Test (Plugin) Guide
|
||||||
=====================================
|
=====================================
|
||||||
|
|
||||||
The :ref:`community.general collection <plugins_in_community.general>` offers currently one test plugin.
|
The :anscollection:`community.general collection <community.general#collection>` offers currently one test plugin.
|
||||||
|
|
||||||
.. contents:: Topics
|
.. contents:: Topics
|
||||||
|
|
||||||
|
|||||||
@@ -5,7 +5,7 @@
|
|||||||
|
|
||||||
namespace: community
|
namespace: community
|
||||||
name: general
|
name: general
|
||||||
version: 9.4.0
|
version: 10.7.9
|
||||||
readme: README.md
|
readme: README.md
|
||||||
authors:
|
authors:
|
||||||
- Ansible (https://github.com/ansible)
|
- Ansible (https://github.com/ansible)
|
||||||
@@ -16,6 +16,6 @@ license_file: COPYING
|
|||||||
tags:
|
tags:
|
||||||
- community
|
- community
|
||||||
repository: https://github.com/ansible-collections/community.general
|
repository: https://github.com/ansible-collections/community.general
|
||||||
documentation: https://docs.ansible.com/ansible/latest/collections/community/general/
|
documentation: https://docs.ansible.com/projects/ansible/latest/collections/community/general/
|
||||||
homepage: https://github.com/ansible-collections/community.general
|
homepage: https://github.com/ansible-collections/community.general
|
||||||
issues: https://github.com/ansible-collections/community.general/issues
|
issues: https://github.com/ansible-collections/community.general/issues
|
||||||
|
|||||||
413
meta/runtime.yml
413
meta/runtime.yml
@@ -3,7 +3,7 @@
|
|||||||
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
|
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||||
# SPDX-License-Identifier: GPL-3.0-or-later
|
# SPDX-License-Identifier: GPL-3.0-or-later
|
||||||
|
|
||||||
requires_ansible: '>=2.13.0'
|
requires_ansible: '>=2.15.0'
|
||||||
action_groups:
|
action_groups:
|
||||||
consul:
|
consul:
|
||||||
- consul_agent_check
|
- consul_agent_check
|
||||||
@@ -16,6 +16,8 @@ action_groups:
|
|||||||
- consul_token
|
- consul_token
|
||||||
proxmox:
|
proxmox:
|
||||||
- proxmox
|
- proxmox
|
||||||
|
- proxmox_backup
|
||||||
|
- proxmox_backup_info
|
||||||
- proxmox_disk
|
- proxmox_disk
|
||||||
- proxmox_domain_info
|
- proxmox_domain_info
|
||||||
- proxmox_group_info
|
- proxmox_group_info
|
||||||
@@ -31,6 +33,34 @@ action_groups:
|
|||||||
- proxmox_template
|
- proxmox_template
|
||||||
- proxmox_user_info
|
- proxmox_user_info
|
||||||
- proxmox_vm_info
|
- proxmox_vm_info
|
||||||
|
keycloak:
|
||||||
|
- keycloak_authentication
|
||||||
|
- keycloak_authentication_required_actions
|
||||||
|
- keycloak_authz_authorization_scope
|
||||||
|
- keycloak_authz_custom_policy
|
||||||
|
- keycloak_authz_permission
|
||||||
|
- keycloak_authz_permission_info
|
||||||
|
- keycloak_client
|
||||||
|
- keycloak_client_rolemapping
|
||||||
|
- keycloak_client_rolescope
|
||||||
|
- keycloak_clientscope
|
||||||
|
- keycloak_clientscope_type
|
||||||
|
- keycloak_clientsecret_info
|
||||||
|
- keycloak_clientsecret_regenerate
|
||||||
|
- keycloak_clienttemplate
|
||||||
|
- keycloak_component
|
||||||
|
- keycloak_component_info
|
||||||
|
- keycloak_group
|
||||||
|
- keycloak_identity_provider
|
||||||
|
- keycloak_realm
|
||||||
|
- keycloak_realm_key
|
||||||
|
- keycloak_realm_keys_metadata_info
|
||||||
|
- keycloak_realm_rolemapping
|
||||||
|
- keycloak_role
|
||||||
|
- keycloak_user
|
||||||
|
- keycloak_user_federation
|
||||||
|
- keycloak_user_rolemapping
|
||||||
|
- keycloak_userprofile
|
||||||
plugin_routing:
|
plugin_routing:
|
||||||
callback:
|
callback:
|
||||||
actionable:
|
actionable:
|
||||||
@@ -44,7 +74,7 @@ plugin_routing:
|
|||||||
warning_text: Use the 'default' callback plugin with 'display_skipped_hosts
|
warning_text: Use the 'default' callback plugin with 'display_skipped_hosts
|
||||||
= no' option.
|
= no' option.
|
||||||
hipchat:
|
hipchat:
|
||||||
deprecation:
|
tombstone:
|
||||||
removal_version: 10.0.0
|
removal_version: 10.0.0
|
||||||
warning_text: The hipchat service has been discontinued and the self-hosted variant has been End of Life since 2020.
|
warning_text: The hipchat service has been discontinued and the self-hosted variant has been End of Life since 2020.
|
||||||
osx_say:
|
osx_say:
|
||||||
@@ -54,6 +84,11 @@ plugin_routing:
|
|||||||
removal_version: 2.0.0
|
removal_version: 2.0.0
|
||||||
warning_text: Use the 'default' callback plugin with 'display_failed_stderr
|
warning_text: Use the 'default' callback plugin with 'display_failed_stderr
|
||||||
= yes' option.
|
= yes' option.
|
||||||
|
yaml:
|
||||||
|
deprecation:
|
||||||
|
removal_version: 12.0.0
|
||||||
|
warning_text: >-
|
||||||
|
The plugin has been superseded by the option `result_format=yaml` in callback plugin ansible.builtin.default from ansible-core 2.13 onwards.
|
||||||
connection:
|
connection:
|
||||||
docker:
|
docker:
|
||||||
redirect: community.docker.docker
|
redirect: community.docker.docker
|
||||||
@@ -64,6 +99,10 @@ plugin_routing:
|
|||||||
redirect: community.google.gcp_storage_file
|
redirect: community.google.gcp_storage_file
|
||||||
hashi_vault:
|
hashi_vault:
|
||||||
redirect: community.hashi_vault.hashi_vault
|
redirect: community.hashi_vault.hashi_vault
|
||||||
|
manifold:
|
||||||
|
deprecation:
|
||||||
|
removal_version: 11.0.0
|
||||||
|
warning_text: Company was acquired in 2021 and service was ceased afterwards.
|
||||||
nios:
|
nios:
|
||||||
redirect: infoblox.nios_modules.nios_lookup
|
redirect: infoblox.nios_modules.nios_lookup
|
||||||
nios_next_ip:
|
nios_next_ip:
|
||||||
@@ -71,136 +110,64 @@ plugin_routing:
|
|||||||
nios_next_network:
|
nios_next_network:
|
||||||
redirect: infoblox.nios_modules.nios_next_network
|
redirect: infoblox.nios_modules.nios_next_network
|
||||||
modules:
|
modules:
|
||||||
consul_acl:
|
|
||||||
deprecation:
|
|
||||||
removal_version: 10.0.0
|
|
||||||
warning_text: Use community.general.consul_token and/or community.general.consul_policy instead.
|
|
||||||
rax_cbs_attachments:
|
|
||||||
tombstone:
|
|
||||||
removal_version: 9.0.0
|
|
||||||
warning_text: This module relied on the deprecated package pyrax.
|
|
||||||
rax_cbs:
|
|
||||||
tombstone:
|
|
||||||
removal_version: 9.0.0
|
|
||||||
warning_text: This module relied on the deprecated package pyrax.
|
|
||||||
rax_cdb_database:
|
|
||||||
tombstone:
|
|
||||||
removal_version: 9.0.0
|
|
||||||
warning_text: This module relied on the deprecated package pyrax.
|
|
||||||
rax_cdb_user:
|
|
||||||
tombstone:
|
|
||||||
removal_version: 9.0.0
|
|
||||||
warning_text: This module relied on the deprecated package pyrax.
|
|
||||||
rax_cdb:
|
|
||||||
tombstone:
|
|
||||||
removal_version: 9.0.0
|
|
||||||
warning_text: This module relied on the deprecated package pyrax.
|
|
||||||
rax_clb_nodes:
|
|
||||||
tombstone:
|
|
||||||
removal_version: 9.0.0
|
|
||||||
warning_text: This module relied on the deprecated package pyrax.
|
|
||||||
rax_clb_ssl:
|
|
||||||
tombstone:
|
|
||||||
removal_version: 9.0.0
|
|
||||||
warning_text: This module relied on the deprecated package pyrax.
|
|
||||||
rax_clb:
|
|
||||||
tombstone:
|
|
||||||
removal_version: 9.0.0
|
|
||||||
warning_text: This module relied on the deprecated package pyrax.
|
|
||||||
rax_dns_record:
|
|
||||||
tombstone:
|
|
||||||
removal_version: 9.0.0
|
|
||||||
warning_text: This module relied on the deprecated package pyrax.
|
|
||||||
rax_dns:
|
|
||||||
tombstone:
|
|
||||||
removal_version: 9.0.0
|
|
||||||
warning_text: This module relied on the deprecated package pyrax.
|
|
||||||
rax_facts:
|
|
||||||
tombstone:
|
|
||||||
removal_version: 9.0.0
|
|
||||||
warning_text: This module relied on the deprecated package pyrax.
|
|
||||||
rax_files_objects:
|
|
||||||
tombstone:
|
|
||||||
removal_version: 9.0.0
|
|
||||||
warning_text: This module relied on the deprecated package pyrax.
|
|
||||||
rax_files:
|
|
||||||
tombstone:
|
|
||||||
removal_version: 9.0.0
|
|
||||||
warning_text: This module relied on the deprecated package pyrax.
|
|
||||||
rax_identity:
|
|
||||||
tombstone:
|
|
||||||
removal_version: 9.0.0
|
|
||||||
warning_text: This module relied on the deprecated package pyrax.
|
|
||||||
rax_keypair:
|
|
||||||
tombstone:
|
|
||||||
removal_version: 9.0.0
|
|
||||||
warning_text: This module relied on the deprecated package pyrax.
|
|
||||||
rax_meta:
|
|
||||||
tombstone:
|
|
||||||
removal_version: 9.0.0
|
|
||||||
warning_text: This module relied on the deprecated package pyrax.
|
|
||||||
rax_mon_alarm:
|
|
||||||
tombstone:
|
|
||||||
removal_version: 9.0.0
|
|
||||||
warning_text: This module relied on the deprecated package pyrax.
|
|
||||||
rax:
|
|
||||||
tombstone:
|
|
||||||
removal_version: 9.0.0
|
|
||||||
warning_text: This module relied on the deprecated package pyrax.
|
|
||||||
rax_mon_check:
|
|
||||||
tombstone:
|
|
||||||
removal_version: 9.0.0
|
|
||||||
warning_text: This module relied on the deprecated package pyrax.
|
|
||||||
rax_mon_entity:
|
|
||||||
tombstone:
|
|
||||||
removal_version: 9.0.0
|
|
||||||
warning_text: This module relied on the deprecated package pyrax.
|
|
||||||
rax_mon_notification_plan:
|
|
||||||
tombstone:
|
|
||||||
removal_version: 9.0.0
|
|
||||||
warning_text: This module relied on the deprecated package pyrax.
|
|
||||||
rax_mon_notification:
|
|
||||||
tombstone:
|
|
||||||
removal_version: 9.0.0
|
|
||||||
warning_text: This module relied on the deprecated package pyrax.
|
|
||||||
rax_network:
|
|
||||||
tombstone:
|
|
||||||
removal_version: 9.0.0
|
|
||||||
warning_text: This module relied on the deprecated package pyrax.
|
|
||||||
rax_queue:
|
|
||||||
tombstone:
|
|
||||||
removal_version: 9.0.0
|
|
||||||
warning_text: This module relied on the deprecated package pyrax.
|
|
||||||
rax_scaling_group:
|
|
||||||
tombstone:
|
|
||||||
removal_version: 9.0.0
|
|
||||||
warning_text: This module relied on the deprecated package pyrax.
|
|
||||||
rax_scaling_policy:
|
|
||||||
tombstone:
|
|
||||||
removal_version: 9.0.0
|
|
||||||
warning_text: This module relied on the deprecated package pyrax.
|
|
||||||
rhn_channel:
|
|
||||||
deprecation:
|
|
||||||
removal_version: 10.0.0
|
|
||||||
warning_text: RHN is EOL, please contact the community.general maintainers
|
|
||||||
if still using this; see the module documentation for more details.
|
|
||||||
rhn_register:
|
|
||||||
deprecation:
|
|
||||||
removal_version: 10.0.0
|
|
||||||
warning_text: RHN is EOL, please contact the community.general maintainers
|
|
||||||
if still using this; see the module documentation for more details.
|
|
||||||
stackdriver:
|
|
||||||
tombstone:
|
|
||||||
removal_version: 9.0.0
|
|
||||||
warning_text: This module relied on HTTPS APIs that do not exist anymore,
|
|
||||||
and any new development in the direction of providing an alternative should
|
|
||||||
happen in the context of the google.cloud collection.
|
|
||||||
ali_instance_facts:
|
ali_instance_facts:
|
||||||
tombstone:
|
tombstone:
|
||||||
removal_version: 3.0.0
|
removal_version: 3.0.0
|
||||||
warning_text: Use community.general.ali_instance_info instead.
|
warning_text: Use community.general.ali_instance_info instead.
|
||||||
|
atomic_container:
|
||||||
|
deprecation:
|
||||||
|
removal_version: 13.0.0
|
||||||
|
warning_text: Project Atomic was sunset by the end of 2019.
|
||||||
|
atomic_host:
|
||||||
|
deprecation:
|
||||||
|
removal_version: 13.0.0
|
||||||
|
warning_text: Project Atomic was sunset by the end of 2019.
|
||||||
|
atomic_image:
|
||||||
|
deprecation:
|
||||||
|
removal_version: 13.0.0
|
||||||
|
warning_text: Project Atomic was sunset by the end of 2019.
|
||||||
cisco_spark:
|
cisco_spark:
|
||||||
redirect: community.general.cisco_webex
|
redirect: community.general.cisco_webex
|
||||||
|
clc_alert_policy:
|
||||||
|
deprecation:
|
||||||
|
removal_version: 11.0.0
|
||||||
|
warning_text: CenturyLink Cloud services went EOL in September 2023.
|
||||||
|
clc_blueprint_package:
|
||||||
|
deprecation:
|
||||||
|
removal_version: 11.0.0
|
||||||
|
warning_text: CenturyLink Cloud services went EOL in September 2023.
|
||||||
|
clc_firewall_policy:
|
||||||
|
deprecation:
|
||||||
|
removal_version: 11.0.0
|
||||||
|
warning_text: CenturyLink Cloud services went EOL in September 2023.
|
||||||
|
clc_group:
|
||||||
|
deprecation:
|
||||||
|
removal_version: 11.0.0
|
||||||
|
warning_text: CenturyLink Cloud services went EOL in September 2023.
|
||||||
|
clc_loadbalancer:
|
||||||
|
deprecation:
|
||||||
|
removal_version: 11.0.0
|
||||||
|
warning_text: CenturyLink Cloud services went EOL in September 2023.
|
||||||
|
clc_modify_server:
|
||||||
|
deprecation:
|
||||||
|
removal_version: 11.0.0
|
||||||
|
warning_text: CenturyLink Cloud services went EOL in September 2023.
|
||||||
|
clc_publicip:
|
||||||
|
deprecation:
|
||||||
|
removal_version: 11.0.0
|
||||||
|
warning_text: CenturyLink Cloud services went EOL in September 2023.
|
||||||
|
clc_server:
|
||||||
|
deprecation:
|
||||||
|
removal_version: 11.0.0
|
||||||
|
warning_text: CenturyLink Cloud services went EOL in September 2023.
|
||||||
|
clc_server_snapshot:
|
||||||
|
deprecation:
|
||||||
|
removal_version: 11.0.0
|
||||||
|
warning_text: CenturyLink Cloud services went EOL in September 2023.
|
||||||
|
consul_acl:
|
||||||
|
tombstone:
|
||||||
|
removal_version: 10.0.0
|
||||||
|
warning_text: Use community.general.consul_token and/or community.general.consul_policy instead.
|
||||||
docker_compose:
|
docker_compose:
|
||||||
redirect: community.docker.docker_compose
|
redirect: community.docker.docker_compose
|
||||||
docker_config:
|
docker_config:
|
||||||
@@ -255,6 +222,10 @@ plugin_routing:
|
|||||||
redirect: community.docker.docker_volume
|
redirect: community.docker.docker_volume
|
||||||
docker_volume_info:
|
docker_volume_info:
|
||||||
redirect: community.docker.docker_volume_info
|
redirect: community.docker.docker_volume_info
|
||||||
|
facter:
|
||||||
|
deprecation:
|
||||||
|
removal_version: 12.0.0
|
||||||
|
warning_text: Use community.general.facter_facts instead.
|
||||||
flowdock:
|
flowdock:
|
||||||
tombstone:
|
tombstone:
|
||||||
removal_version: 9.0.0
|
removal_version: 9.0.0
|
||||||
@@ -348,6 +319,10 @@ plugin_routing:
|
|||||||
redirect: community.hrobot.firewall
|
redirect: community.hrobot.firewall
|
||||||
hetzner_firewall_info:
|
hetzner_firewall_info:
|
||||||
redirect: community.hrobot.firewall_info
|
redirect: community.hrobot.firewall_info
|
||||||
|
hipchat:
|
||||||
|
deprecation:
|
||||||
|
removal_version: 11.0.0
|
||||||
|
warning_text: The hipchat service has been discontinued and the self-hosted variant has been End of Life since 2020.
|
||||||
hpilo_facts:
|
hpilo_facts:
|
||||||
tombstone:
|
tombstone:
|
||||||
removal_version: 3.0.0
|
removal_version: 3.0.0
|
||||||
@@ -669,6 +644,26 @@ plugin_routing:
|
|||||||
redirect: community.postgresql.postgresql_user
|
redirect: community.postgresql.postgresql_user
|
||||||
postgresql_user_obj_stat_info:
|
postgresql_user_obj_stat_info:
|
||||||
redirect: community.postgresql.postgresql_user_obj_stat_info
|
redirect: community.postgresql.postgresql_user_obj_stat_info
|
||||||
|
profitbricks:
|
||||||
|
deprecation:
|
||||||
|
removal_version: 11.0.0
|
||||||
|
warning_text: Supporting library is unsupported since 2021.
|
||||||
|
profitbricks_datacenter:
|
||||||
|
deprecation:
|
||||||
|
removal_version: 11.0.0
|
||||||
|
warning_text: Supporting library is unsupported since 2021.
|
||||||
|
profitbricks_nic:
|
||||||
|
deprecation:
|
||||||
|
removal_version: 11.0.0
|
||||||
|
warning_text: Supporting library is unsupported since 2021.
|
||||||
|
profitbricks_volume:
|
||||||
|
deprecation:
|
||||||
|
removal_version: 11.0.0
|
||||||
|
warning_text: Supporting library is unsupported since 2021.
|
||||||
|
profitbricks_volume_attachments:
|
||||||
|
deprecation:
|
||||||
|
removal_version: 11.0.0
|
||||||
|
warning_text: Supporting library is unsupported since 2021.
|
||||||
purefa_facts:
|
purefa_facts:
|
||||||
tombstone:
|
tombstone:
|
||||||
removal_version: 3.0.0
|
removal_version: 3.0.0
|
||||||
@@ -681,10 +676,122 @@ plugin_routing:
|
|||||||
tombstone:
|
tombstone:
|
||||||
removal_version: 3.0.0
|
removal_version: 3.0.0
|
||||||
warning_text: Use community.general.python_requirements_info instead.
|
warning_text: Use community.general.python_requirements_info instead.
|
||||||
|
rax_cbs_attachments:
|
||||||
|
tombstone:
|
||||||
|
removal_version: 9.0.0
|
||||||
|
warning_text: This module relied on the deprecated package pyrax.
|
||||||
|
rax_cbs:
|
||||||
|
tombstone:
|
||||||
|
removal_version: 9.0.0
|
||||||
|
warning_text: This module relied on the deprecated package pyrax.
|
||||||
|
rax_cdb_database:
|
||||||
|
tombstone:
|
||||||
|
removal_version: 9.0.0
|
||||||
|
warning_text: This module relied on the deprecated package pyrax.
|
||||||
|
rax_cdb_user:
|
||||||
|
tombstone:
|
||||||
|
removal_version: 9.0.0
|
||||||
|
warning_text: This module relied on the deprecated package pyrax.
|
||||||
|
rax_cdb:
|
||||||
|
tombstone:
|
||||||
|
removal_version: 9.0.0
|
||||||
|
warning_text: This module relied on the deprecated package pyrax.
|
||||||
|
rax_clb_nodes:
|
||||||
|
tombstone:
|
||||||
|
removal_version: 9.0.0
|
||||||
|
warning_text: This module relied on the deprecated package pyrax.
|
||||||
|
rax_clb_ssl:
|
||||||
|
tombstone:
|
||||||
|
removal_version: 9.0.0
|
||||||
|
warning_text: This module relied on the deprecated package pyrax.
|
||||||
|
rax_clb:
|
||||||
|
tombstone:
|
||||||
|
removal_version: 9.0.0
|
||||||
|
warning_text: This module relied on the deprecated package pyrax.
|
||||||
|
rax_dns_record:
|
||||||
|
tombstone:
|
||||||
|
removal_version: 9.0.0
|
||||||
|
warning_text: This module relied on the deprecated package pyrax.
|
||||||
|
rax_dns:
|
||||||
|
tombstone:
|
||||||
|
removal_version: 9.0.0
|
||||||
|
warning_text: This module relied on the deprecated package pyrax.
|
||||||
|
rax_facts:
|
||||||
|
tombstone:
|
||||||
|
removal_version: 9.0.0
|
||||||
|
warning_text: This module relied on the deprecated package pyrax.
|
||||||
|
rax_files_objects:
|
||||||
|
tombstone:
|
||||||
|
removal_version: 9.0.0
|
||||||
|
warning_text: This module relied on the deprecated package pyrax.
|
||||||
|
rax_files:
|
||||||
|
tombstone:
|
||||||
|
removal_version: 9.0.0
|
||||||
|
warning_text: This module relied on the deprecated package pyrax.
|
||||||
|
rax_identity:
|
||||||
|
tombstone:
|
||||||
|
removal_version: 9.0.0
|
||||||
|
warning_text: This module relied on the deprecated package pyrax.
|
||||||
|
rax_keypair:
|
||||||
|
tombstone:
|
||||||
|
removal_version: 9.0.0
|
||||||
|
warning_text: This module relied on the deprecated package pyrax.
|
||||||
|
rax_meta:
|
||||||
|
tombstone:
|
||||||
|
removal_version: 9.0.0
|
||||||
|
warning_text: This module relied on the deprecated package pyrax.
|
||||||
|
rax_mon_alarm:
|
||||||
|
tombstone:
|
||||||
|
removal_version: 9.0.0
|
||||||
|
warning_text: This module relied on the deprecated package pyrax.
|
||||||
|
rax:
|
||||||
|
tombstone:
|
||||||
|
removal_version: 9.0.0
|
||||||
|
warning_text: This module relied on the deprecated package pyrax.
|
||||||
|
rax_mon_check:
|
||||||
|
tombstone:
|
||||||
|
removal_version: 9.0.0
|
||||||
|
warning_text: This module relied on the deprecated package pyrax.
|
||||||
|
rax_mon_entity:
|
||||||
|
tombstone:
|
||||||
|
removal_version: 9.0.0
|
||||||
|
warning_text: This module relied on the deprecated package pyrax.
|
||||||
|
rax_mon_notification_plan:
|
||||||
|
tombstone:
|
||||||
|
removal_version: 9.0.0
|
||||||
|
warning_text: This module relied on the deprecated package pyrax.
|
||||||
|
rax_mon_notification:
|
||||||
|
tombstone:
|
||||||
|
removal_version: 9.0.0
|
||||||
|
warning_text: This module relied on the deprecated package pyrax.
|
||||||
|
rax_network:
|
||||||
|
tombstone:
|
||||||
|
removal_version: 9.0.0
|
||||||
|
warning_text: This module relied on the deprecated package pyrax.
|
||||||
|
rax_queue:
|
||||||
|
tombstone:
|
||||||
|
removal_version: 9.0.0
|
||||||
|
warning_text: This module relied on the deprecated package pyrax.
|
||||||
|
rax_scaling_group:
|
||||||
|
tombstone:
|
||||||
|
removal_version: 9.0.0
|
||||||
|
warning_text: This module relied on the deprecated package pyrax.
|
||||||
|
rax_scaling_policy:
|
||||||
|
tombstone:
|
||||||
|
removal_version: 9.0.0
|
||||||
|
warning_text: This module relied on the deprecated package pyrax.
|
||||||
redfish_facts:
|
redfish_facts:
|
||||||
tombstone:
|
tombstone:
|
||||||
removal_version: 3.0.0
|
removal_version: 3.0.0
|
||||||
warning_text: Use community.general.redfish_info instead.
|
warning_text: Use community.general.redfish_info instead.
|
||||||
|
rhn_channel:
|
||||||
|
tombstone:
|
||||||
|
removal_version: 10.0.0
|
||||||
|
warning_text: RHN is EOL.
|
||||||
|
rhn_register:
|
||||||
|
tombstone:
|
||||||
|
removal_version: 10.0.0
|
||||||
|
warning_text: RHN is EOL.
|
||||||
sapcar_extract:
|
sapcar_extract:
|
||||||
redirect: community.sap_libs.sapcar_extract
|
redirect: community.sap_libs.sapcar_extract
|
||||||
sap_task_list_execute:
|
sap_task_list_execute:
|
||||||
@@ -717,6 +824,26 @@ plugin_routing:
|
|||||||
tombstone:
|
tombstone:
|
||||||
removal_version: 3.0.0
|
removal_version: 3.0.0
|
||||||
warning_text: Use community.general.scaleway_volume_info instead.
|
warning_text: Use community.general.scaleway_volume_info instead.
|
||||||
|
sensu_check:
|
||||||
|
deprecation:
|
||||||
|
removal_version: 13.0.0
|
||||||
|
warning_text: Sensu Core and Sensu Enterprise products have been End of Life since 2019/20.
|
||||||
|
sensu_client:
|
||||||
|
deprecation:
|
||||||
|
removal_version: 13.0.0
|
||||||
|
warning_text: Sensu Core and Sensu Enterprise products have been End of Life since 2019/20.
|
||||||
|
sensu_handler:
|
||||||
|
deprecation:
|
||||||
|
removal_version: 13.0.0
|
||||||
|
warning_text: Sensu Core and Sensu Enterprise products have been End of Life since 2019/20.
|
||||||
|
sensu_silence:
|
||||||
|
deprecation:
|
||||||
|
removal_version: 13.0.0
|
||||||
|
warning_text: Sensu Core and Sensu Enterprise products have been End of Life since 2019/20.
|
||||||
|
sensu_subscription:
|
||||||
|
deprecation:
|
||||||
|
removal_version: 13.0.0
|
||||||
|
warning_text: Sensu Core and Sensu Enterprise products have been End of Life since 2019/20.
|
||||||
sf_account_manager:
|
sf_account_manager:
|
||||||
tombstone:
|
tombstone:
|
||||||
removal_version: 2.0.0
|
removal_version: 2.0.0
|
||||||
@@ -741,6 +868,12 @@ plugin_routing:
|
|||||||
tombstone:
|
tombstone:
|
||||||
removal_version: 3.0.0
|
removal_version: 3.0.0
|
||||||
warning_text: Use community.general.smartos_image_info instead.
|
warning_text: Use community.general.smartos_image_info instead.
|
||||||
|
stackdriver:
|
||||||
|
tombstone:
|
||||||
|
removal_version: 9.0.0
|
||||||
|
warning_text: This module relied on HTTPS APIs that do not exist anymore,
|
||||||
|
and any new development in the direction of providing an alternative should
|
||||||
|
happen in the context of the google.cloud collection.
|
||||||
vertica_facts:
|
vertica_facts:
|
||||||
tombstone:
|
tombstone:
|
||||||
removal_version: 3.0.0
|
removal_version: 3.0.0
|
||||||
@@ -775,11 +908,6 @@ plugin_routing:
|
|||||||
removal_version: 3.0.0
|
removal_version: 3.0.0
|
||||||
warning_text: Use community.general.xenserver_guest_info instead.
|
warning_text: Use community.general.xenserver_guest_info instead.
|
||||||
doc_fragments:
|
doc_fragments:
|
||||||
rackspace:
|
|
||||||
tombstone:
|
|
||||||
removal_version: 9.0.0
|
|
||||||
warning_text: This doc fragment was used by rax modules, that relied on the deprecated
|
|
||||||
package pyrax.
|
|
||||||
_gcp:
|
_gcp:
|
||||||
redirect: community.google._gcp
|
redirect: community.google._gcp
|
||||||
docker:
|
docker:
|
||||||
@@ -794,11 +922,16 @@ plugin_routing:
|
|||||||
redirect: infoblox.nios_modules.nios
|
redirect: infoblox.nios_modules.nios
|
||||||
postgresql:
|
postgresql:
|
||||||
redirect: community.postgresql.postgresql
|
redirect: community.postgresql.postgresql
|
||||||
module_utils:
|
purestorage:
|
||||||
rax:
|
deprecation:
|
||||||
|
removal_version: 12.0.0
|
||||||
|
warning_text: The modules for purestorage were removed in community.general 3.0.0, this document fragment was left behind.
|
||||||
|
rackspace:
|
||||||
tombstone:
|
tombstone:
|
||||||
removal_version: 9.0.0
|
removal_version: 9.0.0
|
||||||
warning_text: This module util relied on the deprecated package pyrax.
|
warning_text: This doc fragment was used by rax modules, that relied on the deprecated
|
||||||
|
package pyrax.
|
||||||
|
module_utils:
|
||||||
docker.common:
|
docker.common:
|
||||||
redirect: community.docker.common
|
redirect: community.docker.common
|
||||||
docker.swarm:
|
docker.swarm:
|
||||||
@@ -817,6 +950,14 @@ plugin_routing:
|
|||||||
redirect: infoblox.nios_modules.api
|
redirect: infoblox.nios_modules.api
|
||||||
postgresql:
|
postgresql:
|
||||||
redirect: community.postgresql.postgresql
|
redirect: community.postgresql.postgresql
|
||||||
|
pure:
|
||||||
|
deprecation:
|
||||||
|
removal_version: 12.0.0
|
||||||
|
warning_text: The modules for purestorage were removed in community.general 3.0.0, this module util was left behind.
|
||||||
|
rax:
|
||||||
|
tombstone:
|
||||||
|
removal_version: 9.0.0
|
||||||
|
warning_text: This module util relied on the deprecated package pyrax.
|
||||||
remote_management.dellemc.dellemc_idrac:
|
remote_management.dellemc.dellemc_idrac:
|
||||||
redirect: dellemc.openmanage.dellemc_idrac
|
redirect: dellemc.openmanage.dellemc_idrac
|
||||||
remote_management.dellemc.ome:
|
remote_management.dellemc.ome:
|
||||||
@@ -828,6 +969,10 @@ plugin_routing:
|
|||||||
redirect: community.docker.docker_swarm
|
redirect: community.docker.docker_swarm
|
||||||
kubevirt:
|
kubevirt:
|
||||||
redirect: community.kubevirt.kubevirt
|
redirect: community.kubevirt.kubevirt
|
||||||
|
stackpath_compute:
|
||||||
|
deprecation:
|
||||||
|
removal_version: 11.0.0
|
||||||
|
warning_text: The company and the service were sunset in June 2024.
|
||||||
filter:
|
filter:
|
||||||
path_join:
|
path_join:
|
||||||
# The ansible.builtin.path_join filter has been added in ansible-base 2.10.
|
# The ansible.builtin.path_join filter has been added in ansible-base 2.10.
|
||||||
|
|||||||
38
noxfile.py
Normal file
38
noxfile.py
Normal file
@@ -0,0 +1,38 @@
|
|||||||
|
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||||
|
# SPDX-License-Identifier: GPL-3.0-or-later
|
||||||
|
# SPDX-FileCopyrightText: 2025 Felix Fontein <felix@fontein.de>
|
||||||
|
|
||||||
|
# /// script
|
||||||
|
# dependencies = ["nox>=2025.02.09", "antsibull-nox"]
|
||||||
|
# ///
|
||||||
|
|
||||||
|
import sys
|
||||||
|
|
||||||
|
import nox
|
||||||
|
|
||||||
|
|
||||||
|
try:
|
||||||
|
import antsibull_nox
|
||||||
|
except ImportError:
|
||||||
|
print("You need to install antsibull-nox in the same Python environment as nox.")
|
||||||
|
sys.exit(1)
|
||||||
|
|
||||||
|
|
||||||
|
antsibull_nox.load_antsibull_nox_toml()
|
||||||
|
|
||||||
|
|
||||||
|
@nox.session(name="aliases", python=False, default=True)
|
||||||
|
def aliases(session: nox.Session) -> None:
|
||||||
|
session.run("python", "tests/sanity/extra/aliases.py")
|
||||||
|
|
||||||
|
|
||||||
|
@nox.session(name="botmeta", default=True)
|
||||||
|
def botmeta(session: nox.Session) -> None:
|
||||||
|
session.install("PyYAML", "voluptuous")
|
||||||
|
session.run("python", "tests/sanity/extra/botmeta.py")
|
||||||
|
|
||||||
|
|
||||||
|
# Allow to run the noxfile with `python noxfile.py`, `pipx run noxfile.py`, or similar.
|
||||||
|
# Requires nox >= 2025.02.09
|
||||||
|
if __name__ == "__main__":
|
||||||
|
nox.main()
|
||||||
@@ -3,8 +3,7 @@
|
|||||||
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
|
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||||
# SPDX-License-Identifier: GPL-3.0-or-later
|
# SPDX-License-Identifier: GPL-3.0-or-later
|
||||||
|
|
||||||
from __future__ import absolute_import, division, print_function
|
from __future__ import annotations
|
||||||
__metaclass__ = type
|
|
||||||
|
|
||||||
import time
|
import time
|
||||||
|
|
||||||
@@ -22,25 +21,33 @@ class ActionModule(ActionBase):
|
|||||||
_VALID_ARGS = frozenset(('path', 'state', 'table', 'noflush', 'counters', 'modprobe', 'ip_version', 'wait'))
|
_VALID_ARGS = frozenset(('path', 'state', 'table', 'noflush', 'counters', 'modprobe', 'ip_version', 'wait'))
|
||||||
DEFAULT_SUDOABLE = True
|
DEFAULT_SUDOABLE = True
|
||||||
|
|
||||||
MSG_ERROR__ASYNC_AND_POLL_NOT_ZERO = (
|
@staticmethod
|
||||||
"This module doesn't support async>0 and poll>0 when its 'state' param "
|
def msg_error__async_and_poll_not_zero(task_poll, task_async, max_timeout):
|
||||||
"is set to 'restored'. To enable its rollback feature (that needs the "
|
return (
|
||||||
"module to run asynchronously on the remote), please set task attribute "
|
"This module doesn't support async>0 and poll>0 when its 'state' param "
|
||||||
"'poll' (=%s) to 0, and 'async' (=%s) to a value >2 and not greater than "
|
"is set to 'restored'. To enable its rollback feature (that needs the "
|
||||||
"'ansible_timeout' (=%s) (recommended).")
|
"module to run asynchronously on the remote), please set task attribute "
|
||||||
MSG_WARNING__NO_ASYNC_IS_NO_ROLLBACK = (
|
f"'poll' (={task_poll}) to 0, and 'async' (={task_async}) to a value >2 and not greater than "
|
||||||
"Attempts to restore iptables state without rollback in case of mistake "
|
f"'ansible_timeout' (={max_timeout}) (recommended).")
|
||||||
"may lead the ansible controller to loose access to the hosts and never "
|
|
||||||
"regain it before fixing firewall rules through a serial console, or any "
|
@staticmethod
|
||||||
"other way except SSH. Please set task attribute 'poll' (=%s) to 0, and "
|
def msg_warning__no_async_is_no_rollback(task_poll, task_async, max_timeout):
|
||||||
"'async' (=%s) to a value >2 and not greater than 'ansible_timeout' (=%s) "
|
return (
|
||||||
"(recommended).")
|
"Attempts to restore iptables state without rollback in case of mistake "
|
||||||
MSG_WARNING__ASYNC_GREATER_THAN_TIMEOUT = (
|
"may lead the ansible controller to loose access to the hosts and never "
|
||||||
"You attempt to restore iptables state with rollback in case of mistake, "
|
"regain it before fixing firewall rules through a serial console, or any "
|
||||||
"but with settings that will lead this rollback to happen AFTER that the "
|
f"other way except SSH. Please set task attribute 'poll' (={task_poll}) to 0, and "
|
||||||
"controller will reach its own timeout. Please set task attribute 'poll' "
|
f"'async' (={task_async}) to a value >2 and not greater than 'ansible_timeout' (={max_timeout}) "
|
||||||
"(=%s) to 0, and 'async' (=%s) to a value >2 and not greater than "
|
"(recommended).")
|
||||||
"'ansible_timeout' (=%s) (recommended).")
|
|
||||||
|
@staticmethod
|
||||||
|
def msg_warning__async_greater_than_timeout(task_poll, task_async, max_timeout):
|
||||||
|
return (
|
||||||
|
"You attempt to restore iptables state with rollback in case of mistake, "
|
||||||
|
"but with settings that will lead this rollback to happen AFTER that the "
|
||||||
|
"controller will reach its own timeout. Please set task attribute 'poll' "
|
||||||
|
f"(={task_poll}) to 0, and 'async' (={task_async}) to a value >2 and not greater than "
|
||||||
|
f"'ansible_timeout' (={max_timeout}) (recommended).")
|
||||||
|
|
||||||
def _async_result(self, async_status_args, task_vars, timeout):
|
def _async_result(self, async_status_args, task_vars, timeout):
|
||||||
'''
|
'''
|
||||||
@@ -95,18 +102,18 @@ class ActionModule(ActionBase):
|
|||||||
if module_args.get('state', None) == 'restored':
|
if module_args.get('state', None) == 'restored':
|
||||||
if not wrap_async:
|
if not wrap_async:
|
||||||
if not check_mode:
|
if not check_mode:
|
||||||
display.warning(self.MSG_WARNING__NO_ASYNC_IS_NO_ROLLBACK % (
|
display.warning(self.msg_error__async_and_poll_not_zero(
|
||||||
task_poll,
|
task_poll,
|
||||||
task_async,
|
task_async,
|
||||||
max_timeout))
|
max_timeout))
|
||||||
elif task_poll:
|
elif task_poll:
|
||||||
raise AnsibleActionFail(self.MSG_ERROR__ASYNC_AND_POLL_NOT_ZERO % (
|
raise AnsibleActionFail(self.msg_warning__no_async_is_no_rollback(
|
||||||
task_poll,
|
task_poll,
|
||||||
task_async,
|
task_async,
|
||||||
max_timeout))
|
max_timeout))
|
||||||
else:
|
else:
|
||||||
if task_async > max_timeout and not check_mode:
|
if task_async > max_timeout and not check_mode:
|
||||||
display.warning(self.MSG_WARNING__ASYNC_GREATER_THAN_TIMEOUT % (
|
display.warning(self.msg_warning__async_greater_than_timeout(
|
||||||
task_poll,
|
task_poll,
|
||||||
task_async,
|
task_async,
|
||||||
max_timeout))
|
max_timeout))
|
||||||
@@ -119,10 +126,10 @@ class ActionModule(ActionBase):
|
|||||||
# remote and local sides (if not the same, make the loop
|
# remote and local sides (if not the same, make the loop
|
||||||
# longer on the controller); and set a backup file path.
|
# longer on the controller); and set a backup file path.
|
||||||
module_args['_timeout'] = task_async
|
module_args['_timeout'] = task_async
|
||||||
module_args['_back'] = '%s/iptables.state' % async_dir
|
module_args['_back'] = f'{async_dir}/iptables.state'
|
||||||
async_status_args = dict(mode='status')
|
async_status_args = dict(mode='status')
|
||||||
confirm_cmd = 'rm -f %s' % module_args['_back']
|
confirm_cmd = f"rm -f {module_args['_back']}"
|
||||||
starter_cmd = 'touch %s.starter' % module_args['_back']
|
starter_cmd = f"touch {module_args['_back']}.starter"
|
||||||
remaining_time = max(task_async, max_timeout)
|
remaining_time = max(task_async, max_timeout)
|
||||||
|
|
||||||
# do work!
|
# do work!
|
||||||
|
|||||||
@@ -5,9 +5,8 @@
|
|||||||
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
|
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||||
# SPDX-License-Identifier: GPL-3.0-or-later
|
# SPDX-License-Identifier: GPL-3.0-or-later
|
||||||
|
|
||||||
from __future__ import (absolute_import, division, print_function)
|
from __future__ import annotations
|
||||||
|
|
||||||
__metaclass__ = type
|
|
||||||
|
|
||||||
from ansible.errors import AnsibleError, AnsibleConnectionFailure
|
from ansible.errors import AnsibleError, AnsibleConnectionFailure
|
||||||
from ansible.module_utils.common.text.converters import to_native, to_text
|
from ansible.module_utils.common.text.converters import to_native, to_text
|
||||||
@@ -18,6 +17,10 @@ from ansible.utils.display import Display
|
|||||||
display = Display()
|
display = Display()
|
||||||
|
|
||||||
|
|
||||||
|
def fmt(mapping, key):
|
||||||
|
return to_native(mapping[key]).strip()
|
||||||
|
|
||||||
|
|
||||||
class TimedOutException(Exception):
|
class TimedOutException(Exception):
|
||||||
pass
|
pass
|
||||||
|
|
||||||
@@ -84,31 +87,26 @@ class ActionModule(ActionBase):
|
|||||||
def get_distribution(self, task_vars):
|
def get_distribution(self, task_vars):
|
||||||
# FIXME: only execute the module if we don't already have the facts we need
|
# FIXME: only execute the module if we don't already have the facts we need
|
||||||
distribution = {}
|
distribution = {}
|
||||||
display.debug('{action}: running setup module to get distribution'.format(action=self._task.action))
|
display.debug(f'{self._task.action}: running setup module to get distribution')
|
||||||
module_output = self._execute_module(
|
module_output = self._execute_module(
|
||||||
task_vars=task_vars,
|
task_vars=task_vars,
|
||||||
module_name='ansible.legacy.setup',
|
module_name='ansible.legacy.setup',
|
||||||
module_args={'gather_subset': 'min'})
|
module_args={'gather_subset': 'min'})
|
||||||
try:
|
try:
|
||||||
if module_output.get('failed', False):
|
if module_output.get('failed', False):
|
||||||
raise AnsibleError('Failed to determine system distribution. {0}, {1}'.format(
|
raise AnsibleError(f"Failed to determine system distribution. {fmt(module_output, 'module_stdout')}, {fmt(module_output, 'module_stderr')}")
|
||||||
to_native(module_output['module_stdout']).strip(),
|
|
||||||
to_native(module_output['module_stderr']).strip()))
|
|
||||||
distribution['name'] = module_output['ansible_facts']['ansible_distribution'].lower()
|
distribution['name'] = module_output['ansible_facts']['ansible_distribution'].lower()
|
||||||
distribution['version'] = to_text(
|
distribution['version'] = to_text(
|
||||||
module_output['ansible_facts']['ansible_distribution_version'].split('.')[0])
|
module_output['ansible_facts']['ansible_distribution_version'].split('.')[0])
|
||||||
distribution['family'] = to_text(module_output['ansible_facts']['ansible_os_family'].lower())
|
distribution['family'] = to_text(module_output['ansible_facts']['ansible_os_family'].lower())
|
||||||
display.debug("{action}: distribution: {dist}".format(action=self._task.action, dist=distribution))
|
display.debug(f"{self._task.action}: distribution: {distribution}")
|
||||||
return distribution
|
return distribution
|
||||||
except KeyError as ke:
|
except KeyError as ke:
|
||||||
raise AnsibleError('Failed to get distribution information. Missing "{0}" in output.'.format(ke.args[0]))
|
raise AnsibleError(f'Failed to get distribution information. Missing "{ke.args[0]}" in output.')
|
||||||
|
|
||||||
def get_shutdown_command(self, task_vars, distribution):
|
def get_shutdown_command(self, task_vars, distribution):
|
||||||
def find_command(command, find_search_paths):
|
def find_command(command, find_search_paths):
|
||||||
display.debug('{action}: running find module looking in {paths} to get path for "{command}"'.format(
|
display.debug(f'{self._task.action}: running find module looking in {find_search_paths} to get path for "{command}"')
|
||||||
action=self._task.action,
|
|
||||||
command=command,
|
|
||||||
paths=find_search_paths))
|
|
||||||
find_result = self._execute_module(
|
find_result = self._execute_module(
|
||||||
task_vars=task_vars,
|
task_vars=task_vars,
|
||||||
# prevent collection search by calling with ansible.legacy (still allows library/ override of find)
|
# prevent collection search by calling with ansible.legacy (still allows library/ override of find)
|
||||||
@@ -130,42 +128,37 @@ class ActionModule(ActionBase):
|
|||||||
if is_string(search_paths):
|
if is_string(search_paths):
|
||||||
search_paths = [search_paths]
|
search_paths = [search_paths]
|
||||||
|
|
||||||
# Error if we didn't get a list
|
|
||||||
err_msg = "'search_paths' must be a string or flat list of strings, got {0}"
|
|
||||||
try:
|
try:
|
||||||
incorrect_type = any(not is_string(x) for x in search_paths)
|
incorrect_type = any(not is_string(x) for x in search_paths)
|
||||||
if not isinstance(search_paths, list) or incorrect_type:
|
if not isinstance(search_paths, list) or incorrect_type:
|
||||||
raise TypeError
|
raise TypeError
|
||||||
except TypeError:
|
except TypeError:
|
||||||
raise AnsibleError(err_msg.format(search_paths))
|
# Error if we didn't get a list
|
||||||
|
err_msg = f"'search_paths' must be a string or flat list of strings, got {search_paths}"
|
||||||
|
raise AnsibleError(err_msg)
|
||||||
|
|
||||||
full_path = find_command(shutdown_bin, search_paths) # find the path to the shutdown command
|
full_path = find_command(shutdown_bin, search_paths) # find the path to the shutdown command
|
||||||
if not full_path: # if we could not find the shutdown command
|
if not full_path: # if we could not find the shutdown command
|
||||||
display.vvv('Unable to find command "{0}" in search paths: {1}, will attempt a shutdown using systemd '
|
|
||||||
'directly.'.format(shutdown_bin, search_paths)) # tell the user we will try with systemd
|
# tell the user we will try with systemd
|
||||||
|
display.vvv(f'Unable to find command "{shutdown_bin}" in search paths: {search_paths}, will attempt a shutdown using systemd directly.')
|
||||||
systemctl_search_paths = ['/bin', '/usr/bin']
|
systemctl_search_paths = ['/bin', '/usr/bin']
|
||||||
full_path = find_command('systemctl', systemctl_search_paths) # find the path to the systemctl command
|
full_path = find_command('systemctl', systemctl_search_paths) # find the path to the systemctl command
|
||||||
if not full_path: # if we couldn't find systemctl
|
if not full_path: # if we couldn't find systemctl
|
||||||
raise AnsibleError(
|
raise AnsibleError(
|
||||||
'Could not find command "{0}" in search paths: {1} or systemctl command in search paths: {2}, unable to shutdown.'.
|
f'Could not find command "{shutdown_bin}" in search paths: {search_paths} or systemctl'
|
||||||
format(shutdown_bin, search_paths, systemctl_search_paths)) # we give up here
|
f' command in search paths: {systemctl_search_paths}, unable to shutdown.') # we give up here
|
||||||
else:
|
else:
|
||||||
return "{0} poweroff".format(full_path[0]) # done, since we cannot use args with systemd shutdown
|
return f"{full_path[0]} poweroff" # done, since we cannot use args with systemd shutdown
|
||||||
|
|
||||||
# systemd case taken care of, here we add args to the command
|
# systemd case taken care of, here we add args to the command
|
||||||
args = self._get_value_from_facts('SHUTDOWN_COMMAND_ARGS', distribution, 'DEFAULT_SHUTDOWN_COMMAND_ARGS')
|
args = self._get_value_from_facts('SHUTDOWN_COMMAND_ARGS', distribution, 'DEFAULT_SHUTDOWN_COMMAND_ARGS')
|
||||||
# Convert seconds to minutes. If less that 60, set it to 0.
|
# Convert seconds to minutes. If less that 60, set it to 0.
|
||||||
delay_sec = self.delay
|
delay_sec = self.delay
|
||||||
shutdown_message = self._task.args.get('msg', self.DEFAULT_SHUTDOWN_MESSAGE)
|
shutdown_message = self._task.args.get('msg', self.DEFAULT_SHUTDOWN_MESSAGE)
|
||||||
return '{0} {1}'. \
|
|
||||||
format(
|
af = args.format(delay_sec=delay_sec, delay_min=delay_sec // 60, message=shutdown_message)
|
||||||
full_path[0],
|
return f'{full_path[0]} {af}'
|
||||||
args.format(
|
|
||||||
delay_sec=delay_sec,
|
|
||||||
delay_min=delay_sec // 60,
|
|
||||||
message=shutdown_message
|
|
||||||
)
|
|
||||||
)
|
|
||||||
|
|
||||||
def perform_shutdown(self, task_vars, distribution):
|
def perform_shutdown(self, task_vars, distribution):
|
||||||
result = {}
|
result = {}
|
||||||
@@ -174,9 +167,8 @@ class ActionModule(ActionBase):
|
|||||||
|
|
||||||
self.cleanup(force=True)
|
self.cleanup(force=True)
|
||||||
try:
|
try:
|
||||||
display.vvv("{action}: shutting down server...".format(action=self._task.action))
|
display.vvv(f"{self._task.action}: shutting down server...")
|
||||||
display.debug("{action}: shutting down server with command '{command}'".
|
display.debug(f"{self._task.action}: shutting down server with command '{shutdown_command_exec}'")
|
||||||
format(action=self._task.action, command=shutdown_command_exec))
|
|
||||||
if self._play_context.check_mode:
|
if self._play_context.check_mode:
|
||||||
shutdown_result['rc'] = 0
|
shutdown_result['rc'] = 0
|
||||||
else:
|
else:
|
||||||
@@ -184,16 +176,13 @@ class ActionModule(ActionBase):
|
|||||||
except AnsibleConnectionFailure as e:
|
except AnsibleConnectionFailure as e:
|
||||||
# If the connection is closed too quickly due to the system being shutdown, carry on
|
# If the connection is closed too quickly due to the system being shutdown, carry on
|
||||||
display.debug(
|
display.debug(
|
||||||
'{action}: AnsibleConnectionFailure caught and handled: {error}'.format(action=self._task.action,
|
f'{self._task.action}: AnsibleConnectionFailure caught and handled: {e}')
|
||||||
error=to_text(e)))
|
|
||||||
shutdown_result['rc'] = 0
|
shutdown_result['rc'] = 0
|
||||||
|
|
||||||
if shutdown_result['rc'] != 0:
|
if shutdown_result['rc'] != 0:
|
||||||
result['failed'] = True
|
result['failed'] = True
|
||||||
result['shutdown'] = False
|
result['shutdown'] = False
|
||||||
result['msg'] = "Shutdown command failed. Error was {stdout}, {stderr}".format(
|
result['msg'] = f"Shutdown command failed. Error was {fmt(shutdown_result, 'stdout')}, {fmt(shutdown_result, 'stderr')}"
|
||||||
stdout=to_native(shutdown_result['stdout'].strip()),
|
|
||||||
stderr=to_native(shutdown_result['stderr'].strip()))
|
|
||||||
return result
|
return result
|
||||||
|
|
||||||
result['failed'] = False
|
result['failed'] = False
|
||||||
@@ -206,7 +195,7 @@ class ActionModule(ActionBase):
|
|||||||
|
|
||||||
# If running with local connection, fail so we don't shutdown ourself
|
# If running with local connection, fail so we don't shutdown ourself
|
||||||
if self._connection.transport == 'local' and (not self._play_context.check_mode):
|
if self._connection.transport == 'local' and (not self._play_context.check_mode):
|
||||||
msg = 'Running {0} with local connection would shutdown the control node.'.format(self._task.action)
|
msg = f'Running {self._task.action} with local connection would shutdown the control node.'
|
||||||
return {'changed': False, 'elapsed': 0, 'shutdown': False, 'failed': True, 'msg': msg}
|
return {'changed': False, 'elapsed': 0, 'shutdown': False, 'failed': True, 'msg': msg}
|
||||||
|
|
||||||
if task_vars is None:
|
if task_vars is None:
|
||||||
|
|||||||
@@ -2,89 +2,91 @@
|
|||||||
# Copyright (c) 2018, Ansible Project
|
# Copyright (c) 2018, Ansible Project
|
||||||
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
|
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||||
# SPDX-License-Identifier: GPL-3.0-or-later
|
# SPDX-License-Identifier: GPL-3.0-or-later
|
||||||
from __future__ import (absolute_import, division, print_function)
|
from __future__ import annotations
|
||||||
__metaclass__ = type
|
|
||||||
|
|
||||||
DOCUMENTATION = '''
|
DOCUMENTATION = r"""
|
||||||
name: doas
|
name: doas
|
||||||
short_description: Do As user
|
short_description: Do As user
|
||||||
|
description:
|
||||||
|
- This become plugins allows your remote/login user to execute commands as another user using the C(doas) utility.
|
||||||
|
author: Ansible Core Team
|
||||||
|
options:
|
||||||
|
become_user:
|
||||||
|
description: User you 'become' to execute the task.
|
||||||
|
type: string
|
||||||
|
ini:
|
||||||
|
- section: privilege_escalation
|
||||||
|
key: become_user
|
||||||
|
- section: doas_become_plugin
|
||||||
|
key: user
|
||||||
|
vars:
|
||||||
|
- name: ansible_become_user
|
||||||
|
- name: ansible_doas_user
|
||||||
|
env:
|
||||||
|
- name: ANSIBLE_BECOME_USER
|
||||||
|
- name: ANSIBLE_DOAS_USER
|
||||||
|
become_exe:
|
||||||
|
description: C(doas) executable.
|
||||||
|
type: string
|
||||||
|
default: doas
|
||||||
|
ini:
|
||||||
|
- section: privilege_escalation
|
||||||
|
key: become_exe
|
||||||
|
- section: doas_become_plugin
|
||||||
|
key: executable
|
||||||
|
vars:
|
||||||
|
- name: ansible_become_exe
|
||||||
|
- name: ansible_doas_exe
|
||||||
|
env:
|
||||||
|
- name: ANSIBLE_BECOME_EXE
|
||||||
|
- name: ANSIBLE_DOAS_EXE
|
||||||
|
become_flags:
|
||||||
|
description: Options to pass to C(doas).
|
||||||
|
type: string
|
||||||
|
default: ''
|
||||||
|
ini:
|
||||||
|
- section: privilege_escalation
|
||||||
|
key: become_flags
|
||||||
|
- section: doas_become_plugin
|
||||||
|
key: flags
|
||||||
|
vars:
|
||||||
|
- name: ansible_become_flags
|
||||||
|
- name: ansible_doas_flags
|
||||||
|
env:
|
||||||
|
- name: ANSIBLE_BECOME_FLAGS
|
||||||
|
- name: ANSIBLE_DOAS_FLAGS
|
||||||
|
become_pass:
|
||||||
|
description: Password for C(doas) prompt.
|
||||||
|
type: string
|
||||||
|
required: false
|
||||||
|
vars:
|
||||||
|
- name: ansible_become_password
|
||||||
|
- name: ansible_become_pass
|
||||||
|
- name: ansible_doas_pass
|
||||||
|
env:
|
||||||
|
- name: ANSIBLE_BECOME_PASS
|
||||||
|
- name: ANSIBLE_DOAS_PASS
|
||||||
|
ini:
|
||||||
|
- section: doas_become_plugin
|
||||||
|
key: password
|
||||||
|
prompt_l10n:
|
||||||
description:
|
description:
|
||||||
- This become plugins allows your remote/login user to execute commands as another user via the doas utility.
|
- List of localized strings to match for prompt detection.
|
||||||
author: Ansible Core Team
|
- If empty we will use the built in one.
|
||||||
options:
|
type: list
|
||||||
become_user:
|
elements: string
|
||||||
description: User you 'become' to execute the task.
|
default: []
|
||||||
type: string
|
ini:
|
||||||
ini:
|
- section: doas_become_plugin
|
||||||
- section: privilege_escalation
|
key: localized_prompts
|
||||||
key: become_user
|
vars:
|
||||||
- section: doas_become_plugin
|
- name: ansible_doas_prompt_l10n
|
||||||
key: user
|
env:
|
||||||
vars:
|
- name: ANSIBLE_DOAS_PROMPT_L10N
|
||||||
- name: ansible_become_user
|
notes:
|
||||||
- name: ansible_doas_user
|
- This become plugin does not work when connection pipelining is enabled. With ansible-core 2.19+, using it automatically
|
||||||
env:
|
disables pipelining. On ansible-core 2.18 and before, pipelining must explicitly be disabled by the user.
|
||||||
- name: ANSIBLE_BECOME_USER
|
"""
|
||||||
- name: ANSIBLE_DOAS_USER
|
|
||||||
become_exe:
|
|
||||||
description: Doas executable.
|
|
||||||
type: string
|
|
||||||
default: doas
|
|
||||||
ini:
|
|
||||||
- section: privilege_escalation
|
|
||||||
key: become_exe
|
|
||||||
- section: doas_become_plugin
|
|
||||||
key: executable
|
|
||||||
vars:
|
|
||||||
- name: ansible_become_exe
|
|
||||||
- name: ansible_doas_exe
|
|
||||||
env:
|
|
||||||
- name: ANSIBLE_BECOME_EXE
|
|
||||||
- name: ANSIBLE_DOAS_EXE
|
|
||||||
become_flags:
|
|
||||||
description: Options to pass to doas.
|
|
||||||
type: string
|
|
||||||
default: ''
|
|
||||||
ini:
|
|
||||||
- section: privilege_escalation
|
|
||||||
key: become_flags
|
|
||||||
- section: doas_become_plugin
|
|
||||||
key: flags
|
|
||||||
vars:
|
|
||||||
- name: ansible_become_flags
|
|
||||||
- name: ansible_doas_flags
|
|
||||||
env:
|
|
||||||
- name: ANSIBLE_BECOME_FLAGS
|
|
||||||
- name: ANSIBLE_DOAS_FLAGS
|
|
||||||
become_pass:
|
|
||||||
description: Password for doas prompt.
|
|
||||||
type: string
|
|
||||||
required: false
|
|
||||||
vars:
|
|
||||||
- name: ansible_become_password
|
|
||||||
- name: ansible_become_pass
|
|
||||||
- name: ansible_doas_pass
|
|
||||||
env:
|
|
||||||
- name: ANSIBLE_BECOME_PASS
|
|
||||||
- name: ANSIBLE_DOAS_PASS
|
|
||||||
ini:
|
|
||||||
- section: doas_become_plugin
|
|
||||||
key: password
|
|
||||||
prompt_l10n:
|
|
||||||
description:
|
|
||||||
- List of localized strings to match for prompt detection.
|
|
||||||
- If empty we will use the built in one.
|
|
||||||
type: list
|
|
||||||
elements: string
|
|
||||||
default: []
|
|
||||||
ini:
|
|
||||||
- section: doas_become_plugin
|
|
||||||
key: localized_prompts
|
|
||||||
vars:
|
|
||||||
- name: ansible_doas_prompt_l10n
|
|
||||||
env:
|
|
||||||
- name: ANSIBLE_DOAS_PROMPT_L10N
|
|
||||||
'''
|
|
||||||
|
|
||||||
import re
|
import re
|
||||||
|
|
||||||
@@ -100,6 +102,10 @@ class BecomeModule(BecomeBase):
|
|||||||
fail = ('Permission denied',)
|
fail = ('Permission denied',)
|
||||||
missing = ('Authorization required',)
|
missing = ('Authorization required',)
|
||||||
|
|
||||||
|
# See https://github.com/ansible-collections/community.general/issues/9977,
|
||||||
|
# https://github.com/ansible/ansible/pull/78111
|
||||||
|
pipelining = False
|
||||||
|
|
||||||
def check_password_prompt(self, b_output):
|
def check_password_prompt(self, b_output):
|
||||||
''' checks if the expected password prompt exists in b_output '''
|
''' checks if the expected password prompt exists in b_output '''
|
||||||
|
|
||||||
@@ -125,9 +131,9 @@ class BecomeModule(BecomeBase):
|
|||||||
flags += ' -n'
|
flags += ' -n'
|
||||||
|
|
||||||
become_user = self.get_option('become_user')
|
become_user = self.get_option('become_user')
|
||||||
user = '-u %s' % (become_user) if become_user else ''
|
user = f'-u {become_user}' if become_user else ''
|
||||||
|
|
||||||
success_cmd = self._build_success_command(cmd, shell, noexe=True)
|
success_cmd = self._build_success_command(cmd, shell, noexe=True)
|
||||||
executable = getattr(shell, 'executable', shell.SHELL_FAMILY)
|
executable = getattr(shell, 'executable', shell.SHELL_FAMILY)
|
||||||
|
|
||||||
return '%s %s %s %s -c %s' % (become_exe, flags, user, executable, success_cmd)
|
return f'{become_exe} {flags} {user} {executable} -c {success_cmd}'
|
||||||
|
|||||||
@@ -2,75 +2,74 @@
|
|||||||
# Copyright (c) 2018, Ansible Project
|
# Copyright (c) 2018, Ansible Project
|
||||||
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
|
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||||
# SPDX-License-Identifier: GPL-3.0-or-later
|
# SPDX-License-Identifier: GPL-3.0-or-later
|
||||||
from __future__ import (absolute_import, division, print_function)
|
from __future__ import annotations
|
||||||
__metaclass__ = type
|
|
||||||
|
|
||||||
DOCUMENTATION = '''
|
DOCUMENTATION = r"""
|
||||||
name: dzdo
|
name: dzdo
|
||||||
short_description: Centrify's Direct Authorize
|
short_description: Centrify's Direct Authorize
|
||||||
description:
|
description:
|
||||||
- This become plugins allows your remote/login user to execute commands as another user via the dzdo utility.
|
- This become plugins allows your remote/login user to execute commands as another user using the C(dzdo) utility.
|
||||||
author: Ansible Core Team
|
author: Ansible Core Team
|
||||||
options:
|
options:
|
||||||
become_user:
|
become_user:
|
||||||
description: User you 'become' to execute the task.
|
description: User you 'become' to execute the task.
|
||||||
type: string
|
type: string
|
||||||
ini:
|
ini:
|
||||||
- section: privilege_escalation
|
- section: privilege_escalation
|
||||||
key: become_user
|
key: become_user
|
||||||
- section: dzdo_become_plugin
|
- section: dzdo_become_plugin
|
||||||
key: user
|
key: user
|
||||||
vars:
|
vars:
|
||||||
- name: ansible_become_user
|
- name: ansible_become_user
|
||||||
- name: ansible_dzdo_user
|
- name: ansible_dzdo_user
|
||||||
env:
|
env:
|
||||||
- name: ANSIBLE_BECOME_USER
|
- name: ANSIBLE_BECOME_USER
|
||||||
- name: ANSIBLE_DZDO_USER
|
- name: ANSIBLE_DZDO_USER
|
||||||
become_exe:
|
become_exe:
|
||||||
description: Dzdo executable.
|
description: C(dzdo) executable.
|
||||||
type: string
|
type: string
|
||||||
default: dzdo
|
default: dzdo
|
||||||
ini:
|
ini:
|
||||||
- section: privilege_escalation
|
- section: privilege_escalation
|
||||||
key: become_exe
|
key: become_exe
|
||||||
- section: dzdo_become_plugin
|
- section: dzdo_become_plugin
|
||||||
key: executable
|
key: executable
|
||||||
vars:
|
vars:
|
||||||
- name: ansible_become_exe
|
- name: ansible_become_exe
|
||||||
- name: ansible_dzdo_exe
|
- name: ansible_dzdo_exe
|
||||||
env:
|
env:
|
||||||
- name: ANSIBLE_BECOME_EXE
|
- name: ANSIBLE_BECOME_EXE
|
||||||
- name: ANSIBLE_DZDO_EXE
|
- name: ANSIBLE_DZDO_EXE
|
||||||
become_flags:
|
become_flags:
|
||||||
description: Options to pass to dzdo.
|
description: Options to pass to C(dzdo).
|
||||||
type: string
|
type: string
|
||||||
default: -H -S -n
|
default: -H -S -n
|
||||||
ini:
|
ini:
|
||||||
- section: privilege_escalation
|
- section: privilege_escalation
|
||||||
key: become_flags
|
key: become_flags
|
||||||
- section: dzdo_become_plugin
|
- section: dzdo_become_plugin
|
||||||
key: flags
|
key: flags
|
||||||
vars:
|
vars:
|
||||||
- name: ansible_become_flags
|
- name: ansible_become_flags
|
||||||
- name: ansible_dzdo_flags
|
- name: ansible_dzdo_flags
|
||||||
env:
|
env:
|
||||||
- name: ANSIBLE_BECOME_FLAGS
|
- name: ANSIBLE_BECOME_FLAGS
|
||||||
- name: ANSIBLE_DZDO_FLAGS
|
- name: ANSIBLE_DZDO_FLAGS
|
||||||
become_pass:
|
become_pass:
|
||||||
description: Options to pass to dzdo.
|
description: Options to pass to C(dzdo).
|
||||||
type: string
|
type: string
|
||||||
required: false
|
required: false
|
||||||
vars:
|
vars:
|
||||||
- name: ansible_become_password
|
- name: ansible_become_password
|
||||||
- name: ansible_become_pass
|
- name: ansible_become_pass
|
||||||
- name: ansible_dzdo_pass
|
- name: ansible_dzdo_pass
|
||||||
env:
|
env:
|
||||||
- name: ANSIBLE_BECOME_PASS
|
- name: ANSIBLE_BECOME_PASS
|
||||||
- name: ANSIBLE_DZDO_PASS
|
- name: ANSIBLE_DZDO_PASS
|
||||||
ini:
|
ini:
|
||||||
- section: dzdo_become_plugin
|
- section: dzdo_become_plugin
|
||||||
key: password
|
key: password
|
||||||
'''
|
"""
|
||||||
|
|
||||||
from ansible.plugins.become import BecomeBase
|
from ansible.plugins.become import BecomeBase
|
||||||
|
|
||||||
@@ -92,10 +91,10 @@ class BecomeModule(BecomeBase):
|
|||||||
|
|
||||||
flags = self.get_option('become_flags')
|
flags = self.get_option('become_flags')
|
||||||
if self.get_option('become_pass'):
|
if self.get_option('become_pass'):
|
||||||
self.prompt = '[dzdo via ansible, key=%s] password:' % self._id
|
self.prompt = f'[dzdo via ansible, key={self._id}] password:'
|
||||||
flags = '%s -p "%s"' % (flags.replace('-n', ''), self.prompt)
|
flags = f"{flags.replace('-n', '')} -p \"{self.prompt}\""
|
||||||
|
|
||||||
become_user = self.get_option('become_user')
|
become_user = self.get_option('become_user')
|
||||||
user = '-u %s' % (become_user) if become_user else ''
|
user = f'-u {become_user}' if become_user else ''
|
||||||
|
|
||||||
return ' '.join([becomecmd, flags, user, self._build_success_command(cmd, shell)])
|
return f"{becomecmd} {flags} {user} {self._build_success_command(cmd, shell)}"
|
||||||
|
|||||||
@@ -2,90 +2,89 @@
|
|||||||
# Copyright (c) 2018, Ansible Project
|
# Copyright (c) 2018, Ansible Project
|
||||||
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
|
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||||
# SPDX-License-Identifier: GPL-3.0-or-later
|
# SPDX-License-Identifier: GPL-3.0-or-later
|
||||||
from __future__ import (absolute_import, division, print_function)
|
from __future__ import annotations
|
||||||
__metaclass__ = type
|
|
||||||
|
|
||||||
DOCUMENTATION = '''
|
DOCUMENTATION = r"""
|
||||||
name: ksu
|
name: ksu
|
||||||
short_description: Kerberos substitute user
|
short_description: Kerberos substitute user
|
||||||
|
description:
|
||||||
|
- This become plugins allows your remote/login user to execute commands as another user using the C(ksu) utility.
|
||||||
|
author: Ansible Core Team
|
||||||
|
options:
|
||||||
|
become_user:
|
||||||
|
description: User you 'become' to execute the task.
|
||||||
|
type: string
|
||||||
|
ini:
|
||||||
|
- section: privilege_escalation
|
||||||
|
key: become_user
|
||||||
|
- section: ksu_become_plugin
|
||||||
|
key: user
|
||||||
|
vars:
|
||||||
|
- name: ansible_become_user
|
||||||
|
- name: ansible_ksu_user
|
||||||
|
env:
|
||||||
|
- name: ANSIBLE_BECOME_USER
|
||||||
|
- name: ANSIBLE_KSU_USER
|
||||||
|
required: true
|
||||||
|
become_exe:
|
||||||
|
description: C(ksu) executable.
|
||||||
|
type: string
|
||||||
|
default: ksu
|
||||||
|
ini:
|
||||||
|
- section: privilege_escalation
|
||||||
|
key: become_exe
|
||||||
|
- section: ksu_become_plugin
|
||||||
|
key: executable
|
||||||
|
vars:
|
||||||
|
- name: ansible_become_exe
|
||||||
|
- name: ansible_ksu_exe
|
||||||
|
env:
|
||||||
|
- name: ANSIBLE_BECOME_EXE
|
||||||
|
- name: ANSIBLE_KSU_EXE
|
||||||
|
become_flags:
|
||||||
|
description: Options to pass to C(ksu).
|
||||||
|
type: string
|
||||||
|
default: ''
|
||||||
|
ini:
|
||||||
|
- section: privilege_escalation
|
||||||
|
key: become_flags
|
||||||
|
- section: ksu_become_plugin
|
||||||
|
key: flags
|
||||||
|
vars:
|
||||||
|
- name: ansible_become_flags
|
||||||
|
- name: ansible_ksu_flags
|
||||||
|
env:
|
||||||
|
- name: ANSIBLE_BECOME_FLAGS
|
||||||
|
- name: ANSIBLE_KSU_FLAGS
|
||||||
|
become_pass:
|
||||||
|
description: C(ksu) password.
|
||||||
|
type: string
|
||||||
|
required: false
|
||||||
|
vars:
|
||||||
|
- name: ansible_ksu_pass
|
||||||
|
- name: ansible_become_pass
|
||||||
|
- name: ansible_become_password
|
||||||
|
env:
|
||||||
|
- name: ANSIBLE_BECOME_PASS
|
||||||
|
- name: ANSIBLE_KSU_PASS
|
||||||
|
ini:
|
||||||
|
- section: ksu_become_plugin
|
||||||
|
key: password
|
||||||
|
prompt_l10n:
|
||||||
description:
|
description:
|
||||||
- This become plugins allows your remote/login user to execute commands as another user via the ksu utility.
|
- List of localized strings to match for prompt detection.
|
||||||
author: Ansible Core Team
|
- If empty we will use the built in one.
|
||||||
options:
|
type: list
|
||||||
become_user:
|
elements: string
|
||||||
description: User you 'become' to execute the task.
|
default: []
|
||||||
type: string
|
ini:
|
||||||
ini:
|
- section: ksu_become_plugin
|
||||||
- section: privilege_escalation
|
key: localized_prompts
|
||||||
key: become_user
|
vars:
|
||||||
- section: ksu_become_plugin
|
- name: ansible_ksu_prompt_l10n
|
||||||
key: user
|
env:
|
||||||
vars:
|
- name: ANSIBLE_KSU_PROMPT_L10N
|
||||||
- name: ansible_become_user
|
"""
|
||||||
- name: ansible_ksu_user
|
|
||||||
env:
|
|
||||||
- name: ANSIBLE_BECOME_USER
|
|
||||||
- name: ANSIBLE_KSU_USER
|
|
||||||
required: true
|
|
||||||
become_exe:
|
|
||||||
description: Su executable.
|
|
||||||
type: string
|
|
||||||
default: ksu
|
|
||||||
ini:
|
|
||||||
- section: privilege_escalation
|
|
||||||
key: become_exe
|
|
||||||
- section: ksu_become_plugin
|
|
||||||
key: executable
|
|
||||||
vars:
|
|
||||||
- name: ansible_become_exe
|
|
||||||
- name: ansible_ksu_exe
|
|
||||||
env:
|
|
||||||
- name: ANSIBLE_BECOME_EXE
|
|
||||||
- name: ANSIBLE_KSU_EXE
|
|
||||||
become_flags:
|
|
||||||
description: Options to pass to ksu.
|
|
||||||
type: string
|
|
||||||
default: ''
|
|
||||||
ini:
|
|
||||||
- section: privilege_escalation
|
|
||||||
key: become_flags
|
|
||||||
- section: ksu_become_plugin
|
|
||||||
key: flags
|
|
||||||
vars:
|
|
||||||
- name: ansible_become_flags
|
|
||||||
- name: ansible_ksu_flags
|
|
||||||
env:
|
|
||||||
- name: ANSIBLE_BECOME_FLAGS
|
|
||||||
- name: ANSIBLE_KSU_FLAGS
|
|
||||||
become_pass:
|
|
||||||
description: Ksu password.
|
|
||||||
type: string
|
|
||||||
required: false
|
|
||||||
vars:
|
|
||||||
- name: ansible_ksu_pass
|
|
||||||
- name: ansible_become_pass
|
|
||||||
- name: ansible_become_password
|
|
||||||
env:
|
|
||||||
- name: ANSIBLE_BECOME_PASS
|
|
||||||
- name: ANSIBLE_KSU_PASS
|
|
||||||
ini:
|
|
||||||
- section: ksu_become_plugin
|
|
||||||
key: password
|
|
||||||
prompt_l10n:
|
|
||||||
description:
|
|
||||||
- List of localized strings to match for prompt detection.
|
|
||||||
- If empty we will use the built in one.
|
|
||||||
type: list
|
|
||||||
elements: string
|
|
||||||
default: []
|
|
||||||
ini:
|
|
||||||
- section: ksu_become_plugin
|
|
||||||
key: localized_prompts
|
|
||||||
vars:
|
|
||||||
- name: ansible_ksu_prompt_l10n
|
|
||||||
env:
|
|
||||||
- name: ANSIBLE_KSU_PROMPT_L10N
|
|
||||||
'''
|
|
||||||
|
|
||||||
import re
|
import re
|
||||||
|
|
||||||
@@ -124,4 +123,4 @@ class BecomeModule(BecomeBase):
|
|||||||
|
|
||||||
flags = self.get_option('become_flags')
|
flags = self.get_option('become_flags')
|
||||||
user = self.get_option('become_user')
|
user = self.get_option('become_user')
|
||||||
return '%s %s %s -e %s ' % (exe, user, flags, self._build_success_command(cmd, shell))
|
return f'{exe} {user} {flags} -e {self._build_success_command(cmd, shell)} '
|
||||||
|
|||||||
@@ -2,99 +2,99 @@
|
|||||||
# Copyright (c) 2018, Ansible Project
|
# Copyright (c) 2018, Ansible Project
|
||||||
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
|
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||||
# SPDX-License-Identifier: GPL-3.0-or-later
|
# SPDX-License-Identifier: GPL-3.0-or-later
|
||||||
from __future__ import (absolute_import, division, print_function)
|
from __future__ import annotations
|
||||||
__metaclass__ = type
|
|
||||||
|
|
||||||
DOCUMENTATION = '''
|
DOCUMENTATION = r"""
|
||||||
name: machinectl
|
name: machinectl
|
||||||
short_description: Systemd's machinectl privilege escalation
|
short_description: Systemd's machinectl privilege escalation
|
||||||
description:
|
description:
|
||||||
- This become plugins allows your remote/login user to execute commands as another user via the machinectl utility.
|
- This become plugins allows your remote/login user to execute commands as another user using the C(machinectl) utility.
|
||||||
author: Ansible Core Team
|
author: Ansible Core Team
|
||||||
options:
|
options:
|
||||||
become_user:
|
become_user:
|
||||||
description: User you 'become' to execute the task.
|
description: User you 'become' to execute the task.
|
||||||
type: string
|
type: string
|
||||||
default: ''
|
default: ''
|
||||||
ini:
|
ini:
|
||||||
- section: privilege_escalation
|
- section: privilege_escalation
|
||||||
key: become_user
|
key: become_user
|
||||||
- section: machinectl_become_plugin
|
- section: machinectl_become_plugin
|
||||||
key: user
|
key: user
|
||||||
vars:
|
vars:
|
||||||
- name: ansible_become_user
|
- name: ansible_become_user
|
||||||
- name: ansible_machinectl_user
|
- name: ansible_machinectl_user
|
||||||
env:
|
env:
|
||||||
- name: ANSIBLE_BECOME_USER
|
- name: ANSIBLE_BECOME_USER
|
||||||
- name: ANSIBLE_MACHINECTL_USER
|
- name: ANSIBLE_MACHINECTL_USER
|
||||||
become_exe:
|
become_exe:
|
||||||
description: Machinectl executable.
|
description: C(machinectl) executable.
|
||||||
type: string
|
type: string
|
||||||
default: machinectl
|
default: machinectl
|
||||||
ini:
|
ini:
|
||||||
- section: privilege_escalation
|
- section: privilege_escalation
|
||||||
key: become_exe
|
key: become_exe
|
||||||
- section: machinectl_become_plugin
|
- section: machinectl_become_plugin
|
||||||
key: executable
|
key: executable
|
||||||
vars:
|
vars:
|
||||||
- name: ansible_become_exe
|
- name: ansible_become_exe
|
||||||
- name: ansible_machinectl_exe
|
- name: ansible_machinectl_exe
|
||||||
env:
|
env:
|
||||||
- name: ANSIBLE_BECOME_EXE
|
- name: ANSIBLE_BECOME_EXE
|
||||||
- name: ANSIBLE_MACHINECTL_EXE
|
- name: ANSIBLE_MACHINECTL_EXE
|
||||||
become_flags:
|
become_flags:
|
||||||
description: Options to pass to machinectl.
|
description: Options to pass to C(machinectl).
|
||||||
type: string
|
type: string
|
||||||
default: ''
|
default: ''
|
||||||
ini:
|
ini:
|
||||||
- section: privilege_escalation
|
- section: privilege_escalation
|
||||||
key: become_flags
|
key: become_flags
|
||||||
- section: machinectl_become_plugin
|
- section: machinectl_become_plugin
|
||||||
key: flags
|
key: flags
|
||||||
vars:
|
vars:
|
||||||
- name: ansible_become_flags
|
- name: ansible_become_flags
|
||||||
- name: ansible_machinectl_flags
|
- name: ansible_machinectl_flags
|
||||||
env:
|
env:
|
||||||
- name: ANSIBLE_BECOME_FLAGS
|
- name: ANSIBLE_BECOME_FLAGS
|
||||||
- name: ANSIBLE_MACHINECTL_FLAGS
|
- name: ANSIBLE_MACHINECTL_FLAGS
|
||||||
become_pass:
|
become_pass:
|
||||||
description: Password for machinectl.
|
description: Password for C(machinectl).
|
||||||
type: string
|
type: string
|
||||||
required: false
|
required: false
|
||||||
vars:
|
vars:
|
||||||
- name: ansible_become_password
|
- name: ansible_become_password
|
||||||
- name: ansible_become_pass
|
- name: ansible_become_pass
|
||||||
- name: ansible_machinectl_pass
|
- name: ansible_machinectl_pass
|
||||||
env:
|
env:
|
||||||
- name: ANSIBLE_BECOME_PASS
|
- name: ANSIBLE_BECOME_PASS
|
||||||
- name: ANSIBLE_MACHINECTL_PASS
|
- name: ANSIBLE_MACHINECTL_PASS
|
||||||
ini:
|
ini:
|
||||||
- section: machinectl_become_plugin
|
- section: machinectl_become_plugin
|
||||||
key: password
|
key: password
|
||||||
notes:
|
notes:
|
||||||
- When not using this plugin with user V(root), it only works correctly with a polkit rule which will alter
|
- When not using this plugin with user V(root), it only works correctly with a polkit rule which will alter the behaviour
|
||||||
the behaviour of machinectl. This rule must alter the prompt behaviour to ask directly for the user credentials,
|
of machinectl. This rule must alter the prompt behaviour to ask directly for the user credentials, if the user is allowed
|
||||||
if the user is allowed to perform the action (take a look at the examples section).
|
to perform the action (take a look at the examples section). If such a rule is not present the plugin only work if it
|
||||||
If such a rule is not present the plugin only work if it is used in context with the root user,
|
is used in context with the root user, because then no further prompt will be shown by machinectl.
|
||||||
because then no further prompt will be shown by machinectl.
|
- This become plugin does not work when connection pipelining is enabled. With ansible-core 2.19+, using it automatically
|
||||||
'''
|
disables pipelining. On ansible-core 2.18 and before, pipelining must explicitly be disabled by the user.
|
||||||
|
"""
|
||||||
|
|
||||||
EXAMPLES = r'''
|
EXAMPLES = r"""
|
||||||
# A polkit rule needed to use the module with a non-root user.
|
# A polkit rule needed to use the module with a non-root user.
|
||||||
# See the Notes section for details.
|
# See the Notes section for details.
|
||||||
/etc/polkit-1/rules.d/60-machinectl-fast-user-auth.rules: |
|
/etc/polkit-1/rules.d/60-machinectl-fast-user-auth.rules: |-
|
||||||
polkit.addRule(function(action, subject) {
|
polkit.addRule(function(action, subject) {
|
||||||
if(action.id == "org.freedesktop.machine1.host-shell" &&
|
if(action.id == "org.freedesktop.machine1.host-shell" &&
|
||||||
subject.isInGroup("wheel")) {
|
subject.isInGroup("wheel")) {
|
||||||
return polkit.Result.AUTH_SELF_KEEP;
|
return polkit.Result.AUTH_SELF_KEEP;
|
||||||
}
|
}
|
||||||
});
|
});
|
||||||
'''
|
"""
|
||||||
|
|
||||||
from re import compile as re_compile
|
from re import compile as re_compile
|
||||||
|
|
||||||
from ansible.plugins.become import BecomeBase
|
from ansible.plugins.become import BecomeBase
|
||||||
from ansible.module_utils._text import to_bytes
|
from ansible.module_utils.common.text.converters import to_bytes
|
||||||
|
|
||||||
|
|
||||||
ansi_color_codes = re_compile(to_bytes(r'\x1B\[[0-9;]+m'))
|
ansi_color_codes = re_compile(to_bytes(r'\x1B\[[0-9;]+m'))
|
||||||
@@ -109,6 +109,10 @@ class BecomeModule(BecomeBase):
|
|||||||
success = ('==== AUTHENTICATION COMPLETE ====',)
|
success = ('==== AUTHENTICATION COMPLETE ====',)
|
||||||
require_tty = True # see https://github.com/ansible-collections/community.general/issues/6932
|
require_tty = True # see https://github.com/ansible-collections/community.general/issues/6932
|
||||||
|
|
||||||
|
# See https://github.com/ansible/ansible/issues/81254,
|
||||||
|
# https://github.com/ansible/ansible/pull/78111
|
||||||
|
pipelining = False
|
||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
def remove_ansi_codes(line):
|
def remove_ansi_codes(line):
|
||||||
return ansi_color_codes.sub(b"", line)
|
return ansi_color_codes.sub(b"", line)
|
||||||
@@ -123,7 +127,7 @@ class BecomeModule(BecomeBase):
|
|||||||
|
|
||||||
flags = self.get_option('become_flags')
|
flags = self.get_option('become_flags')
|
||||||
user = self.get_option('become_user')
|
user = self.get_option('become_user')
|
||||||
return '%s -q shell %s %s@ %s' % (become, flags, user, self._build_success_command(cmd, shell))
|
return f'{become} -q shell {flags} {user}@ {self._build_success_command(cmd, shell)}'
|
||||||
|
|
||||||
def check_success(self, b_output):
|
def check_success(self, b_output):
|
||||||
b_output = self.remove_ansi_codes(b_output)
|
b_output = self.remove_ansi_codes(b_output)
|
||||||
|
|||||||
@@ -2,87 +2,86 @@
|
|||||||
# Copyright (c) 2018, Ansible Project
|
# Copyright (c) 2018, Ansible Project
|
||||||
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
|
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||||
# SPDX-License-Identifier: GPL-3.0-or-later
|
# SPDX-License-Identifier: GPL-3.0-or-later
|
||||||
from __future__ import (absolute_import, division, print_function)
|
from __future__ import annotations
|
||||||
__metaclass__ = type
|
|
||||||
|
|
||||||
DOCUMENTATION = '''
|
DOCUMENTATION = r"""
|
||||||
name: pbrun
|
name: pbrun
|
||||||
short_description: PowerBroker run
|
short_description: PowerBroker run
|
||||||
description:
|
description:
|
||||||
- This become plugins allows your remote/login user to execute commands as another user via the pbrun utility.
|
- This become plugins allows your remote/login user to execute commands as another user using the C(pbrun) utility.
|
||||||
author: Ansible Core Team
|
author: Ansible Core Team
|
||||||
options:
|
options:
|
||||||
become_user:
|
become_user:
|
||||||
description: User you 'become' to execute the task.
|
description: User you 'become' to execute the task.
|
||||||
type: string
|
type: string
|
||||||
default: ''
|
default: ''
|
||||||
ini:
|
ini:
|
||||||
- section: privilege_escalation
|
- section: privilege_escalation
|
||||||
key: become_user
|
key: become_user
|
||||||
- section: pbrun_become_plugin
|
- section: pbrun_become_plugin
|
||||||
key: user
|
key: user
|
||||||
vars:
|
vars:
|
||||||
- name: ansible_become_user
|
- name: ansible_become_user
|
||||||
- name: ansible_pbrun_user
|
- name: ansible_pbrun_user
|
||||||
env:
|
env:
|
||||||
- name: ANSIBLE_BECOME_USER
|
- name: ANSIBLE_BECOME_USER
|
||||||
- name: ANSIBLE_PBRUN_USER
|
- name: ANSIBLE_PBRUN_USER
|
||||||
become_exe:
|
become_exe:
|
||||||
description: Sudo executable.
|
description: C(pbrun) executable.
|
||||||
type: string
|
type: string
|
||||||
default: pbrun
|
default: pbrun
|
||||||
ini:
|
ini:
|
||||||
- section: privilege_escalation
|
- section: privilege_escalation
|
||||||
key: become_exe
|
key: become_exe
|
||||||
- section: pbrun_become_plugin
|
- section: pbrun_become_plugin
|
||||||
key: executable
|
key: executable
|
||||||
vars:
|
vars:
|
||||||
- name: ansible_become_exe
|
- name: ansible_become_exe
|
||||||
- name: ansible_pbrun_exe
|
- name: ansible_pbrun_exe
|
||||||
env:
|
env:
|
||||||
- name: ANSIBLE_BECOME_EXE
|
- name: ANSIBLE_BECOME_EXE
|
||||||
- name: ANSIBLE_PBRUN_EXE
|
- name: ANSIBLE_PBRUN_EXE
|
||||||
become_flags:
|
become_flags:
|
||||||
description: Options to pass to pbrun.
|
description: Options to pass to C(pbrun).
|
||||||
type: string
|
type: string
|
||||||
default: ''
|
default: ''
|
||||||
ini:
|
ini:
|
||||||
- section: privilege_escalation
|
- section: privilege_escalation
|
||||||
key: become_flags
|
key: become_flags
|
||||||
- section: pbrun_become_plugin
|
- section: pbrun_become_plugin
|
||||||
key: flags
|
key: flags
|
||||||
vars:
|
vars:
|
||||||
- name: ansible_become_flags
|
- name: ansible_become_flags
|
||||||
- name: ansible_pbrun_flags
|
- name: ansible_pbrun_flags
|
||||||
env:
|
env:
|
||||||
- name: ANSIBLE_BECOME_FLAGS
|
- name: ANSIBLE_BECOME_FLAGS
|
||||||
- name: ANSIBLE_PBRUN_FLAGS
|
- name: ANSIBLE_PBRUN_FLAGS
|
||||||
become_pass:
|
become_pass:
|
||||||
description: Password for pbrun.
|
description: Password for C(pbrun).
|
||||||
type: string
|
type: string
|
||||||
required: false
|
required: false
|
||||||
vars:
|
vars:
|
||||||
- name: ansible_become_password
|
- name: ansible_become_password
|
||||||
- name: ansible_become_pass
|
- name: ansible_become_pass
|
||||||
- name: ansible_pbrun_pass
|
- name: ansible_pbrun_pass
|
||||||
env:
|
env:
|
||||||
- name: ANSIBLE_BECOME_PASS
|
- name: ANSIBLE_BECOME_PASS
|
||||||
- name: ANSIBLE_PBRUN_PASS
|
- name: ANSIBLE_PBRUN_PASS
|
||||||
ini:
|
ini:
|
||||||
- section: pbrun_become_plugin
|
- section: pbrun_become_plugin
|
||||||
key: password
|
key: password
|
||||||
wrap_exe:
|
wrap_exe:
|
||||||
description: Toggle to wrap the command pbrun calls in C(shell -c) or not.
|
description: Toggle to wrap the command C(pbrun) calls in C(shell -c) or not.
|
||||||
default: false
|
default: false
|
||||||
type: bool
|
type: bool
|
||||||
ini:
|
ini:
|
||||||
- section: pbrun_become_plugin
|
- section: pbrun_become_plugin
|
||||||
key: wrap_execution
|
key: wrap_execution
|
||||||
vars:
|
vars:
|
||||||
- name: ansible_pbrun_wrap_execution
|
- name: ansible_pbrun_wrap_execution
|
||||||
env:
|
env:
|
||||||
- name: ANSIBLE_PBRUN_WRAP_EXECUTION
|
- name: ANSIBLE_PBRUN_WRAP_EXECUTION
|
||||||
'''
|
"""
|
||||||
|
|
||||||
from ansible.plugins.become import BecomeBase
|
from ansible.plugins.become import BecomeBase
|
||||||
|
|
||||||
@@ -103,7 +102,7 @@ class BecomeModule(BecomeBase):
|
|||||||
|
|
||||||
flags = self.get_option('become_flags')
|
flags = self.get_option('become_flags')
|
||||||
become_user = self.get_option('become_user')
|
become_user = self.get_option('become_user')
|
||||||
user = '-u %s' % (become_user) if become_user else ''
|
user = f'-u {become_user}' if become_user else ''
|
||||||
noexe = not self.get_option('wrap_exe')
|
noexe = not self.get_option('wrap_exe')
|
||||||
|
|
||||||
return ' '.join([become_exe, flags, user, self._build_success_command(cmd, shell, noexe=noexe)])
|
return f"{become_exe} {flags} {user} {self._build_success_command(cmd, shell, noexe=noexe)}"
|
||||||
|
|||||||
@@ -2,92 +2,91 @@
|
|||||||
# Copyright (c) 2018, Ansible Project
|
# Copyright (c) 2018, Ansible Project
|
||||||
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
|
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||||
# SPDX-License-Identifier: GPL-3.0-or-later
|
# SPDX-License-Identifier: GPL-3.0-or-later
|
||||||
from __future__ import (absolute_import, division, print_function)
|
from __future__ import annotations
|
||||||
__metaclass__ = type
|
|
||||||
|
|
||||||
DOCUMENTATION = '''
|
DOCUMENTATION = r"""
|
||||||
name: pfexec
|
name: pfexec
|
||||||
short_description: profile based execution
|
short_description: profile based execution
|
||||||
|
description:
|
||||||
|
- This become plugins allows your remote/login user to execute commands as another user using the C(pfexec) utility.
|
||||||
|
author: Ansible Core Team
|
||||||
|
options:
|
||||||
|
become_user:
|
||||||
description:
|
description:
|
||||||
- This become plugins allows your remote/login user to execute commands as another user via the pfexec utility.
|
- User you 'become' to execute the task.
|
||||||
author: Ansible Core Team
|
- This plugin ignores this setting as pfexec uses its own C(exec_attr) to figure this out, but it is supplied here for
|
||||||
options:
|
Ansible to make decisions needed for the task execution, like file permissions.
|
||||||
become_user:
|
type: string
|
||||||
description:
|
default: root
|
||||||
- User you 'become' to execute the task.
|
ini:
|
||||||
- This plugin ignores this setting as pfexec uses it's own C(exec_attr) to figure this out,
|
- section: privilege_escalation
|
||||||
but it is supplied here for Ansible to make decisions needed for the task execution, like file permissions.
|
key: become_user
|
||||||
type: string
|
- section: pfexec_become_plugin
|
||||||
default: root
|
key: user
|
||||||
ini:
|
vars:
|
||||||
- section: privilege_escalation
|
- name: ansible_become_user
|
||||||
key: become_user
|
- name: ansible_pfexec_user
|
||||||
- section: pfexec_become_plugin
|
env:
|
||||||
key: user
|
- name: ANSIBLE_BECOME_USER
|
||||||
vars:
|
- name: ANSIBLE_PFEXEC_USER
|
||||||
- name: ansible_become_user
|
become_exe:
|
||||||
- name: ansible_pfexec_user
|
description: C(pfexec) executable.
|
||||||
env:
|
type: string
|
||||||
- name: ANSIBLE_BECOME_USER
|
default: pfexec
|
||||||
- name: ANSIBLE_PFEXEC_USER
|
ini:
|
||||||
become_exe:
|
- section: privilege_escalation
|
||||||
description: Sudo executable.
|
key: become_exe
|
||||||
type: string
|
- section: pfexec_become_plugin
|
||||||
default: pfexec
|
key: executable
|
||||||
ini:
|
vars:
|
||||||
- section: privilege_escalation
|
- name: ansible_become_exe
|
||||||
key: become_exe
|
- name: ansible_pfexec_exe
|
||||||
- section: pfexec_become_plugin
|
env:
|
||||||
key: executable
|
- name: ANSIBLE_BECOME_EXE
|
||||||
vars:
|
- name: ANSIBLE_PFEXEC_EXE
|
||||||
- name: ansible_become_exe
|
become_flags:
|
||||||
- name: ansible_pfexec_exe
|
description: Options to pass to C(pfexec).
|
||||||
env:
|
type: string
|
||||||
- name: ANSIBLE_BECOME_EXE
|
default: -H -S -n
|
||||||
- name: ANSIBLE_PFEXEC_EXE
|
ini:
|
||||||
become_flags:
|
- section: privilege_escalation
|
||||||
description: Options to pass to pfexec.
|
key: become_flags
|
||||||
type: string
|
- section: pfexec_become_plugin
|
||||||
default: -H -S -n
|
key: flags
|
||||||
ini:
|
vars:
|
||||||
- section: privilege_escalation
|
- name: ansible_become_flags
|
||||||
key: become_flags
|
- name: ansible_pfexec_flags
|
||||||
- section: pfexec_become_plugin
|
env:
|
||||||
key: flags
|
- name: ANSIBLE_BECOME_FLAGS
|
||||||
vars:
|
- name: ANSIBLE_PFEXEC_FLAGS
|
||||||
- name: ansible_become_flags
|
become_pass:
|
||||||
- name: ansible_pfexec_flags
|
description: C(pfexec) password.
|
||||||
env:
|
type: string
|
||||||
- name: ANSIBLE_BECOME_FLAGS
|
required: false
|
||||||
- name: ANSIBLE_PFEXEC_FLAGS
|
vars:
|
||||||
become_pass:
|
- name: ansible_become_password
|
||||||
description: pfexec password.
|
- name: ansible_become_pass
|
||||||
type: string
|
- name: ansible_pfexec_pass
|
||||||
required: false
|
env:
|
||||||
vars:
|
- name: ANSIBLE_BECOME_PASS
|
||||||
- name: ansible_become_password
|
- name: ANSIBLE_PFEXEC_PASS
|
||||||
- name: ansible_become_pass
|
ini:
|
||||||
- name: ansible_pfexec_pass
|
- section: pfexec_become_plugin
|
||||||
env:
|
key: password
|
||||||
- name: ANSIBLE_BECOME_PASS
|
wrap_exe:
|
||||||
- name: ANSIBLE_PFEXEC_PASS
|
description: Toggle to wrap the command C(pfexec) calls in C(shell -c) or not.
|
||||||
ini:
|
default: false
|
||||||
- section: pfexec_become_plugin
|
type: bool
|
||||||
key: password
|
ini:
|
||||||
wrap_exe:
|
- section: pfexec_become_plugin
|
||||||
description: Toggle to wrap the command pfexec calls in C(shell -c) or not.
|
key: wrap_execution
|
||||||
default: false
|
vars:
|
||||||
type: bool
|
- name: ansible_pfexec_wrap_execution
|
||||||
ini:
|
env:
|
||||||
- section: pfexec_become_plugin
|
- name: ANSIBLE_PFEXEC_WRAP_EXECUTION
|
||||||
key: wrap_execution
|
notes:
|
||||||
vars:
|
- This plugin ignores O(become_user) as pfexec uses its own C(exec_attr) to figure this out.
|
||||||
- name: ansible_pfexec_wrap_execution
|
"""
|
||||||
env:
|
|
||||||
- name: ANSIBLE_PFEXEC_WRAP_EXECUTION
|
|
||||||
notes:
|
|
||||||
- This plugin ignores O(become_user) as pfexec uses its own C(exec_attr) to figure this out.
|
|
||||||
'''
|
|
||||||
|
|
||||||
from ansible.plugins.become import BecomeBase
|
from ansible.plugins.become import BecomeBase
|
||||||
|
|
||||||
@@ -106,4 +105,4 @@ class BecomeModule(BecomeBase):
|
|||||||
|
|
||||||
flags = self.get_option('become_flags')
|
flags = self.get_option('become_flags')
|
||||||
noexe = not self.get_option('wrap_exe')
|
noexe = not self.get_option('wrap_exe')
|
||||||
return '%s %s %s' % (exe, flags, self._build_success_command(cmd, shell, noexe=noexe))
|
return f'{exe} {flags} {self._build_success_command(cmd, shell, noexe=noexe)}'
|
||||||
|
|||||||
@@ -2,66 +2,65 @@
|
|||||||
# Copyright (c) 2018, Ansible Project
|
# Copyright (c) 2018, Ansible Project
|
||||||
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
|
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||||
# SPDX-License-Identifier: GPL-3.0-or-later
|
# SPDX-License-Identifier: GPL-3.0-or-later
|
||||||
from __future__ import (absolute_import, division, print_function)
|
from __future__ import annotations
|
||||||
__metaclass__ = type
|
|
||||||
|
|
||||||
DOCUMENTATION = '''
|
DOCUMENTATION = r"""
|
||||||
name: pmrun
|
name: pmrun
|
||||||
short_description: Privilege Manager run
|
short_description: Privilege Manager run
|
||||||
description:
|
description:
|
||||||
- This become plugins allows your remote/login user to execute commands as another user via the pmrun utility.
|
- This become plugins allows your remote/login user to execute commands as another user using the C(pmrun) utility.
|
||||||
author: Ansible Core Team
|
author: Ansible Core Team
|
||||||
options:
|
options:
|
||||||
become_exe:
|
become_exe:
|
||||||
description: Sudo executable
|
description: C(pmrun) executable.
|
||||||
type: string
|
type: string
|
||||||
default: pmrun
|
default: pmrun
|
||||||
ini:
|
ini:
|
||||||
- section: privilege_escalation
|
- section: privilege_escalation
|
||||||
key: become_exe
|
key: become_exe
|
||||||
- section: pmrun_become_plugin
|
- section: pmrun_become_plugin
|
||||||
key: executable
|
key: executable
|
||||||
vars:
|
vars:
|
||||||
- name: ansible_become_exe
|
- name: ansible_become_exe
|
||||||
- name: ansible_pmrun_exe
|
- name: ansible_pmrun_exe
|
||||||
env:
|
env:
|
||||||
- name: ANSIBLE_BECOME_EXE
|
- name: ANSIBLE_BECOME_EXE
|
||||||
- name: ANSIBLE_PMRUN_EXE
|
- name: ANSIBLE_PMRUN_EXE
|
||||||
become_flags:
|
become_flags:
|
||||||
description: Options to pass to pmrun.
|
description: Options to pass to C(pmrun).
|
||||||
type: string
|
type: string
|
||||||
default: ''
|
default: ''
|
||||||
ini:
|
ini:
|
||||||
- section: privilege_escalation
|
- section: privilege_escalation
|
||||||
key: become_flags
|
key: become_flags
|
||||||
- section: pmrun_become_plugin
|
- section: pmrun_become_plugin
|
||||||
key: flags
|
key: flags
|
||||||
vars:
|
vars:
|
||||||
- name: ansible_become_flags
|
- name: ansible_become_flags
|
||||||
- name: ansible_pmrun_flags
|
- name: ansible_pmrun_flags
|
||||||
env:
|
env:
|
||||||
- name: ANSIBLE_BECOME_FLAGS
|
- name: ANSIBLE_BECOME_FLAGS
|
||||||
- name: ANSIBLE_PMRUN_FLAGS
|
- name: ANSIBLE_PMRUN_FLAGS
|
||||||
become_pass:
|
become_pass:
|
||||||
description: pmrun password.
|
description: C(pmrun) password.
|
||||||
type: string
|
type: string
|
||||||
required: false
|
required: false
|
||||||
vars:
|
vars:
|
||||||
- name: ansible_become_password
|
- name: ansible_become_password
|
||||||
- name: ansible_become_pass
|
- name: ansible_become_pass
|
||||||
- name: ansible_pmrun_pass
|
- name: ansible_pmrun_pass
|
||||||
env:
|
env:
|
||||||
- name: ANSIBLE_BECOME_PASS
|
- name: ANSIBLE_BECOME_PASS
|
||||||
- name: ANSIBLE_PMRUN_PASS
|
- name: ANSIBLE_PMRUN_PASS
|
||||||
ini:
|
ini:
|
||||||
- section: pmrun_become_plugin
|
- section: pmrun_become_plugin
|
||||||
key: password
|
key: password
|
||||||
notes:
|
notes:
|
||||||
- This plugin ignores the become_user supplied and uses pmrun's own configuration to select the user.
|
- This plugin ignores the C(become_user) supplied and uses C(pmrun)'s own configuration to select the user.
|
||||||
'''
|
"""
|
||||||
|
|
||||||
|
from shlex import quote as shlex_quote
|
||||||
from ansible.plugins.become import BecomeBase
|
from ansible.plugins.become import BecomeBase
|
||||||
from ansible.module_utils.six.moves import shlex_quote
|
|
||||||
|
|
||||||
|
|
||||||
class BecomeModule(BecomeBase):
|
class BecomeModule(BecomeBase):
|
||||||
@@ -78,4 +77,4 @@ class BecomeModule(BecomeBase):
|
|||||||
become = self.get_option('become_exe')
|
become = self.get_option('become_exe')
|
||||||
|
|
||||||
flags = self.get_option('become_flags')
|
flags = self.get_option('become_flags')
|
||||||
return '%s %s %s' % (become, flags, shlex_quote(self._build_success_command(cmd, shell)))
|
return f'{become} {flags} {shlex_quote(self._build_success_command(cmd, shell))}'
|
||||||
|
|||||||
@@ -3,72 +3,71 @@
|
|||||||
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
|
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||||
# SPDX-License-Identifier: GPL-3.0-or-later
|
# SPDX-License-Identifier: GPL-3.0-or-later
|
||||||
|
|
||||||
from __future__ import absolute_import, division, print_function
|
from __future__ import annotations
|
||||||
|
|
||||||
__metaclass__ = type
|
|
||||||
|
|
||||||
DOCUMENTATION = """
|
DOCUMENTATION = r"""
|
||||||
name: run0
|
name: run0
|
||||||
short_description: Systemd's run0
|
short_description: Systemd's run0
|
||||||
description:
|
description:
|
||||||
- This become plugins allows your remote/login user to execute commands as another user via the C(run0) utility.
|
- This become plugins allows your remote/login user to execute commands as another user using the C(run0) utility.
|
||||||
author:
|
author:
|
||||||
- Thomas Sjögren (@konstruktoid)
|
- Thomas Sjögren (@konstruktoid)
|
||||||
version_added: '9.0.0'
|
version_added: '9.0.0'
|
||||||
options:
|
options:
|
||||||
become_user:
|
become_user:
|
||||||
description: User you 'become' to execute the task.
|
description: User you 'become' to execute the task.
|
||||||
default: root
|
default: root
|
||||||
ini:
|
ini:
|
||||||
- section: privilege_escalation
|
- section: privilege_escalation
|
||||||
key: become_user
|
key: become_user
|
||||||
- section: run0_become_plugin
|
- section: run0_become_plugin
|
||||||
key: user
|
key: user
|
||||||
vars:
|
vars:
|
||||||
- name: ansible_become_user
|
- name: ansible_become_user
|
||||||
- name: ansible_run0_user
|
- name: ansible_run0_user
|
||||||
env:
|
env:
|
||||||
- name: ANSIBLE_BECOME_USER
|
- name: ANSIBLE_BECOME_USER
|
||||||
- name: ANSIBLE_RUN0_USER
|
- name: ANSIBLE_RUN0_USER
|
||||||
type: string
|
type: string
|
||||||
become_exe:
|
become_exe:
|
||||||
description: The C(run0) executable.
|
description: C(run0) executable.
|
||||||
default: run0
|
default: run0
|
||||||
ini:
|
ini:
|
||||||
- section: privilege_escalation
|
- section: privilege_escalation
|
||||||
key: become_exe
|
key: become_exe
|
||||||
- section: run0_become_plugin
|
- section: run0_become_plugin
|
||||||
key: executable
|
key: executable
|
||||||
vars:
|
vars:
|
||||||
- name: ansible_become_exe
|
- name: ansible_become_exe
|
||||||
- name: ansible_run0_exe
|
- name: ansible_run0_exe
|
||||||
env:
|
env:
|
||||||
- name: ANSIBLE_BECOME_EXE
|
- name: ANSIBLE_BECOME_EXE
|
||||||
- name: ANSIBLE_RUN0_EXE
|
- name: ANSIBLE_RUN0_EXE
|
||||||
type: string
|
type: string
|
||||||
become_flags:
|
become_flags:
|
||||||
description: Options to pass to run0.
|
description: Options to pass to C(run0).
|
||||||
default: ''
|
default: ''
|
||||||
ini:
|
ini:
|
||||||
- section: privilege_escalation
|
- section: privilege_escalation
|
||||||
key: become_flags
|
key: become_flags
|
||||||
- section: run0_become_plugin
|
- section: run0_become_plugin
|
||||||
key: flags
|
key: flags
|
||||||
vars:
|
vars:
|
||||||
- name: ansible_become_flags
|
- name: ansible_become_flags
|
||||||
- name: ansible_run0_flags
|
- name: ansible_run0_flags
|
||||||
env:
|
env:
|
||||||
- name: ANSIBLE_BECOME_FLAGS
|
- name: ANSIBLE_BECOME_FLAGS
|
||||||
- name: ANSIBLE_RUN0_FLAGS
|
- name: ANSIBLE_RUN0_FLAGS
|
||||||
type: string
|
type: string
|
||||||
notes:
|
notes:
|
||||||
- This plugin will only work when a polkit rule is in place.
|
- This plugin will only work when a C(polkit) rule is in place.
|
||||||
"""
|
"""
|
||||||
|
|
||||||
EXAMPLES = r"""
|
EXAMPLES = r"""
|
||||||
# An example polkit rule that allows the user 'ansible' in the 'wheel' group
|
# An example polkit rule that allows the user 'ansible' in the 'wheel' group
|
||||||
# to execute commands using run0 without authentication.
|
# to execute commands using run0 without authentication.
|
||||||
/etc/polkit-1/rules.d/60-run0-fast-user-auth.rules: |
|
/etc/polkit-1/rules.d/60-run0-fast-user-auth.rules: |-
|
||||||
polkit.addRule(function(action, subject) {
|
polkit.addRule(function(action, subject) {
|
||||||
if(action.id == "org.freedesktop.systemd1.manage-units" &&
|
if(action.id == "org.freedesktop.systemd1.manage-units" &&
|
||||||
subject.isInGroup("wheel") &&
|
subject.isInGroup("wheel") &&
|
||||||
@@ -81,7 +80,7 @@ EXAMPLES = r"""
|
|||||||
from re import compile as re_compile
|
from re import compile as re_compile
|
||||||
|
|
||||||
from ansible.plugins.become import BecomeBase
|
from ansible.plugins.become import BecomeBase
|
||||||
from ansible.module_utils._text import to_bytes
|
from ansible.module_utils.common.text.converters import to_bytes
|
||||||
|
|
||||||
ansi_color_codes = re_compile(to_bytes(r"\x1B\[[0-9;]+m"))
|
ansi_color_codes = re_compile(to_bytes(r"\x1B\[[0-9;]+m"))
|
||||||
|
|
||||||
|
|||||||
@@ -2,76 +2,75 @@
|
|||||||
# Copyright (c) 2018, Ansible Project
|
# Copyright (c) 2018, Ansible Project
|
||||||
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
|
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||||
# SPDX-License-Identifier: GPL-3.0-or-later
|
# SPDX-License-Identifier: GPL-3.0-or-later
|
||||||
from __future__ import (absolute_import, division, print_function)
|
from __future__ import annotations
|
||||||
__metaclass__ = type
|
|
||||||
|
|
||||||
DOCUMENTATION = '''
|
DOCUMENTATION = r"""
|
||||||
name: sesu
|
name: sesu
|
||||||
short_description: CA Privileged Access Manager
|
short_description: CA Privileged Access Manager
|
||||||
description:
|
description:
|
||||||
- This become plugins allows your remote/login user to execute commands as another user via the sesu utility.
|
- This become plugins allows your remote/login user to execute commands as another user using the C(sesu) utility.
|
||||||
author: ansible (@nekonyuu)
|
author: ansible (@nekonyuu)
|
||||||
options:
|
options:
|
||||||
become_user:
|
become_user:
|
||||||
description: User you 'become' to execute the task.
|
description: User you 'become' to execute the task.
|
||||||
type: string
|
type: string
|
||||||
default: ''
|
default: ''
|
||||||
ini:
|
ini:
|
||||||
- section: privilege_escalation
|
- section: privilege_escalation
|
||||||
key: become_user
|
key: become_user
|
||||||
- section: sesu_become_plugin
|
- section: sesu_become_plugin
|
||||||
key: user
|
key: user
|
||||||
vars:
|
vars:
|
||||||
- name: ansible_become_user
|
- name: ansible_become_user
|
||||||
- name: ansible_sesu_user
|
- name: ansible_sesu_user
|
||||||
env:
|
env:
|
||||||
- name: ANSIBLE_BECOME_USER
|
- name: ANSIBLE_BECOME_USER
|
||||||
- name: ANSIBLE_SESU_USER
|
- name: ANSIBLE_SESU_USER
|
||||||
become_exe:
|
become_exe:
|
||||||
description: sesu executable.
|
description: C(sesu) executable.
|
||||||
type: string
|
type: string
|
||||||
default: sesu
|
default: sesu
|
||||||
ini:
|
ini:
|
||||||
- section: privilege_escalation
|
- section: privilege_escalation
|
||||||
key: become_exe
|
key: become_exe
|
||||||
- section: sesu_become_plugin
|
- section: sesu_become_plugin
|
||||||
key: executable
|
key: executable
|
||||||
vars:
|
vars:
|
||||||
- name: ansible_become_exe
|
- name: ansible_become_exe
|
||||||
- name: ansible_sesu_exe
|
- name: ansible_sesu_exe
|
||||||
env:
|
env:
|
||||||
- name: ANSIBLE_BECOME_EXE
|
- name: ANSIBLE_BECOME_EXE
|
||||||
- name: ANSIBLE_SESU_EXE
|
- name: ANSIBLE_SESU_EXE
|
||||||
become_flags:
|
become_flags:
|
||||||
description: Options to pass to sesu.
|
description: Options to pass to C(sesu).
|
||||||
type: string
|
type: string
|
||||||
default: -H -S -n
|
default: -H -S -n
|
||||||
ini:
|
ini:
|
||||||
- section: privilege_escalation
|
- section: privilege_escalation
|
||||||
key: become_flags
|
key: become_flags
|
||||||
- section: sesu_become_plugin
|
- section: sesu_become_plugin
|
||||||
key: flags
|
key: flags
|
||||||
vars:
|
vars:
|
||||||
- name: ansible_become_flags
|
- name: ansible_become_flags
|
||||||
- name: ansible_sesu_flags
|
- name: ansible_sesu_flags
|
||||||
env:
|
env:
|
||||||
- name: ANSIBLE_BECOME_FLAGS
|
- name: ANSIBLE_BECOME_FLAGS
|
||||||
- name: ANSIBLE_SESU_FLAGS
|
- name: ANSIBLE_SESU_FLAGS
|
||||||
become_pass:
|
become_pass:
|
||||||
description: Password to pass to sesu.
|
description: Password to pass to C(sesu).
|
||||||
type: string
|
type: string
|
||||||
required: false
|
required: false
|
||||||
vars:
|
vars:
|
||||||
- name: ansible_become_password
|
- name: ansible_become_password
|
||||||
- name: ansible_become_pass
|
- name: ansible_become_pass
|
||||||
- name: ansible_sesu_pass
|
- name: ansible_sesu_pass
|
||||||
env:
|
env:
|
||||||
- name: ANSIBLE_BECOME_PASS
|
- name: ANSIBLE_BECOME_PASS
|
||||||
- name: ANSIBLE_SESU_PASS
|
- name: ANSIBLE_SESU_PASS
|
||||||
ini:
|
ini:
|
||||||
- section: sesu_become_plugin
|
- section: sesu_become_plugin
|
||||||
key: password
|
key: password
|
||||||
'''
|
"""
|
||||||
|
|
||||||
from ansible.plugins.become import BecomeBase
|
from ansible.plugins.become import BecomeBase
|
||||||
|
|
||||||
@@ -93,4 +92,4 @@ class BecomeModule(BecomeBase):
|
|||||||
|
|
||||||
flags = self.get_option('become_flags')
|
flags = self.get_option('become_flags')
|
||||||
user = self.get_option('become_user')
|
user = self.get_option('become_user')
|
||||||
return '%s %s %s -c %s' % (become, flags, user, self._build_success_command(cmd, shell))
|
return f'{become} {flags} {user} -c {self._build_success_command(cmd, shell)}'
|
||||||
|
|||||||
@@ -2,77 +2,77 @@
|
|||||||
# Copyright (c) 2021, Ansible Project
|
# Copyright (c) 2021, Ansible Project
|
||||||
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
|
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||||
# SPDX-License-Identifier: GPL-3.0-or-later
|
# SPDX-License-Identifier: GPL-3.0-or-later
|
||||||
from __future__ import (absolute_import, division, print_function)
|
from __future__ import annotations
|
||||||
__metaclass__ = type
|
|
||||||
|
|
||||||
DOCUMENTATION = """
|
DOCUMENTATION = r"""
|
||||||
name: sudosu
|
name: sudosu
|
||||||
short_description: Run tasks using sudo su -
|
short_description: Run tasks using sudo su -
|
||||||
|
description:
|
||||||
|
- This become plugin allows your remote/login user to execute commands as another user using the C(sudo) and C(su) utilities
|
||||||
|
combined.
|
||||||
|
author:
|
||||||
|
- Dag Wieers (@dagwieers)
|
||||||
|
version_added: 2.4.0
|
||||||
|
options:
|
||||||
|
become_user:
|
||||||
|
description: User you 'become' to execute the task.
|
||||||
|
type: string
|
||||||
|
default: root
|
||||||
|
ini:
|
||||||
|
- section: privilege_escalation
|
||||||
|
key: become_user
|
||||||
|
- section: sudo_become_plugin
|
||||||
|
key: user
|
||||||
|
vars:
|
||||||
|
- name: ansible_become_user
|
||||||
|
- name: ansible_sudo_user
|
||||||
|
env:
|
||||||
|
- name: ANSIBLE_BECOME_USER
|
||||||
|
- name: ANSIBLE_SUDO_USER
|
||||||
|
become_flags:
|
||||||
|
description: Options to pass to C(sudo).
|
||||||
|
type: string
|
||||||
|
default: -H -S -n
|
||||||
|
ini:
|
||||||
|
- section: privilege_escalation
|
||||||
|
key: become_flags
|
||||||
|
- section: sudo_become_plugin
|
||||||
|
key: flags
|
||||||
|
vars:
|
||||||
|
- name: ansible_become_flags
|
||||||
|
- name: ansible_sudo_flags
|
||||||
|
env:
|
||||||
|
- name: ANSIBLE_BECOME_FLAGS
|
||||||
|
- name: ANSIBLE_SUDO_FLAGS
|
||||||
|
become_pass:
|
||||||
|
description: Password to pass to C(sudo).
|
||||||
|
type: string
|
||||||
|
required: false
|
||||||
|
vars:
|
||||||
|
- name: ansible_become_password
|
||||||
|
- name: ansible_become_pass
|
||||||
|
- name: ansible_sudo_pass
|
||||||
|
env:
|
||||||
|
- name: ANSIBLE_BECOME_PASS
|
||||||
|
- name: ANSIBLE_SUDO_PASS
|
||||||
|
ini:
|
||||||
|
- section: sudo_become_plugin
|
||||||
|
key: password
|
||||||
|
alt_method:
|
||||||
description:
|
description:
|
||||||
- This become plugin allows your remote/login user to execute commands as another user via the C(sudo) and C(su) utilities combined.
|
- Whether to use an alternative method to call C(su). Instead of running C(su -l user /path/to/shell -c command), it
|
||||||
author:
|
runs C(su -l user -c command).
|
||||||
- Dag Wieers (@dagwieers)
|
- Use this when the default one is not working on your system.
|
||||||
version_added: 2.4.0
|
required: false
|
||||||
options:
|
type: boolean
|
||||||
become_user:
|
ini:
|
||||||
description: User you 'become' to execute the task.
|
- section: community.general.sudosu
|
||||||
type: string
|
key: alternative_method
|
||||||
default: root
|
vars:
|
||||||
ini:
|
- name: ansible_sudosu_alt_method
|
||||||
- section: privilege_escalation
|
env:
|
||||||
key: become_user
|
- name: ANSIBLE_SUDOSU_ALT_METHOD
|
||||||
- section: sudo_become_plugin
|
version_added: 9.2.0
|
||||||
key: user
|
|
||||||
vars:
|
|
||||||
- name: ansible_become_user
|
|
||||||
- name: ansible_sudo_user
|
|
||||||
env:
|
|
||||||
- name: ANSIBLE_BECOME_USER
|
|
||||||
- name: ANSIBLE_SUDO_USER
|
|
||||||
become_flags:
|
|
||||||
description: Options to pass to C(sudo).
|
|
||||||
type: string
|
|
||||||
default: -H -S -n
|
|
||||||
ini:
|
|
||||||
- section: privilege_escalation
|
|
||||||
key: become_flags
|
|
||||||
- section: sudo_become_plugin
|
|
||||||
key: flags
|
|
||||||
vars:
|
|
||||||
- name: ansible_become_flags
|
|
||||||
- name: ansible_sudo_flags
|
|
||||||
env:
|
|
||||||
- name: ANSIBLE_BECOME_FLAGS
|
|
||||||
- name: ANSIBLE_SUDO_FLAGS
|
|
||||||
become_pass:
|
|
||||||
description: Password to pass to C(sudo).
|
|
||||||
type: string
|
|
||||||
required: false
|
|
||||||
vars:
|
|
||||||
- name: ansible_become_password
|
|
||||||
- name: ansible_become_pass
|
|
||||||
- name: ansible_sudo_pass
|
|
||||||
env:
|
|
||||||
- name: ANSIBLE_BECOME_PASS
|
|
||||||
- name: ANSIBLE_SUDO_PASS
|
|
||||||
ini:
|
|
||||||
- section: sudo_become_plugin
|
|
||||||
key: password
|
|
||||||
alt_method:
|
|
||||||
description:
|
|
||||||
- Whether to use an alternative method to call C(su). Instead of running C(su -l user /path/to/shell -c command),
|
|
||||||
it runs C(su -l user -c command).
|
|
||||||
- Use this when the default one is not working on your system.
|
|
||||||
required: false
|
|
||||||
type: boolean
|
|
||||||
ini:
|
|
||||||
- section: community.general.sudosu
|
|
||||||
key: alternative_method
|
|
||||||
vars:
|
|
||||||
- name: ansible_sudosu_alt_method
|
|
||||||
env:
|
|
||||||
- name: ANSIBLE_SUDOSU_ALT_METHOD
|
|
||||||
version_added: 9.2.0
|
|
||||||
"""
|
"""
|
||||||
|
|
||||||
|
|
||||||
@@ -98,16 +98,16 @@ class BecomeModule(BecomeBase):
|
|||||||
flags = self.get_option('become_flags') or ''
|
flags = self.get_option('become_flags') or ''
|
||||||
prompt = ''
|
prompt = ''
|
||||||
if self.get_option('become_pass'):
|
if self.get_option('become_pass'):
|
||||||
self.prompt = '[sudo via ansible, key=%s] password:' % self._id
|
self.prompt = f'[sudo via ansible, key={self._id}] password:'
|
||||||
if flags: # this could be simplified, but kept as is for now for backwards string matching
|
if flags: # this could be simplified, but kept as is for now for backwards string matching
|
||||||
flags = flags.replace('-n', '')
|
flags = flags.replace('-n', '')
|
||||||
prompt = '-p "%s"' % (self.prompt)
|
prompt = f'-p "{self.prompt}"'
|
||||||
|
|
||||||
user = self.get_option('become_user') or ''
|
user = self.get_option('become_user') or ''
|
||||||
if user:
|
if user:
|
||||||
user = '%s' % (user)
|
user = f'{user}'
|
||||||
|
|
||||||
if self.get_option('alt_method'):
|
if self.get_option('alt_method'):
|
||||||
return ' '.join([becomecmd, flags, prompt, "su -l", user, "-c", self._build_success_command(cmd, shell, True)])
|
return f"{becomecmd} {flags} {prompt} su -l {user} -c {self._build_success_command(cmd, shell, True)}"
|
||||||
else:
|
else:
|
||||||
return ' '.join([becomecmd, flags, prompt, 'su -l', user, self._build_success_command(cmd, shell)])
|
return f"{becomecmd} {flags} {prompt} su -l {user} {self._build_success_command(cmd, shell)}"
|
||||||
|
|||||||
83
plugins/cache/memcached.py
vendored
83
plugins/cache/memcached.py
vendored
@@ -4,49 +4,48 @@
|
|||||||
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
|
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||||
# SPDX-License-Identifier: GPL-3.0-or-later
|
# SPDX-License-Identifier: GPL-3.0-or-later
|
||||||
|
|
||||||
from __future__ import (absolute_import, division, print_function)
|
from __future__ import annotations
|
||||||
__metaclass__ = type
|
|
||||||
|
|
||||||
DOCUMENTATION = '''
|
DOCUMENTATION = r"""
|
||||||
author: Unknown (!UNKNOWN)
|
author: Unknown (!UNKNOWN)
|
||||||
name: memcached
|
name: memcached
|
||||||
short_description: Use memcached DB for cache
|
short_description: Use memcached DB for cache
|
||||||
|
description:
|
||||||
|
- This cache uses JSON formatted, per host records saved in memcached.
|
||||||
|
requirements:
|
||||||
|
- memcache (python lib)
|
||||||
|
options:
|
||||||
|
_uri:
|
||||||
description:
|
description:
|
||||||
- This cache uses JSON formatted, per host records saved in memcached.
|
- List of connection information for the memcached DBs.
|
||||||
requirements:
|
default: ['127.0.0.1:11211']
|
||||||
- memcache (python lib)
|
type: list
|
||||||
options:
|
elements: string
|
||||||
_uri:
|
env:
|
||||||
description:
|
- name: ANSIBLE_CACHE_PLUGIN_CONNECTION
|
||||||
- List of connection information for the memcached DBs
|
ini:
|
||||||
default: ['127.0.0.1:11211']
|
- key: fact_caching_connection
|
||||||
type: list
|
section: defaults
|
||||||
elements: string
|
_prefix:
|
||||||
env:
|
description: User defined prefix to use when creating the DB entries.
|
||||||
- name: ANSIBLE_CACHE_PLUGIN_CONNECTION
|
type: string
|
||||||
ini:
|
default: ansible_facts
|
||||||
- key: fact_caching_connection
|
env:
|
||||||
section: defaults
|
- name: ANSIBLE_CACHE_PLUGIN_PREFIX
|
||||||
_prefix:
|
ini:
|
||||||
description: User defined prefix to use when creating the DB entries
|
- key: fact_caching_prefix
|
||||||
type: string
|
section: defaults
|
||||||
default: ansible_facts
|
_timeout:
|
||||||
env:
|
default: 86400
|
||||||
- name: ANSIBLE_CACHE_PLUGIN_PREFIX
|
type: integer
|
||||||
ini:
|
|
||||||
- key: fact_caching_prefix
|
|
||||||
section: defaults
|
|
||||||
_timeout:
|
|
||||||
default: 86400
|
|
||||||
type: integer
|
|
||||||
# TODO: determine whether it is OK to change to: type: float
|
# TODO: determine whether it is OK to change to: type: float
|
||||||
description: Expiration timeout in seconds for the cache plugin data. Set to 0 to never expire
|
description: Expiration timeout in seconds for the cache plugin data. Set to 0 to never expire.
|
||||||
env:
|
env:
|
||||||
- name: ANSIBLE_CACHE_PLUGIN_TIMEOUT
|
- name: ANSIBLE_CACHE_PLUGIN_TIMEOUT
|
||||||
ini:
|
ini:
|
||||||
- key: fact_caching_timeout
|
- key: fact_caching_timeout
|
||||||
section: defaults
|
section: defaults
|
||||||
'''
|
"""
|
||||||
|
|
||||||
import collections
|
import collections
|
||||||
import os
|
import os
|
||||||
@@ -55,7 +54,7 @@ from multiprocessing import Lock
|
|||||||
from itertools import chain
|
from itertools import chain
|
||||||
|
|
||||||
from ansible.errors import AnsibleError
|
from ansible.errors import AnsibleError
|
||||||
from ansible.module_utils.common._collections_compat import MutableSet
|
from collections.abc import MutableSet
|
||||||
from ansible.plugins.cache import BaseCacheModule
|
from ansible.plugins.cache import BaseCacheModule
|
||||||
from ansible.utils.display import Display
|
from ansible.utils.display import Display
|
||||||
|
|
||||||
@@ -191,7 +190,7 @@ class CacheModule(BaseCacheModule):
|
|||||||
self._keys = CacheModuleKeys(self._db, self._db.get(CacheModuleKeys.PREFIX) or [])
|
self._keys = CacheModuleKeys(self._db, self._db.get(CacheModuleKeys.PREFIX) or [])
|
||||||
|
|
||||||
def _make_key(self, key):
|
def _make_key(self, key):
|
||||||
return "{0}{1}".format(self._prefix, key)
|
return f"{self._prefix}{key}"
|
||||||
|
|
||||||
def _expire_keys(self):
|
def _expire_keys(self):
|
||||||
if self._timeout > 0:
|
if self._timeout > 0:
|
||||||
|
|||||||
78
plugins/cache/pickle.py
vendored
78
plugins/cache/pickle.py
vendored
@@ -5,51 +5,49 @@
|
|||||||
# SPDX-License-Identifier: GPL-3.0-or-later
|
# SPDX-License-Identifier: GPL-3.0-or-later
|
||||||
|
|
||||||
# Make coding more python3-ish
|
# Make coding more python3-ish
|
||||||
from __future__ import (absolute_import, division, print_function)
|
from __future__ import annotations
|
||||||
__metaclass__ = type
|
|
||||||
|
|
||||||
DOCUMENTATION = '''
|
DOCUMENTATION = r"""
|
||||||
name: pickle
|
name: pickle
|
||||||
short_description: Pickle formatted files.
|
short_description: Pickle formatted files
|
||||||
|
description:
|
||||||
|
- This cache uses Python's pickle serialization format, in per host files, saved to the filesystem.
|
||||||
|
author: Brian Coca (@bcoca)
|
||||||
|
options:
|
||||||
|
_uri:
|
||||||
|
required: true
|
||||||
description:
|
description:
|
||||||
- This cache uses Python's pickle serialization format, in per host files, saved to the filesystem.
|
- Path in which the cache plugin will save the files.
|
||||||
author: Brian Coca (@bcoca)
|
env:
|
||||||
options:
|
- name: ANSIBLE_CACHE_PLUGIN_CONNECTION
|
||||||
_uri:
|
ini:
|
||||||
required: true
|
- key: fact_caching_connection
|
||||||
description:
|
section: defaults
|
||||||
- Path in which the cache plugin will save the files
|
type: path
|
||||||
env:
|
_prefix:
|
||||||
- name: ANSIBLE_CACHE_PLUGIN_CONNECTION
|
description: User defined prefix to use when creating the files.
|
||||||
ini:
|
env:
|
||||||
- key: fact_caching_connection
|
- name: ANSIBLE_CACHE_PLUGIN_PREFIX
|
||||||
section: defaults
|
ini:
|
||||||
type: path
|
- key: fact_caching_prefix
|
||||||
_prefix:
|
section: defaults
|
||||||
description: User defined prefix to use when creating the files
|
type: string
|
||||||
env:
|
_timeout:
|
||||||
- name: ANSIBLE_CACHE_PLUGIN_PREFIX
|
default: 86400
|
||||||
ini:
|
description: Expiration timeout in seconds for the cache plugin data. Set to 0 to never expire.
|
||||||
- key: fact_caching_prefix
|
env:
|
||||||
section: defaults
|
- name: ANSIBLE_CACHE_PLUGIN_TIMEOUT
|
||||||
type: string
|
ini:
|
||||||
_timeout:
|
- key: fact_caching_timeout
|
||||||
default: 86400
|
section: defaults
|
||||||
description: Expiration timeout in seconds for the cache plugin data. Set to 0 to never expire
|
type: float
|
||||||
env:
|
"""
|
||||||
- name: ANSIBLE_CACHE_PLUGIN_TIMEOUT
|
|
||||||
ini:
|
|
||||||
- key: fact_caching_timeout
|
|
||||||
section: defaults
|
|
||||||
type: float
|
|
||||||
'''
|
|
||||||
|
|
||||||
try:
|
try:
|
||||||
import cPickle as pickle
|
import cPickle as pickle
|
||||||
except ImportError:
|
except ImportError:
|
||||||
import pickle
|
import pickle
|
||||||
|
|
||||||
from ansible.module_utils.six import PY3
|
|
||||||
from ansible.plugins.cache import BaseFileCacheModule
|
from ansible.plugins.cache import BaseFileCacheModule
|
||||||
|
|
||||||
|
|
||||||
@@ -57,14 +55,12 @@ class CacheModule(BaseFileCacheModule):
|
|||||||
"""
|
"""
|
||||||
A caching module backed by pickle files.
|
A caching module backed by pickle files.
|
||||||
"""
|
"""
|
||||||
|
_persistent = False # prevent unnecessary JSON serialization and key munging
|
||||||
|
|
||||||
def _load(self, filepath):
|
def _load(self, filepath):
|
||||||
# Pickle is a binary format
|
# Pickle is a binary format
|
||||||
with open(filepath, 'rb') as f:
|
with open(filepath, 'rb') as f:
|
||||||
if PY3:
|
return pickle.load(f, encoding='bytes')
|
||||||
return pickle.load(f, encoding='bytes')
|
|
||||||
else:
|
|
||||||
return pickle.load(f)
|
|
||||||
|
|
||||||
def _dump(self, value, filepath):
|
def _dump(self, value, filepath):
|
||||||
with open(filepath, 'wb') as f:
|
with open(filepath, 'wb') as f:
|
||||||
|
|||||||
128
plugins/cache/redis.py
vendored
128
plugins/cache/redis.py
vendored
@@ -3,77 +3,75 @@
|
|||||||
# Copyright (c) 2017 Ansible Project
|
# Copyright (c) 2017 Ansible Project
|
||||||
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
|
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||||
# SPDX-License-Identifier: GPL-3.0-or-later
|
# SPDX-License-Identifier: GPL-3.0-or-later
|
||||||
from __future__ import (absolute_import, division, print_function)
|
from __future__ import annotations
|
||||||
__metaclass__ = type
|
|
||||||
|
|
||||||
DOCUMENTATION = '''
|
DOCUMENTATION = r"""
|
||||||
author: Unknown (!UNKNOWN)
|
author: Unknown (!UNKNOWN)
|
||||||
name: redis
|
name: redis
|
||||||
short_description: Use Redis DB for cache
|
short_description: Use Redis DB for cache
|
||||||
|
description:
|
||||||
|
- This cache uses JSON formatted, per host records saved in Redis.
|
||||||
|
requirements:
|
||||||
|
- redis>=2.4.5 (python lib)
|
||||||
|
options:
|
||||||
|
_uri:
|
||||||
description:
|
description:
|
||||||
- This cache uses JSON formatted, per host records saved in Redis.
|
- A colon separated string of connection information for Redis.
|
||||||
requirements:
|
- The format is V(host:port:db:password), for example V(localhost:6379:0:changeme).
|
||||||
- redis>=2.4.5 (python lib)
|
- To use encryption in transit, prefix the connection with V(tls://), as in V(tls://localhost:6379:0:changeme).
|
||||||
options:
|
- To use redis sentinel, use separator V(;), for example V(localhost:26379;localhost:26379;0:changeme). Requires redis>=2.9.0.
|
||||||
_uri:
|
type: string
|
||||||
description:
|
required: true
|
||||||
- A colon separated string of connection information for Redis.
|
env:
|
||||||
- The format is V(host:port:db:password), for example V(localhost:6379:0:changeme).
|
- name: ANSIBLE_CACHE_PLUGIN_CONNECTION
|
||||||
- To use encryption in transit, prefix the connection with V(tls://), as in V(tls://localhost:6379:0:changeme).
|
ini:
|
||||||
- To use redis sentinel, use separator V(;), for example V(localhost:26379;localhost:26379;0:changeme). Requires redis>=2.9.0.
|
- key: fact_caching_connection
|
||||||
type: string
|
section: defaults
|
||||||
required: true
|
_prefix:
|
||||||
env:
|
description: User defined prefix to use when creating the DB entries.
|
||||||
- name: ANSIBLE_CACHE_PLUGIN_CONNECTION
|
type: string
|
||||||
ini:
|
default: ansible_facts
|
||||||
- key: fact_caching_connection
|
env:
|
||||||
section: defaults
|
- name: ANSIBLE_CACHE_PLUGIN_PREFIX
|
||||||
_prefix:
|
ini:
|
||||||
description: User defined prefix to use when creating the DB entries
|
- key: fact_caching_prefix
|
||||||
type: string
|
section: defaults
|
||||||
default: ansible_facts
|
_keyset_name:
|
||||||
env:
|
description: User defined name for cache keyset name.
|
||||||
- name: ANSIBLE_CACHE_PLUGIN_PREFIX
|
type: string
|
||||||
ini:
|
default: ansible_cache_keys
|
||||||
- key: fact_caching_prefix
|
env:
|
||||||
section: defaults
|
- name: ANSIBLE_CACHE_REDIS_KEYSET_NAME
|
||||||
_keyset_name:
|
ini:
|
||||||
description: User defined name for cache keyset name.
|
- key: fact_caching_redis_keyset_name
|
||||||
type: string
|
section: defaults
|
||||||
default: ansible_cache_keys
|
version_added: 1.3.0
|
||||||
env:
|
_sentinel_service_name:
|
||||||
- name: ANSIBLE_CACHE_REDIS_KEYSET_NAME
|
description: The redis sentinel service name (or referenced as cluster name).
|
||||||
ini:
|
type: string
|
||||||
- key: fact_caching_redis_keyset_name
|
env:
|
||||||
section: defaults
|
- name: ANSIBLE_CACHE_REDIS_SENTINEL
|
||||||
version_added: 1.3.0
|
ini:
|
||||||
_sentinel_service_name:
|
- key: fact_caching_redis_sentinel
|
||||||
description: The redis sentinel service name (or referenced as cluster name).
|
section: defaults
|
||||||
type: string
|
version_added: 1.3.0
|
||||||
env:
|
_timeout:
|
||||||
- name: ANSIBLE_CACHE_REDIS_SENTINEL
|
default: 86400
|
||||||
ini:
|
type: integer
|
||||||
- key: fact_caching_redis_sentinel
|
|
||||||
section: defaults
|
|
||||||
version_added: 1.3.0
|
|
||||||
_timeout:
|
|
||||||
default: 86400
|
|
||||||
type: integer
|
|
||||||
# TODO: determine whether it is OK to change to: type: float
|
# TODO: determine whether it is OK to change to: type: float
|
||||||
description: Expiration timeout in seconds for the cache plugin data. Set to 0 to never expire
|
description: Expiration timeout in seconds for the cache plugin data. Set to 0 to never expire.
|
||||||
env:
|
env:
|
||||||
- name: ANSIBLE_CACHE_PLUGIN_TIMEOUT
|
- name: ANSIBLE_CACHE_PLUGIN_TIMEOUT
|
||||||
ini:
|
ini:
|
||||||
- key: fact_caching_timeout
|
- key: fact_caching_timeout
|
||||||
section: defaults
|
section: defaults
|
||||||
'''
|
"""
|
||||||
|
|
||||||
import re
|
import re
|
||||||
import time
|
import time
|
||||||
import json
|
import json
|
||||||
|
|
||||||
from ansible.errors import AnsibleError
|
from ansible.errors import AnsibleError
|
||||||
from ansible.module_utils.common.text.converters import to_native
|
|
||||||
from ansible.parsing.ajson import AnsibleJSONEncoder, AnsibleJSONDecoder
|
from ansible.parsing.ajson import AnsibleJSONEncoder, AnsibleJSONDecoder
|
||||||
from ansible.plugins.cache import BaseCacheModule
|
from ansible.plugins.cache import BaseCacheModule
|
||||||
from ansible.utils.display import Display
|
from ansible.utils.display import Display
|
||||||
@@ -131,7 +129,7 @@ class CacheModule(BaseCacheModule):
|
|||||||
connection = self._parse_connection(self.re_url_conn, uri)
|
connection = self._parse_connection(self.re_url_conn, uri)
|
||||||
self._db = StrictRedis(*connection, **kw)
|
self._db = StrictRedis(*connection, **kw)
|
||||||
|
|
||||||
display.vv('Redis connection: %s' % self._db)
|
display.vv(f'Redis connection: {self._db}')
|
||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
def _parse_connection(re_patt, uri):
|
def _parse_connection(re_patt, uri):
|
||||||
@@ -164,12 +162,12 @@ class CacheModule(BaseCacheModule):
|
|||||||
pass # password is optional
|
pass # password is optional
|
||||||
|
|
||||||
sentinels = [self._parse_connection(self.re_sent_conn, shost) for shost in connections]
|
sentinels = [self._parse_connection(self.re_sent_conn, shost) for shost in connections]
|
||||||
display.vv('\nUsing redis sentinels: %s' % sentinels)
|
display.vv(f'\nUsing redis sentinels: {sentinels}')
|
||||||
scon = Sentinel(sentinels, **kw)
|
scon = Sentinel(sentinels, **kw)
|
||||||
try:
|
try:
|
||||||
return scon.master_for(self._sentinel_service_name, socket_timeout=0.2)
|
return scon.master_for(self._sentinel_service_name, socket_timeout=0.2)
|
||||||
except Exception as exc:
|
except Exception as exc:
|
||||||
raise AnsibleError('Could not connect to redis sentinel: %s' % to_native(exc))
|
raise AnsibleError(f'Could not connect to redis sentinel: {exc}')
|
||||||
|
|
||||||
def _make_key(self, key):
|
def _make_key(self, key):
|
||||||
return self._prefix + key
|
return self._prefix + key
|
||||||
|
|||||||
78
plugins/cache/yaml.py
vendored
78
plugins/cache/yaml.py
vendored
@@ -5,48 +5,46 @@
|
|||||||
# SPDX-License-Identifier: GPL-3.0-or-later
|
# SPDX-License-Identifier: GPL-3.0-or-later
|
||||||
|
|
||||||
# Make coding more python3-ish
|
# Make coding more python3-ish
|
||||||
from __future__ import (absolute_import, division, print_function)
|
from __future__ import annotations
|
||||||
__metaclass__ = type
|
|
||||||
|
|
||||||
DOCUMENTATION = '''
|
DOCUMENTATION = r"""
|
||||||
name: yaml
|
name: yaml
|
||||||
short_description: YAML formatted files.
|
short_description: YAML formatted files
|
||||||
|
description:
|
||||||
|
- This cache uses YAML formatted, per host, files saved to the filesystem.
|
||||||
|
author: Brian Coca (@bcoca)
|
||||||
|
options:
|
||||||
|
_uri:
|
||||||
|
required: true
|
||||||
description:
|
description:
|
||||||
- This cache uses YAML formatted, per host, files saved to the filesystem.
|
- Path in which the cache plugin will save the files.
|
||||||
author: Brian Coca (@bcoca)
|
env:
|
||||||
options:
|
- name: ANSIBLE_CACHE_PLUGIN_CONNECTION
|
||||||
_uri:
|
ini:
|
||||||
required: true
|
- key: fact_caching_connection
|
||||||
description:
|
section: defaults
|
||||||
- Path in which the cache plugin will save the files
|
type: string
|
||||||
env:
|
_prefix:
|
||||||
- name: ANSIBLE_CACHE_PLUGIN_CONNECTION
|
description: User defined prefix to use when creating the files.
|
||||||
ini:
|
env:
|
||||||
- key: fact_caching_connection
|
- name: ANSIBLE_CACHE_PLUGIN_PREFIX
|
||||||
section: defaults
|
ini:
|
||||||
type: string
|
- key: fact_caching_prefix
|
||||||
_prefix:
|
section: defaults
|
||||||
description: User defined prefix to use when creating the files
|
type: string
|
||||||
env:
|
_timeout:
|
||||||
- name: ANSIBLE_CACHE_PLUGIN_PREFIX
|
default: 86400
|
||||||
ini:
|
description: Expiration timeout in seconds for the cache plugin data. Set to 0 to never expire.
|
||||||
- key: fact_caching_prefix
|
env:
|
||||||
section: defaults
|
- name: ANSIBLE_CACHE_PLUGIN_TIMEOUT
|
||||||
type: string
|
ini:
|
||||||
_timeout:
|
- key: fact_caching_timeout
|
||||||
default: 86400
|
section: defaults
|
||||||
description: Expiration timeout in seconds for the cache plugin data. Set to 0 to never expire
|
type: integer
|
||||||
env:
|
|
||||||
- name: ANSIBLE_CACHE_PLUGIN_TIMEOUT
|
|
||||||
ini:
|
|
||||||
- key: fact_caching_timeout
|
|
||||||
section: defaults
|
|
||||||
type: integer
|
|
||||||
# TODO: determine whether it is OK to change to: type: float
|
# TODO: determine whether it is OK to change to: type: float
|
||||||
'''
|
"""
|
||||||
|
|
||||||
|
import os
|
||||||
import codecs
|
|
||||||
|
|
||||||
import yaml
|
import yaml
|
||||||
|
|
||||||
@@ -61,9 +59,9 @@ class CacheModule(BaseFileCacheModule):
|
|||||||
"""
|
"""
|
||||||
|
|
||||||
def _load(self, filepath):
|
def _load(self, filepath):
|
||||||
with codecs.open(filepath, 'r', encoding='utf-8') as f:
|
with open(os.path.abspath(filepath), 'r', encoding='utf-8') as f:
|
||||||
return AnsibleLoader(f).get_single_data()
|
return AnsibleLoader(f).get_single_data()
|
||||||
|
|
||||||
def _dump(self, value, filepath):
|
def _dump(self, value, filepath):
|
||||||
with codecs.open(filepath, 'w', encoding='utf-8') as f:
|
with open(os.path.abspath(filepath), 'w', encoding='utf-8') as f:
|
||||||
yaml.dump(value, f, Dumper=AnsibleDumper, default_flow_style=False)
|
yaml.dump(value, f, Dumper=AnsibleDumper, default_flow_style=False)
|
||||||
|
|||||||
@@ -4,43 +4,43 @@
|
|||||||
# SPDX-License-Identifier: GPL-3.0-or-later
|
# SPDX-License-Identifier: GPL-3.0-or-later
|
||||||
|
|
||||||
# Make coding more python3-ish
|
# Make coding more python3-ish
|
||||||
from __future__ import (absolute_import, division, print_function)
|
from __future__ import annotations
|
||||||
__metaclass__ = type
|
|
||||||
|
|
||||||
DOCUMENTATION = '''
|
DOCUMENTATION = r"""
|
||||||
author: Unknown (!UNKNOWN)
|
author: Unknown (!UNKNOWN)
|
||||||
name: cgroup_memory_recap
|
name: cgroup_memory_recap
|
||||||
type: aggregate
|
type: aggregate
|
||||||
requirements:
|
requirements:
|
||||||
- whitelist in configuration
|
- whitelist in configuration
|
||||||
- cgroups
|
- cgroups
|
||||||
short_description: Profiles maximum memory usage of tasks and full execution using cgroups
|
short_description: Profiles maximum memory usage of tasks and full execution using cgroups
|
||||||
description:
|
description:
|
||||||
- This is an ansible callback plugin that profiles maximum memory usage of ansible and individual tasks, and displays a recap at the end using cgroups.
|
- This is an Ansible callback plugin that profiles maximum memory usage of Ansible and individual tasks, and displays a
|
||||||
notes:
|
recap at the end using cgroups.
|
||||||
- Requires ansible to be run from within a cgroup, such as with C(cgexec -g memory:ansible_profile ansible-playbook ...).
|
notes:
|
||||||
- This cgroup should only be used by ansible to get accurate results.
|
- Requires ansible to be run from within a C(cgroup), such as with C(cgexec -g memory:ansible_profile ansible-playbook ...).
|
||||||
- To create the cgroup, first use a command such as C(sudo cgcreate -a ec2-user:ec2-user -t ec2-user:ec2-user -g memory:ansible_profile).
|
- This C(cgroup) should only be used by Ansible to get accurate results.
|
||||||
options:
|
- To create the C(cgroup), first use a command such as C(sudo cgcreate -a ec2-user:ec2-user -t ec2-user:ec2-user -g memory:ansible_profile).
|
||||||
max_mem_file:
|
options:
|
||||||
required: true
|
max_mem_file:
|
||||||
description: Path to cgroups C(memory.max_usage_in_bytes) file. Example V(/sys/fs/cgroup/memory/ansible_profile/memory.max_usage_in_bytes).
|
required: true
|
||||||
type: str
|
description: Path to cgroups C(memory.max_usage_in_bytes) file. Example V(/sys/fs/cgroup/memory/ansible_profile/memory.max_usage_in_bytes).
|
||||||
env:
|
type: str
|
||||||
- name: CGROUP_MAX_MEM_FILE
|
env:
|
||||||
ini:
|
- name: CGROUP_MAX_MEM_FILE
|
||||||
- section: callback_cgroupmemrecap
|
ini:
|
||||||
key: max_mem_file
|
- section: callback_cgroupmemrecap
|
||||||
cur_mem_file:
|
key: max_mem_file
|
||||||
required: true
|
cur_mem_file:
|
||||||
description: Path to C(memory.usage_in_bytes) file. Example V(/sys/fs/cgroup/memory/ansible_profile/memory.usage_in_bytes).
|
required: true
|
||||||
type: str
|
description: Path to C(memory.usage_in_bytes) file. Example V(/sys/fs/cgroup/memory/ansible_profile/memory.usage_in_bytes).
|
||||||
env:
|
type: str
|
||||||
- name: CGROUP_CUR_MEM_FILE
|
env:
|
||||||
ini:
|
- name: CGROUP_CUR_MEM_FILE
|
||||||
- section: callback_cgroupmemrecap
|
ini:
|
||||||
key: cur_mem_file
|
- section: callback_cgroupmemrecap
|
||||||
'''
|
key: cur_mem_file
|
||||||
|
"""
|
||||||
|
|
||||||
import time
|
import time
|
||||||
import threading
|
import threading
|
||||||
@@ -114,7 +114,7 @@ class CallbackModule(CallbackBase):
|
|||||||
max_results = int(f.read().strip()) / 1024 / 1024
|
max_results = int(f.read().strip()) / 1024 / 1024
|
||||||
|
|
||||||
self._display.banner('CGROUP MEMORY RECAP')
|
self._display.banner('CGROUP MEMORY RECAP')
|
||||||
self._display.display('Execution Maximum: %0.2fMB\n\n' % max_results)
|
self._display.display(f'Execution Maximum: {max_results:0.2f}MB\n\n')
|
||||||
|
|
||||||
for task, memory in self.task_results:
|
for task, memory in self.task_results:
|
||||||
self._display.display('%s (%s): %0.2fMB' % (task.get_name(), task._uuid, memory))
|
self._display.display(f'{task.get_name()} ({task._uuid}): {memory:0.2f}MB')
|
||||||
|
|||||||
@@ -4,20 +4,19 @@
|
|||||||
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
|
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||||
# SPDX-License-Identifier: GPL-3.0-or-later
|
# SPDX-License-Identifier: GPL-3.0-or-later
|
||||||
|
|
||||||
from __future__ import (absolute_import, division, print_function)
|
from __future__ import annotations
|
||||||
__metaclass__ = type
|
|
||||||
|
|
||||||
DOCUMENTATION = '''
|
DOCUMENTATION = r"""
|
||||||
author: Unknown (!UNKNOWN)
|
author: Unknown (!UNKNOWN)
|
||||||
name: context_demo
|
name: context_demo
|
||||||
type: aggregate
|
type: aggregate
|
||||||
short_description: demo callback that adds play/task context
|
short_description: demo callback that adds play/task context
|
||||||
description:
|
description:
|
||||||
- Displays some play and task context along with normal output.
|
- Displays some play and task context along with normal output.
|
||||||
- This is mostly for demo purposes.
|
- This is mostly for demo purposes.
|
||||||
requirements:
|
requirements:
|
||||||
- whitelist in configuration
|
- whitelist in configuration
|
||||||
'''
|
"""
|
||||||
|
|
||||||
from ansible.plugins.callback import CallbackBase
|
from ansible.plugins.callback import CallbackBase
|
||||||
|
|
||||||
@@ -38,15 +37,15 @@ class CallbackModule(CallbackBase):
|
|||||||
self.play = None
|
self.play = None
|
||||||
|
|
||||||
def v2_on_any(self, *args, **kwargs):
|
def v2_on_any(self, *args, **kwargs):
|
||||||
self._display.display("--- play: {0} task: {1} ---".format(getattr(self.play, 'name', None), self.task))
|
self._display.display(f"--- play: {getattr(self.play, 'name', None)} task: {self.task} ---")
|
||||||
|
|
||||||
self._display.display(" --- ARGS ")
|
self._display.display(" --- ARGS ")
|
||||||
for i, a in enumerate(args):
|
for i, a in enumerate(args):
|
||||||
self._display.display(' %s: %s' % (i, a))
|
self._display.display(f' {i}: {a}')
|
||||||
|
|
||||||
self._display.display(" --- KWARGS ")
|
self._display.display(" --- KWARGS ")
|
||||||
for k in kwargs:
|
for k in kwargs:
|
||||||
self._display.display(' %s: %s' % (k, kwargs[k]))
|
self._display.display(f' {k}: {kwargs[k]}')
|
||||||
|
|
||||||
def v2_playbook_on_play_start(self, play):
|
def v2_playbook_on_play_start(self, play):
|
||||||
self.play = play
|
self.play = play
|
||||||
|
|||||||
@@ -6,23 +6,22 @@
|
|||||||
Counter enabled Ansible callback plugin (See DOCUMENTATION for more information)
|
Counter enabled Ansible callback plugin (See DOCUMENTATION for more information)
|
||||||
'''
|
'''
|
||||||
|
|
||||||
from __future__ import (absolute_import, division, print_function)
|
from __future__ import annotations
|
||||||
__metaclass__ = type
|
|
||||||
|
|
||||||
DOCUMENTATION = '''
|
DOCUMENTATION = r"""
|
||||||
author: Unknown (!UNKNOWN)
|
author: Unknown (!UNKNOWN)
|
||||||
name: counter_enabled
|
name: counter_enabled
|
||||||
type: stdout
|
type: stdout
|
||||||
short_description: adds counters to the output items (tasks and hosts/task)
|
short_description: adds counters to the output items (tasks and hosts/task)
|
||||||
description:
|
description:
|
||||||
- Use this callback when you need a kind of progress bar on a large environments.
|
- Use this callback when you need a kind of progress bar on a large environments.
|
||||||
- You will know how many tasks has the playbook to run, and which one is actually running.
|
- You will know how many tasks has the playbook to run, and which one is actually running.
|
||||||
- You will know how many hosts may run a task, and which of them is actually running.
|
- You will know how many hosts may run a task, and which of them is actually running.
|
||||||
extends_documentation_fragment:
|
extends_documentation_fragment:
|
||||||
- default_callback
|
- default_callback
|
||||||
requirements:
|
requirements:
|
||||||
- set as stdout callback in C(ansible.cfg) (C(stdout_callback = counter_enabled))
|
- set as stdout callback in C(ansible.cfg) (C(stdout_callback = counter_enabled))
|
||||||
'''
|
"""
|
||||||
|
|
||||||
from ansible import constants as C
|
from ansible import constants as C
|
||||||
from ansible.plugins.callback import CallbackBase
|
from ansible.plugins.callback import CallbackBase
|
||||||
@@ -71,7 +70,7 @@ class CallbackModule(CallbackBase):
|
|||||||
if not name:
|
if not name:
|
||||||
msg = u"play"
|
msg = u"play"
|
||||||
else:
|
else:
|
||||||
msg = u"PLAY [%s]" % name
|
msg = f"PLAY [{name}]"
|
||||||
|
|
||||||
self._play = play
|
self._play = play
|
||||||
|
|
||||||
@@ -91,25 +90,17 @@ class CallbackModule(CallbackBase):
|
|||||||
for host in hosts:
|
for host in hosts:
|
||||||
stat = stats.summarize(host)
|
stat = stats.summarize(host)
|
||||||
|
|
||||||
self._display.display(u"%s : %s %s %s %s %s %s" % (
|
self._display.display(
|
||||||
hostcolor(host, stat),
|
f"{hostcolor(host, stat)} : {colorize('ok', stat['ok'], C.COLOR_OK)} {colorize('changed', stat['changed'], C.COLOR_CHANGED)} "
|
||||||
colorize(u'ok', stat['ok'], C.COLOR_OK),
|
f"{colorize('unreachable', stat['unreachable'], C.COLOR_UNREACHABLE)} {colorize('failed', stat['failures'], C.COLOR_ERROR)} "
|
||||||
colorize(u'changed', stat['changed'], C.COLOR_CHANGED),
|
f"{colorize('rescued', stat['rescued'], C.COLOR_OK)} {colorize('ignored', stat['ignored'], C.COLOR_WARN)}",
|
||||||
colorize(u'unreachable', stat['unreachable'], C.COLOR_UNREACHABLE),
|
|
||||||
colorize(u'failed', stat['failures'], C.COLOR_ERROR),
|
|
||||||
colorize(u'rescued', stat['rescued'], C.COLOR_OK),
|
|
||||||
colorize(u'ignored', stat['ignored'], C.COLOR_WARN)),
|
|
||||||
screen_only=True
|
screen_only=True
|
||||||
)
|
)
|
||||||
|
|
||||||
self._display.display(u"%s : %s %s %s %s %s %s" % (
|
self._display.display(
|
||||||
hostcolor(host, stat, False),
|
f"{hostcolor(host, stat, False)} : {colorize('ok', stat['ok'], None)} {colorize('changed', stat['changed'], None)} "
|
||||||
colorize(u'ok', stat['ok'], None),
|
f"{colorize('unreachable', stat['unreachable'], None)} {colorize('failed', stat['failures'], None)} "
|
||||||
colorize(u'changed', stat['changed'], None),
|
f"{colorize('rescued', stat['rescued'], None)} {colorize('ignored', stat['ignored'], None)}",
|
||||||
colorize(u'unreachable', stat['unreachable'], None),
|
|
||||||
colorize(u'failed', stat['failures'], None),
|
|
||||||
colorize(u'rescued', stat['rescued'], None),
|
|
||||||
colorize(u'ignored', stat['ignored'], None)),
|
|
||||||
log_only=True
|
log_only=True
|
||||||
)
|
)
|
||||||
|
|
||||||
@@ -124,12 +115,14 @@ class CallbackModule(CallbackBase):
|
|||||||
for k in sorted(stats.custom.keys()):
|
for k in sorted(stats.custom.keys()):
|
||||||
if k == '_run':
|
if k == '_run':
|
||||||
continue
|
continue
|
||||||
self._display.display('\t%s: %s' % (k, self._dump_results(stats.custom[k], indent=1).replace('\n', '')))
|
_custom_stats = self._dump_results(stats.custom[k], indent=1).replace('\n', '')
|
||||||
|
self._display.display(f'\t{k}: {_custom_stats}')
|
||||||
|
|
||||||
# print per run custom stats
|
# print per run custom stats
|
||||||
if '_run' in stats.custom:
|
if '_run' in stats.custom:
|
||||||
self._display.display("", screen_only=True)
|
self._display.display("", screen_only=True)
|
||||||
self._display.display('\tRUN: %s' % self._dump_results(stats.custom['_run'], indent=1).replace('\n', ''))
|
_custom_stats_run = self._dump_results(stats.custom['_run'], indent=1).replace('\n', '')
|
||||||
|
self._display.display(f'\tRUN: {_custom_stats_run}')
|
||||||
self._display.display("", screen_only=True)
|
self._display.display("", screen_only=True)
|
||||||
|
|
||||||
def v2_playbook_on_task_start(self, task, is_conditional):
|
def v2_playbook_on_task_start(self, task, is_conditional):
|
||||||
@@ -143,13 +136,13 @@ class CallbackModule(CallbackBase):
|
|||||||
# that they can secure this if they feel that their stdout is insecure
|
# that they can secure this if they feel that their stdout is insecure
|
||||||
# (shoulder surfing, logging stdout straight to a file, etc).
|
# (shoulder surfing, logging stdout straight to a file, etc).
|
||||||
if not task.no_log and C.DISPLAY_ARGS_TO_STDOUT:
|
if not task.no_log and C.DISPLAY_ARGS_TO_STDOUT:
|
||||||
args = ', '.join(('%s=%s' % a for a in task.args.items()))
|
args = ', '.join(('{k}={v}' for k, v in task.args.items()))
|
||||||
args = ' %s' % args
|
args = f' {args}'
|
||||||
self._display.banner("TASK %d/%d [%s%s]" % (self._task_counter, self._task_total, task.get_name().strip(), args))
|
self._display.banner(f"TASK {self._task_counter}/{self._task_total} [{task.get_name().strip()}{args}]")
|
||||||
if self._display.verbosity >= 2:
|
if self._display.verbosity >= 2:
|
||||||
path = task.get_path()
|
path = task.get_path()
|
||||||
if path:
|
if path:
|
||||||
self._display.display("task path: %s" % path, color=C.COLOR_DEBUG)
|
self._display.display(f"task path: {path}", color=C.COLOR_DEBUG)
|
||||||
self._host_counter = self._previous_batch_total
|
self._host_counter = self._previous_batch_total
|
||||||
self._task_counter += 1
|
self._task_counter += 1
|
||||||
|
|
||||||
@@ -166,15 +159,15 @@ class CallbackModule(CallbackBase):
|
|||||||
return
|
return
|
||||||
elif result._result.get('changed', False):
|
elif result._result.get('changed', False):
|
||||||
if delegated_vars:
|
if delegated_vars:
|
||||||
msg = "changed: %d/%d [%s -> %s]" % (self._host_counter, self._host_total, result._host.get_name(), delegated_vars['ansible_host'])
|
msg = f"changed: {self._host_counter}/{self._host_total} [{result._host.get_name()} -> {delegated_vars['ansible_host']}]"
|
||||||
else:
|
else:
|
||||||
msg = "changed: %d/%d [%s]" % (self._host_counter, self._host_total, result._host.get_name())
|
msg = f"changed: {self._host_counter}/{self._host_total} [{result._host.get_name()}]"
|
||||||
color = C.COLOR_CHANGED
|
color = C.COLOR_CHANGED
|
||||||
else:
|
else:
|
||||||
if delegated_vars:
|
if delegated_vars:
|
||||||
msg = "ok: %d/%d [%s -> %s]" % (self._host_counter, self._host_total, result._host.get_name(), delegated_vars['ansible_host'])
|
msg = f"ok: {self._host_counter}/{self._host_total} [{result._host.get_name()} -> {delegated_vars['ansible_host']}]"
|
||||||
else:
|
else:
|
||||||
msg = "ok: %d/%d [%s]" % (self._host_counter, self._host_total, result._host.get_name())
|
msg = f"ok: {self._host_counter}/{self._host_total} [{result._host.get_name()}]"
|
||||||
color = C.COLOR_OK
|
color = C.COLOR_OK
|
||||||
|
|
||||||
self._handle_warnings(result._result)
|
self._handle_warnings(result._result)
|
||||||
@@ -185,7 +178,7 @@ class CallbackModule(CallbackBase):
|
|||||||
self._clean_results(result._result, result._task.action)
|
self._clean_results(result._result, result._task.action)
|
||||||
|
|
||||||
if self._run_is_verbose(result):
|
if self._run_is_verbose(result):
|
||||||
msg += " => %s" % (self._dump_results(result._result),)
|
msg += f" => {self._dump_results(result._result)}"
|
||||||
self._display.display(msg, color=color)
|
self._display.display(msg, color=color)
|
||||||
|
|
||||||
def v2_runner_on_failed(self, result, ignore_errors=False):
|
def v2_runner_on_failed(self, result, ignore_errors=False):
|
||||||
@@ -206,14 +199,16 @@ class CallbackModule(CallbackBase):
|
|||||||
|
|
||||||
else:
|
else:
|
||||||
if delegated_vars:
|
if delegated_vars:
|
||||||
self._display.display("fatal: %d/%d [%s -> %s]: FAILED! => %s" % (self._host_counter, self._host_total,
|
self._display.display(
|
||||||
result._host.get_name(), delegated_vars['ansible_host'],
|
f"fatal: {self._host_counter}/{self._host_total} [{result._host.get_name()} -> "
|
||||||
self._dump_results(result._result)),
|
f"{delegated_vars['ansible_host']}]: FAILED! => {self._dump_results(result._result)}",
|
||||||
color=C.COLOR_ERROR)
|
color=C.COLOR_ERROR
|
||||||
|
)
|
||||||
else:
|
else:
|
||||||
self._display.display("fatal: %d/%d [%s]: FAILED! => %s" % (self._host_counter, self._host_total,
|
self._display.display(
|
||||||
result._host.get_name(), self._dump_results(result._result)),
|
f"fatal: {self._host_counter}/{self._host_total} [{result._host.get_name()}]: FAILED! => {self._dump_results(result._result)}",
|
||||||
color=C.COLOR_ERROR)
|
color=C.COLOR_ERROR
|
||||||
|
)
|
||||||
|
|
||||||
if ignore_errors:
|
if ignore_errors:
|
||||||
self._display.display("...ignoring", color=C.COLOR_SKIP)
|
self._display.display("...ignoring", color=C.COLOR_SKIP)
|
||||||
@@ -231,9 +226,9 @@ class CallbackModule(CallbackBase):
|
|||||||
if result._task.loop and 'results' in result._result:
|
if result._task.loop and 'results' in result._result:
|
||||||
self._process_items(result)
|
self._process_items(result)
|
||||||
else:
|
else:
|
||||||
msg = "skipping: %d/%d [%s]" % (self._host_counter, self._host_total, result._host.get_name())
|
msg = f"skipping: {self._host_counter}/{self._host_total} [{result._host.get_name()}]"
|
||||||
if self._run_is_verbose(result):
|
if self._run_is_verbose(result):
|
||||||
msg += " => %s" % self._dump_results(result._result)
|
msg += f" => {self._dump_results(result._result)}"
|
||||||
self._display.display(msg, color=C.COLOR_SKIP)
|
self._display.display(msg, color=C.COLOR_SKIP)
|
||||||
|
|
||||||
def v2_runner_on_unreachable(self, result):
|
def v2_runner_on_unreachable(self, result):
|
||||||
@@ -244,11 +239,13 @@ class CallbackModule(CallbackBase):
|
|||||||
|
|
||||||
delegated_vars = result._result.get('_ansible_delegated_vars', None)
|
delegated_vars = result._result.get('_ansible_delegated_vars', None)
|
||||||
if delegated_vars:
|
if delegated_vars:
|
||||||
self._display.display("fatal: %d/%d [%s -> %s]: UNREACHABLE! => %s" % (self._host_counter, self._host_total,
|
self._display.display(
|
||||||
result._host.get_name(), delegated_vars['ansible_host'],
|
f"fatal: {self._host_counter}/{self._host_total} [{result._host.get_name()} -> "
|
||||||
self._dump_results(result._result)),
|
f"{delegated_vars['ansible_host']}]: UNREACHABLE! => {self._dump_results(result._result)}",
|
||||||
color=C.COLOR_UNREACHABLE)
|
color=C.COLOR_UNREACHABLE
|
||||||
|
)
|
||||||
else:
|
else:
|
||||||
self._display.display("fatal: %d/%d [%s]: UNREACHABLE! => %s" % (self._host_counter, self._host_total,
|
self._display.display(
|
||||||
result._host.get_name(), self._dump_results(result._result)),
|
f"fatal: {self._host_counter}/{self._host_total} [{result._host.get_name()}]: UNREACHABLE! => {self._dump_results(result._result)}",
|
||||||
color=C.COLOR_UNREACHABLE)
|
color=C.COLOR_UNREACHABLE
|
||||||
|
)
|
||||||
|
|||||||
@@ -4,35 +4,33 @@
|
|||||||
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
|
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||||
# SPDX-License-Identifier: GPL-3.0-or-later
|
# SPDX-License-Identifier: GPL-3.0-or-later
|
||||||
|
|
||||||
from __future__ import (absolute_import, division, print_function)
|
from __future__ import annotations
|
||||||
__metaclass__ = type
|
|
||||||
|
|
||||||
DOCUMENTATION = r'''
|
DOCUMENTATION = r"""
|
||||||
name: default_without_diff
|
name: default_without_diff
|
||||||
type: stdout
|
type: stdout
|
||||||
short_description: The default ansible callback without diff output
|
short_description: The default ansible callback without diff output
|
||||||
version_added: 8.4.0
|
version_added: 8.4.0
|
||||||
description:
|
description:
|
||||||
- This is basically the default ansible callback plugin (P(ansible.builtin.default#callback)) without
|
- This is basically the default ansible callback plugin (P(ansible.builtin.default#callback)) without showing diff output.
|
||||||
showing diff output. This can be useful when using another callback which sends more detailed information
|
This can be useful when using another callback which sends more detailed information to another service, like the L(ARA,
|
||||||
to another service, like the L(ARA, https://ara.recordsansible.org/) callback, and you want diff output
|
https://ara.recordsansible.org/) callback, and you want diff output sent to that plugin but not shown on the console output.
|
||||||
sent to that plugin but not shown on the console output.
|
author: Felix Fontein (@felixfontein)
|
||||||
author: Felix Fontein (@felixfontein)
|
extends_documentation_fragment:
|
||||||
extends_documentation_fragment:
|
- ansible.builtin.default_callback
|
||||||
- ansible.builtin.default_callback
|
- ansible.builtin.result_format_callback
|
||||||
- ansible.builtin.result_format_callback
|
"""
|
||||||
'''
|
|
||||||
|
|
||||||
EXAMPLES = r'''
|
EXAMPLES = r"""
|
||||||
# Enable callback in ansible.cfg:
|
# Enable callback in ansible.cfg:
|
||||||
ansible_config: |
|
ansible_config: |
|
||||||
[defaults]
|
[defaults]
|
||||||
stdout_callback = community.general.default_without_diff
|
stdout_callback = community.general.default_without_diff
|
||||||
|
|
||||||
# Enable callback with environment variables:
|
# Enable callback with environment variables:
|
||||||
environment_variable: |
|
environment_variable: |-
|
||||||
ANSIBLE_STDOUT_CALLBACK=community.general.default_without_diff
|
ANSIBLE_STDOUT_CALLBACK=community.general.default_without_diff
|
||||||
'''
|
"""
|
||||||
|
|
||||||
from ansible.plugins.callback.default import CallbackModule as Default
|
from ansible.plugins.callback.default import CallbackModule as Default
|
||||||
|
|
||||||
|
|||||||
@@ -4,22 +4,21 @@
|
|||||||
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
|
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||||
# SPDX-License-Identifier: GPL-3.0-or-later
|
# SPDX-License-Identifier: GPL-3.0-or-later
|
||||||
|
|
||||||
from __future__ import (absolute_import, division, print_function)
|
from __future__ import annotations
|
||||||
__metaclass__ = type
|
|
||||||
|
|
||||||
DOCUMENTATION = '''
|
DOCUMENTATION = r"""
|
||||||
name: dense
|
name: dense
|
||||||
type: stdout
|
type: stdout
|
||||||
short_description: minimal stdout output
|
short_description: minimal stdout output
|
||||||
extends_documentation_fragment:
|
extends_documentation_fragment:
|
||||||
- default_callback
|
- default_callback
|
||||||
description:
|
description:
|
||||||
- When in verbose mode it will act the same as the default callback.
|
- When in verbose mode it will act the same as the default callback.
|
||||||
author:
|
author:
|
||||||
- Dag Wieers (@dagwieers)
|
- Dag Wieers (@dagwieers)
|
||||||
requirements:
|
requirements:
|
||||||
- set as stdout in configuration
|
- set as stdout in configuration
|
||||||
'''
|
"""
|
||||||
|
|
||||||
HAS_OD = False
|
HAS_OD = False
|
||||||
try:
|
try:
|
||||||
@@ -28,8 +27,7 @@ try:
|
|||||||
except ImportError:
|
except ImportError:
|
||||||
pass
|
pass
|
||||||
|
|
||||||
from ansible.module_utils.six import binary_type, text_type
|
from collections.abc import MutableMapping, MutableSequence
|
||||||
from ansible.module_utils.common._collections_compat import MutableMapping, MutableSequence
|
|
||||||
from ansible.plugins.callback.default import CallbackModule as CallbackModule_default
|
from ansible.plugins.callback.default import CallbackModule as CallbackModule_default
|
||||||
from ansible.utils.color import colorize, hostcolor
|
from ansible.utils.color import colorize, hostcolor
|
||||||
from ansible.utils.display import Display
|
from ansible.utils.display import Display
|
||||||
@@ -195,7 +193,7 @@ class CallbackModule(CallbackModule_default):
|
|||||||
self.disabled = True
|
self.disabled = True
|
||||||
|
|
||||||
def __del__(self):
|
def __del__(self):
|
||||||
sys.stdout.write(vt100.restore + vt100.reset + '\n' + vt100.save + vt100.clearline)
|
sys.stdout.write(f"{vt100.restore}{vt100.reset}\n{vt100.save}{vt100.clearline}")
|
||||||
|
|
||||||
def _add_host(self, result, status):
|
def _add_host(self, result, status):
|
||||||
name = result._host.get_name()
|
name = result._host.get_name()
|
||||||
@@ -237,13 +235,13 @@ class CallbackModule(CallbackModule_default):
|
|||||||
|
|
||||||
# Remove empty attributes (list, dict, str)
|
# Remove empty attributes (list, dict, str)
|
||||||
for attr in result.copy():
|
for attr in result.copy():
|
||||||
if isinstance(result[attr], (MutableSequence, MutableMapping, binary_type, text_type)):
|
if isinstance(result[attr], (MutableSequence, MutableMapping, bytes, str)):
|
||||||
if not result[attr]:
|
if not result[attr]:
|
||||||
del result[attr]
|
del result[attr]
|
||||||
|
|
||||||
def _handle_exceptions(self, result):
|
def _handle_exceptions(self, result):
|
||||||
if 'exception' in result:
|
if 'exception' in result:
|
||||||
# Remove the exception from the result so it's not shown every time
|
# Remove the exception from the result so it is not shown every time
|
||||||
del result['exception']
|
del result['exception']
|
||||||
|
|
||||||
if self._display.verbosity == 1:
|
if self._display.verbosity == 1:
|
||||||
@@ -252,7 +250,7 @@ class CallbackModule(CallbackModule_default):
|
|||||||
def _display_progress(self, result=None):
|
def _display_progress(self, result=None):
|
||||||
# Always rewrite the complete line
|
# Always rewrite the complete line
|
||||||
sys.stdout.write(vt100.restore + vt100.reset + vt100.clearline + vt100.nolinewrap + vt100.underline)
|
sys.stdout.write(vt100.restore + vt100.reset + vt100.clearline + vt100.nolinewrap + vt100.underline)
|
||||||
sys.stdout.write('%s %d:' % (self.type, self.count[self.type]))
|
sys.stdout.write(f'{self.type} {self.count[self.type]}:')
|
||||||
sys.stdout.write(vt100.reset)
|
sys.stdout.write(vt100.reset)
|
||||||
sys.stdout.flush()
|
sys.stdout.flush()
|
||||||
|
|
||||||
@@ -260,7 +258,7 @@ class CallbackModule(CallbackModule_default):
|
|||||||
for name in self.hosts:
|
for name in self.hosts:
|
||||||
sys.stdout.write(' ')
|
sys.stdout.write(' ')
|
||||||
if self.hosts[name].get('delegate', None):
|
if self.hosts[name].get('delegate', None):
|
||||||
sys.stdout.write(self.hosts[name]['delegate'] + '>')
|
sys.stdout.write(f"{self.hosts[name]['delegate']}>")
|
||||||
sys.stdout.write(colors[self.hosts[name]['state']] + name + vt100.reset)
|
sys.stdout.write(colors[self.hosts[name]['state']] + name + vt100.reset)
|
||||||
sys.stdout.flush()
|
sys.stdout.flush()
|
||||||
|
|
||||||
@@ -274,8 +272,8 @@ class CallbackModule(CallbackModule_default):
|
|||||||
if not self.shown_title:
|
if not self.shown_title:
|
||||||
self.shown_title = True
|
self.shown_title = True
|
||||||
sys.stdout.write(vt100.restore + vt100.reset + vt100.clearline + vt100.underline)
|
sys.stdout.write(vt100.restore + vt100.reset + vt100.clearline + vt100.underline)
|
||||||
sys.stdout.write('%s %d: %s' % (self.type, self.count[self.type], self.task.get_name().strip()))
|
sys.stdout.write(f'{self.type} {self.count[self.type]}: {self.task.get_name().strip()}')
|
||||||
sys.stdout.write(vt100.restore + vt100.reset + '\n' + vt100.save + vt100.clearline)
|
sys.stdout.write(f"{vt100.restore}{vt100.reset}\n{vt100.save}{vt100.clearline}")
|
||||||
sys.stdout.flush()
|
sys.stdout.flush()
|
||||||
else:
|
else:
|
||||||
sys.stdout.write(vt100.restore + vt100.reset + vt100.clearline)
|
sys.stdout.write(vt100.restore + vt100.reset + vt100.clearline)
|
||||||
@@ -284,7 +282,7 @@ class CallbackModule(CallbackModule_default):
|
|||||||
def _display_results(self, result, status):
|
def _display_results(self, result, status):
|
||||||
# Leave the previous task on screen (as it has changes/errors)
|
# Leave the previous task on screen (as it has changes/errors)
|
||||||
if self._display.verbosity == 0 and self.keep:
|
if self._display.verbosity == 0 and self.keep:
|
||||||
sys.stdout.write(vt100.restore + vt100.reset + '\n' + vt100.save + vt100.clearline)
|
sys.stdout.write(f"{vt100.restore}{vt100.reset}\n{vt100.save}{vt100.clearline}")
|
||||||
else:
|
else:
|
||||||
sys.stdout.write(vt100.restore + vt100.reset + vt100.clearline)
|
sys.stdout.write(vt100.restore + vt100.reset + vt100.clearline)
|
||||||
self.keep = False
|
self.keep = False
|
||||||
@@ -309,15 +307,15 @@ class CallbackModule(CallbackModule_default):
|
|||||||
if result._task.loop and 'results' in result._result:
|
if result._task.loop and 'results' in result._result:
|
||||||
self._process_items(result)
|
self._process_items(result)
|
||||||
else:
|
else:
|
||||||
sys.stdout.write(colors[status] + status + ': ')
|
sys.stdout.write(f"{colors[status] + status}: ")
|
||||||
|
|
||||||
delegated_vars = result._result.get('_ansible_delegated_vars', None)
|
delegated_vars = result._result.get('_ansible_delegated_vars', None)
|
||||||
if delegated_vars:
|
if delegated_vars:
|
||||||
sys.stdout.write(vt100.reset + result._host.get_name() + '>' + colors[status] + delegated_vars['ansible_host'])
|
sys.stdout.write(f"{vt100.reset + result._host.get_name()}>{colors[status]}{delegated_vars['ansible_host']}")
|
||||||
else:
|
else:
|
||||||
sys.stdout.write(result._host.get_name())
|
sys.stdout.write(result._host.get_name())
|
||||||
|
|
||||||
sys.stdout.write(': ' + dump + '\n')
|
sys.stdout.write(f": {dump}\n")
|
||||||
sys.stdout.write(vt100.reset + vt100.save + vt100.clearline)
|
sys.stdout.write(vt100.reset + vt100.save + vt100.clearline)
|
||||||
sys.stdout.flush()
|
sys.stdout.flush()
|
||||||
|
|
||||||
@@ -327,7 +325,7 @@ class CallbackModule(CallbackModule_default):
|
|||||||
def v2_playbook_on_play_start(self, play):
|
def v2_playbook_on_play_start(self, play):
|
||||||
# Leave the previous task on screen (as it has changes/errors)
|
# Leave the previous task on screen (as it has changes/errors)
|
||||||
if self._display.verbosity == 0 and self.keep:
|
if self._display.verbosity == 0 and self.keep:
|
||||||
sys.stdout.write(vt100.restore + vt100.reset + '\n' + vt100.save + vt100.clearline + vt100.bold)
|
sys.stdout.write(f"{vt100.restore}{vt100.reset}\n{vt100.save}{vt100.clearline}{vt100.bold}")
|
||||||
else:
|
else:
|
||||||
sys.stdout.write(vt100.restore + vt100.reset + vt100.clearline + vt100.bold)
|
sys.stdout.write(vt100.restore + vt100.reset + vt100.clearline + vt100.bold)
|
||||||
|
|
||||||
@@ -341,14 +339,14 @@ class CallbackModule(CallbackModule_default):
|
|||||||
name = play.get_name().strip()
|
name = play.get_name().strip()
|
||||||
if not name:
|
if not name:
|
||||||
name = 'unnamed'
|
name = 'unnamed'
|
||||||
sys.stdout.write('PLAY %d: %s' % (self.count['play'], name.upper()))
|
sys.stdout.write(f"PLAY {self.count['play']}: {name.upper()}")
|
||||||
sys.stdout.write(vt100.restore + vt100.reset + '\n' + vt100.save + vt100.clearline)
|
sys.stdout.write(f"{vt100.restore}{vt100.reset}\n{vt100.save}{vt100.clearline}")
|
||||||
sys.stdout.flush()
|
sys.stdout.flush()
|
||||||
|
|
||||||
def v2_playbook_on_task_start(self, task, is_conditional):
|
def v2_playbook_on_task_start(self, task, is_conditional):
|
||||||
# Leave the previous task on screen (as it has changes/errors)
|
# Leave the previous task on screen (as it has changes/errors)
|
||||||
if self._display.verbosity == 0 and self.keep:
|
if self._display.verbosity == 0 and self.keep:
|
||||||
sys.stdout.write(vt100.restore + vt100.reset + '\n' + vt100.save + vt100.clearline + vt100.underline)
|
sys.stdout.write(f"{vt100.restore}{vt100.reset}\n{vt100.save}{vt100.clearline}{vt100.underline}")
|
||||||
else:
|
else:
|
||||||
# Do not clear line, since we want to retain the previous output
|
# Do not clear line, since we want to retain the previous output
|
||||||
sys.stdout.write(vt100.restore + vt100.reset + vt100.underline)
|
sys.stdout.write(vt100.restore + vt100.reset + vt100.underline)
|
||||||
@@ -365,14 +363,14 @@ class CallbackModule(CallbackModule_default):
|
|||||||
self.count['task'] += 1
|
self.count['task'] += 1
|
||||||
|
|
||||||
# Write the next task on screen (behind the prompt is the previous output)
|
# Write the next task on screen (behind the prompt is the previous output)
|
||||||
sys.stdout.write('%s %d.' % (self.type, self.count[self.type]))
|
sys.stdout.write(f'{self.type} {self.count[self.type]}.')
|
||||||
sys.stdout.write(vt100.reset)
|
sys.stdout.write(vt100.reset)
|
||||||
sys.stdout.flush()
|
sys.stdout.flush()
|
||||||
|
|
||||||
def v2_playbook_on_handler_task_start(self, task):
|
def v2_playbook_on_handler_task_start(self, task):
|
||||||
# Leave the previous task on screen (as it has changes/errors)
|
# Leave the previous task on screen (as it has changes/errors)
|
||||||
if self._display.verbosity == 0 and self.keep:
|
if self._display.verbosity == 0 and self.keep:
|
||||||
sys.stdout.write(vt100.restore + vt100.reset + '\n' + vt100.save + vt100.clearline + vt100.underline)
|
sys.stdout.write(f"{vt100.restore}{vt100.reset}\n{vt100.save}{vt100.clearline}{vt100.underline}")
|
||||||
else:
|
else:
|
||||||
sys.stdout.write(vt100.restore + vt100.reset + vt100.clearline + vt100.underline)
|
sys.stdout.write(vt100.restore + vt100.reset + vt100.clearline + vt100.underline)
|
||||||
|
|
||||||
@@ -388,7 +386,7 @@ class CallbackModule(CallbackModule_default):
|
|||||||
self.count[self.type] += 1
|
self.count[self.type] += 1
|
||||||
|
|
||||||
# Write the next task on screen (behind the prompt is the previous output)
|
# Write the next task on screen (behind the prompt is the previous output)
|
||||||
sys.stdout.write('%s %d.' % (self.type, self.count[self.type]))
|
sys.stdout.write(f'{self.type} {self.count[self.type]}.')
|
||||||
sys.stdout.write(vt100.reset)
|
sys.stdout.write(vt100.reset)
|
||||||
sys.stdout.flush()
|
sys.stdout.flush()
|
||||||
|
|
||||||
@@ -451,13 +449,13 @@ class CallbackModule(CallbackModule_default):
|
|||||||
|
|
||||||
def v2_playbook_on_no_hosts_remaining(self):
|
def v2_playbook_on_no_hosts_remaining(self):
|
||||||
if self._display.verbosity == 0 and self.keep:
|
if self._display.verbosity == 0 and self.keep:
|
||||||
sys.stdout.write(vt100.restore + vt100.reset + '\n' + vt100.save + vt100.clearline)
|
sys.stdout.write(f"{vt100.restore}{vt100.reset}\n{vt100.save}{vt100.clearline}")
|
||||||
else:
|
else:
|
||||||
sys.stdout.write(vt100.restore + vt100.reset + vt100.clearline)
|
sys.stdout.write(vt100.restore + vt100.reset + vt100.clearline)
|
||||||
self.keep = False
|
self.keep = False
|
||||||
|
|
||||||
sys.stdout.write(vt100.white + vt100.redbg + 'NO MORE HOSTS LEFT')
|
sys.stdout.write(f"{vt100.white + vt100.redbg}NO MORE HOSTS LEFT")
|
||||||
sys.stdout.write(vt100.restore + vt100.reset + '\n' + vt100.save + vt100.clearline)
|
sys.stdout.write(f"{vt100.restore}{vt100.reset}\n{vt100.save}{vt100.clearline}")
|
||||||
sys.stdout.flush()
|
sys.stdout.flush()
|
||||||
|
|
||||||
def v2_playbook_on_include(self, included_file):
|
def v2_playbook_on_include(self, included_file):
|
||||||
@@ -465,7 +463,7 @@ class CallbackModule(CallbackModule_default):
|
|||||||
|
|
||||||
def v2_playbook_on_stats(self, stats):
|
def v2_playbook_on_stats(self, stats):
|
||||||
if self._display.verbosity == 0 and self.keep:
|
if self._display.verbosity == 0 and self.keep:
|
||||||
sys.stdout.write(vt100.restore + vt100.reset + '\n' + vt100.save + vt100.clearline)
|
sys.stdout.write(f"{vt100.restore}{vt100.reset}\n{vt100.save}{vt100.clearline}")
|
||||||
else:
|
else:
|
||||||
sys.stdout.write(vt100.restore + vt100.reset + vt100.clearline)
|
sys.stdout.write(vt100.restore + vt100.reset + vt100.clearline)
|
||||||
|
|
||||||
@@ -476,22 +474,16 @@ class CallbackModule(CallbackModule_default):
|
|||||||
sys.stdout.write(vt100.bold + vt100.underline)
|
sys.stdout.write(vt100.bold + vt100.underline)
|
||||||
sys.stdout.write('SUMMARY')
|
sys.stdout.write('SUMMARY')
|
||||||
|
|
||||||
sys.stdout.write(vt100.restore + vt100.reset + '\n' + vt100.save + vt100.clearline)
|
sys.stdout.write(f"{vt100.restore}{vt100.reset}\n{vt100.save}{vt100.clearline}")
|
||||||
sys.stdout.flush()
|
sys.stdout.flush()
|
||||||
|
|
||||||
hosts = sorted(stats.processed.keys())
|
hosts = sorted(stats.processed.keys())
|
||||||
for h in hosts:
|
for h in hosts:
|
||||||
t = stats.summarize(h)
|
t = stats.summarize(h)
|
||||||
self._display.display(
|
self._display.display(
|
||||||
u"%s : %s %s %s %s %s %s" % (
|
f"{hostcolor(h, t)} : {colorize('ok', t['ok'], C.COLOR_OK)} {colorize('changed', t['changed'], C.COLOR_CHANGED)} "
|
||||||
hostcolor(h, t),
|
f"{colorize('unreachable', t['unreachable'], C.COLOR_UNREACHABLE)} {colorize('failed', t['failures'], C.COLOR_ERROR)} "
|
||||||
colorize(u'ok', t['ok'], C.COLOR_OK),
|
f"{colorize('rescued', t['rescued'], C.COLOR_OK)} {colorize('ignored', t['ignored'], C.COLOR_WARN)}",
|
||||||
colorize(u'changed', t['changed'], C.COLOR_CHANGED),
|
|
||||||
colorize(u'unreachable', t['unreachable'], C.COLOR_UNREACHABLE),
|
|
||||||
colorize(u'failed', t['failures'], C.COLOR_ERROR),
|
|
||||||
colorize(u'rescued', t['rescued'], C.COLOR_OK),
|
|
||||||
colorize(u'ignored', t['ignored'], C.COLOR_WARN),
|
|
||||||
),
|
|
||||||
screen_only=True
|
screen_only=True
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|||||||
File diff suppressed because it is too large
Load Diff
@@ -2,72 +2,71 @@
|
|||||||
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
|
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||||
# SPDX-License-Identifier: GPL-3.0-or-later
|
# SPDX-License-Identifier: GPL-3.0-or-later
|
||||||
|
|
||||||
from __future__ import (absolute_import, division, print_function)
|
from __future__ import annotations
|
||||||
__metaclass__ = type
|
|
||||||
|
|
||||||
DOCUMENTATION = '''
|
DOCUMENTATION = r"""
|
||||||
author: Victor Martinez (@v1v) <VictorMartinezRubio@gmail.com>
|
author: Victor Martinez (@v1v) <VictorMartinezRubio@gmail.com>
|
||||||
name: elastic
|
name: elastic
|
||||||
type: notification
|
type: notification
|
||||||
short_description: Create distributed traces for each Ansible task in Elastic APM
|
short_description: Create distributed traces for each Ansible task in Elastic APM
|
||||||
version_added: 3.8.0
|
version_added: 3.8.0
|
||||||
|
description:
|
||||||
|
- This callback creates distributed traces for each Ansible task in Elastic APM.
|
||||||
|
- You can configure the plugin with environment variables.
|
||||||
|
- See U(https://www.elastic.co/guide/en/apm/agent/python/current/configuration.html).
|
||||||
|
options:
|
||||||
|
hide_task_arguments:
|
||||||
|
default: false
|
||||||
|
type: bool
|
||||||
description:
|
description:
|
||||||
- This callback creates distributed traces for each Ansible task in Elastic APM.
|
- Hide the arguments for a task.
|
||||||
- You can configure the plugin with environment variables.
|
env:
|
||||||
- See U(https://www.elastic.co/guide/en/apm/agent/python/current/configuration.html).
|
- name: ANSIBLE_OPENTELEMETRY_HIDE_TASK_ARGUMENTS
|
||||||
options:
|
apm_service_name:
|
||||||
hide_task_arguments:
|
default: ansible
|
||||||
default: false
|
type: str
|
||||||
type: bool
|
description:
|
||||||
description:
|
- The service name resource attribute.
|
||||||
- Hide the arguments for a task.
|
env:
|
||||||
env:
|
- name: ELASTIC_APM_SERVICE_NAME
|
||||||
- name: ANSIBLE_OPENTELEMETRY_HIDE_TASK_ARGUMENTS
|
apm_server_url:
|
||||||
apm_service_name:
|
type: str
|
||||||
default: ansible
|
description:
|
||||||
type: str
|
- Use the APM server and its environment variables.
|
||||||
description:
|
env:
|
||||||
- The service name resource attribute.
|
- name: ELASTIC_APM_SERVER_URL
|
||||||
env:
|
apm_secret_token:
|
||||||
- name: ELASTIC_APM_SERVICE_NAME
|
type: str
|
||||||
apm_server_url:
|
description:
|
||||||
type: str
|
- Use the APM server token.
|
||||||
description:
|
env:
|
||||||
- Use the APM server and its environment variables.
|
- name: ELASTIC_APM_SECRET_TOKEN
|
||||||
env:
|
apm_api_key:
|
||||||
- name: ELASTIC_APM_SERVER_URL
|
type: str
|
||||||
apm_secret_token:
|
description:
|
||||||
type: str
|
- Use the APM API key.
|
||||||
description:
|
env:
|
||||||
- Use the APM server token
|
- name: ELASTIC_APM_API_KEY
|
||||||
env:
|
apm_verify_server_cert:
|
||||||
- name: ELASTIC_APM_SECRET_TOKEN
|
default: true
|
||||||
apm_api_key:
|
type: bool
|
||||||
type: str
|
description:
|
||||||
description:
|
- Verifies the SSL certificate if an HTTPS connection.
|
||||||
- Use the APM API key
|
env:
|
||||||
env:
|
- name: ELASTIC_APM_VERIFY_SERVER_CERT
|
||||||
- name: ELASTIC_APM_API_KEY
|
traceparent:
|
||||||
apm_verify_server_cert:
|
type: str
|
||||||
default: true
|
description:
|
||||||
type: bool
|
- The L(W3C Trace Context header traceparent,https://www.w3.org/TR/trace-context-1/#traceparent-header).
|
||||||
description:
|
env:
|
||||||
- Verifies the SSL certificate if an HTTPS connection.
|
- name: TRACEPARENT
|
||||||
env:
|
requirements:
|
||||||
- name: ELASTIC_APM_VERIFY_SERVER_CERT
|
- elastic-apm (Python library)
|
||||||
traceparent:
|
"""
|
||||||
type: str
|
|
||||||
description:
|
|
||||||
- The L(W3C Trace Context header traceparent,https://www.w3.org/TR/trace-context-1/#traceparent-header).
|
|
||||||
env:
|
|
||||||
- name: TRACEPARENT
|
|
||||||
requirements:
|
|
||||||
- elastic-apm (Python library)
|
|
||||||
'''
|
|
||||||
|
|
||||||
|
|
||||||
EXAMPLES = '''
|
EXAMPLES = r"""
|
||||||
examples: |
|
examples: |-
|
||||||
Enable the plugin in ansible.cfg:
|
Enable the plugin in ansible.cfg:
|
||||||
[defaults]
|
[defaults]
|
||||||
callbacks_enabled = community.general.elastic
|
callbacks_enabled = community.general.elastic
|
||||||
@@ -76,7 +75,7 @@ examples: |
|
|||||||
export ELASTIC_APM_SERVER_URL=<your APM server URL)>
|
export ELASTIC_APM_SERVER_URL=<your APM server URL)>
|
||||||
export ELASTIC_APM_SERVICE_NAME=your_service_name
|
export ELASTIC_APM_SERVICE_NAME=your_service_name
|
||||||
export ELASTIC_APM_API_KEY=your_APM_API_KEY
|
export ELASTIC_APM_API_KEY=your_APM_API_KEY
|
||||||
'''
|
"""
|
||||||
|
|
||||||
import getpass
|
import getpass
|
||||||
import socket
|
import socket
|
||||||
@@ -88,7 +87,6 @@ from contextlib import closing
|
|||||||
from os.path import basename
|
from os.path import basename
|
||||||
|
|
||||||
from ansible.errors import AnsibleError, AnsibleRuntimeError
|
from ansible.errors import AnsibleError, AnsibleRuntimeError
|
||||||
from ansible.module_utils.six import raise_from
|
|
||||||
from ansible.plugins.callback import CallbackBase
|
from ansible.plugins.callback import CallbackBase
|
||||||
|
|
||||||
try:
|
try:
|
||||||
@@ -118,7 +116,7 @@ class TaskData:
|
|||||||
if host.uuid in self.host_data:
|
if host.uuid in self.host_data:
|
||||||
if host.status == 'included':
|
if host.status == 'included':
|
||||||
# concatenate task include output from multiple items
|
# concatenate task include output from multiple items
|
||||||
host.result = '%s\n%s' % (self.host_data[host.uuid].result, host.result)
|
host.result = f'{self.host_data[host.uuid].result}\n{host.result}'
|
||||||
else:
|
else:
|
||||||
return
|
return
|
||||||
|
|
||||||
@@ -166,7 +164,7 @@ class ElasticSource(object):
|
|||||||
args = None
|
args = None
|
||||||
|
|
||||||
if not task.no_log and not hide_task_arguments:
|
if not task.no_log and not hide_task_arguments:
|
||||||
args = ', '.join(('%s=%s' % a for a in task.args.items()))
|
args = ', '.join((f'{k}={v}' for k, v in task.args.items()))
|
||||||
|
|
||||||
tasks_data[uuid] = TaskData(uuid, name, path, play_name, action, args)
|
tasks_data[uuid] = TaskData(uuid, name, path, play_name, action, args)
|
||||||
|
|
||||||
@@ -225,7 +223,7 @@ class ElasticSource(object):
|
|||||||
def create_span_data(self, apm_cli, task_data, host_data):
|
def create_span_data(self, apm_cli, task_data, host_data):
|
||||||
""" create the span with the given TaskData and HostData """
|
""" create the span with the given TaskData and HostData """
|
||||||
|
|
||||||
name = '[%s] %s: %s' % (host_data.name, task_data.play, task_data.name)
|
name = f'[{host_data.name}] {task_data.play}: {task_data.name}'
|
||||||
|
|
||||||
message = "success"
|
message = "success"
|
||||||
status = "success"
|
status = "success"
|
||||||
@@ -259,7 +257,7 @@ class ElasticSource(object):
|
|||||||
"ansible.task.host.status": host_data.status}) as span:
|
"ansible.task.host.status": host_data.status}) as span:
|
||||||
span.outcome = status
|
span.outcome = status
|
||||||
if 'failure' in status:
|
if 'failure' in status:
|
||||||
exception = AnsibleRuntimeError(message="{0}: {1} failed with error message {2}".format(task_data.action, name, enriched_error_message))
|
exception = AnsibleRuntimeError(message=f"{task_data.action}: {name} failed with error message {enriched_error_message}")
|
||||||
apm_cli.capture_exception(exc_info=(type(exception), exception, exception.__traceback__), handled=True)
|
apm_cli.capture_exception(exc_info=(type(exception), exception, exception.__traceback__), handled=True)
|
||||||
|
|
||||||
def init_apm_client(self, apm_server_url, apm_service_name, apm_verify_server_cert, apm_secret_token, apm_api_key):
|
def init_apm_client(self, apm_server_url, apm_service_name, apm_verify_server_cert, apm_secret_token, apm_api_key):
|
||||||
@@ -288,7 +286,7 @@ class ElasticSource(object):
|
|||||||
message = result.get('msg', 'failed')
|
message = result.get('msg', 'failed')
|
||||||
exception = result.get('exception')
|
exception = result.get('exception')
|
||||||
stderr = result.get('stderr')
|
stderr = result.get('stderr')
|
||||||
return ('message: "{0}"\nexception: "{1}"\nstderr: "{2}"').format(message, exception, stderr)
|
return f"message: \"{message}\"\nexception: \"{exception}\"\nstderr: \"{stderr}\""
|
||||||
|
|
||||||
|
|
||||||
class CallbackModule(CallbackBase):
|
class CallbackModule(CallbackBase):
|
||||||
@@ -313,9 +311,7 @@ class CallbackModule(CallbackBase):
|
|||||||
self.disabled = False
|
self.disabled = False
|
||||||
|
|
||||||
if ELASTIC_LIBRARY_IMPORT_ERROR:
|
if ELASTIC_LIBRARY_IMPORT_ERROR:
|
||||||
raise_from(
|
raise AnsibleError('The `elastic-apm` must be installed to use this plugin') from ELASTIC_LIBRARY_IMPORT_ERROR
|
||||||
AnsibleError('The `elastic-apm` must be installed to use this plugin'),
|
|
||||||
ELASTIC_LIBRARY_IMPORT_ERROR)
|
|
||||||
|
|
||||||
self.tasks_data = OrderedDict()
|
self.tasks_data = OrderedDict()
|
||||||
|
|
||||||
|
|||||||
@@ -1,240 +0,0 @@
|
|||||||
# -*- coding: utf-8 -*-
|
|
||||||
# Copyright (c) 2014, Matt Martz <matt@sivel.net>
|
|
||||||
# Copyright (c) 2017 Ansible Project
|
|
||||||
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
|
|
||||||
# SPDX-License-Identifier: GPL-3.0-or-later
|
|
||||||
|
|
||||||
from __future__ import (absolute_import, division, print_function)
|
|
||||||
__metaclass__ = type
|
|
||||||
|
|
||||||
DOCUMENTATION = '''
|
|
||||||
author: Unknown (!UNKNOWN)
|
|
||||||
name: hipchat
|
|
||||||
type: notification
|
|
||||||
requirements:
|
|
||||||
- whitelist in configuration.
|
|
||||||
- prettytable (python lib)
|
|
||||||
short_description: post task events to hipchat
|
|
||||||
description:
|
|
||||||
- This callback plugin sends status updates to a HipChat channel during playbook execution.
|
|
||||||
- Before 2.4 only environment variables were available for configuring this plugin.
|
|
||||||
deprecated:
|
|
||||||
removed_in: 10.0.0
|
|
||||||
why: The hipchat service has been discontinued and the self-hosted variant has been End of Life since 2020.
|
|
||||||
alternative: There is none.
|
|
||||||
options:
|
|
||||||
token:
|
|
||||||
description: HipChat API token for v1 or v2 API.
|
|
||||||
type: str
|
|
||||||
required: true
|
|
||||||
env:
|
|
||||||
- name: HIPCHAT_TOKEN
|
|
||||||
ini:
|
|
||||||
- section: callback_hipchat
|
|
||||||
key: token
|
|
||||||
api_version:
|
|
||||||
description: HipChat API version, v1 or v2.
|
|
||||||
type: str
|
|
||||||
choices:
|
|
||||||
- v1
|
|
||||||
- v2
|
|
||||||
required: false
|
|
||||||
default: v1
|
|
||||||
env:
|
|
||||||
- name: HIPCHAT_API_VERSION
|
|
||||||
ini:
|
|
||||||
- section: callback_hipchat
|
|
||||||
key: api_version
|
|
||||||
room:
|
|
||||||
description: HipChat room to post in.
|
|
||||||
type: str
|
|
||||||
default: ansible
|
|
||||||
env:
|
|
||||||
- name: HIPCHAT_ROOM
|
|
||||||
ini:
|
|
||||||
- section: callback_hipchat
|
|
||||||
key: room
|
|
||||||
from:
|
|
||||||
description: Name to post as
|
|
||||||
type: str
|
|
||||||
default: ansible
|
|
||||||
env:
|
|
||||||
- name: HIPCHAT_FROM
|
|
||||||
ini:
|
|
||||||
- section: callback_hipchat
|
|
||||||
key: from
|
|
||||||
notify:
|
|
||||||
description: Add notify flag to important messages
|
|
||||||
type: bool
|
|
||||||
default: true
|
|
||||||
env:
|
|
||||||
- name: HIPCHAT_NOTIFY
|
|
||||||
ini:
|
|
||||||
- section: callback_hipchat
|
|
||||||
key: notify
|
|
||||||
|
|
||||||
'''
|
|
||||||
|
|
||||||
import os
|
|
||||||
import json
|
|
||||||
|
|
||||||
try:
|
|
||||||
import prettytable
|
|
||||||
HAS_PRETTYTABLE = True
|
|
||||||
except ImportError:
|
|
||||||
HAS_PRETTYTABLE = False
|
|
||||||
|
|
||||||
from ansible.plugins.callback import CallbackBase
|
|
||||||
from ansible.module_utils.six.moves.urllib.parse import urlencode
|
|
||||||
from ansible.module_utils.urls import open_url
|
|
||||||
|
|
||||||
|
|
||||||
class CallbackModule(CallbackBase):
|
|
||||||
"""This is an example ansible callback plugin that sends status
|
|
||||||
updates to a HipChat channel during playbook execution.
|
|
||||||
"""
|
|
||||||
|
|
||||||
CALLBACK_VERSION = 2.0
|
|
||||||
CALLBACK_TYPE = 'notification'
|
|
||||||
CALLBACK_NAME = 'community.general.hipchat'
|
|
||||||
CALLBACK_NEEDS_WHITELIST = True
|
|
||||||
|
|
||||||
API_V1_URL = 'https://api.hipchat.com/v1/rooms/message'
|
|
||||||
API_V2_URL = 'https://api.hipchat.com/v2/'
|
|
||||||
|
|
||||||
def __init__(self):
|
|
||||||
|
|
||||||
super(CallbackModule, self).__init__()
|
|
||||||
|
|
||||||
if not HAS_PRETTYTABLE:
|
|
||||||
self.disabled = True
|
|
||||||
self._display.warning('The `prettytable` python module is not installed. '
|
|
||||||
'Disabling the HipChat callback plugin.')
|
|
||||||
self.printed_playbook = False
|
|
||||||
self.playbook_name = None
|
|
||||||
self.play = None
|
|
||||||
|
|
||||||
def set_options(self, task_keys=None, var_options=None, direct=None):
|
|
||||||
super(CallbackModule, self).set_options(task_keys=task_keys, var_options=var_options, direct=direct)
|
|
||||||
|
|
||||||
self.token = self.get_option('token')
|
|
||||||
self.api_version = self.get_option('api_version')
|
|
||||||
self.from_name = self.get_option('from')
|
|
||||||
self.allow_notify = self.get_option('notify')
|
|
||||||
self.room = self.get_option('room')
|
|
||||||
|
|
||||||
if self.token is None:
|
|
||||||
self.disabled = True
|
|
||||||
self._display.warning('HipChat token could not be loaded. The HipChat '
|
|
||||||
'token can be provided using the `HIPCHAT_TOKEN` '
|
|
||||||
'environment variable.')
|
|
||||||
|
|
||||||
# Pick the request handler.
|
|
||||||
if self.api_version == 'v2':
|
|
||||||
self.send_msg = self.send_msg_v2
|
|
||||||
else:
|
|
||||||
self.send_msg = self.send_msg_v1
|
|
||||||
|
|
||||||
def send_msg_v2(self, msg, msg_format='text', color='yellow', notify=False):
|
|
||||||
"""Method for sending a message to HipChat"""
|
|
||||||
|
|
||||||
headers = {'Authorization': 'Bearer %s' % self.token, 'Content-Type': 'application/json'}
|
|
||||||
|
|
||||||
body = {}
|
|
||||||
body['room_id'] = self.room
|
|
||||||
body['from'] = self.from_name[:15] # max length is 15
|
|
||||||
body['message'] = msg
|
|
||||||
body['message_format'] = msg_format
|
|
||||||
body['color'] = color
|
|
||||||
body['notify'] = self.allow_notify and notify
|
|
||||||
|
|
||||||
data = json.dumps(body)
|
|
||||||
url = self.API_V2_URL + "room/{room_id}/notification".format(room_id=self.room)
|
|
||||||
try:
|
|
||||||
response = open_url(url, data=data, headers=headers, method='POST')
|
|
||||||
return response.read()
|
|
||||||
except Exception as ex:
|
|
||||||
self._display.warning('Could not submit message to hipchat: {0}'.format(ex))
|
|
||||||
|
|
||||||
def send_msg_v1(self, msg, msg_format='text', color='yellow', notify=False):
|
|
||||||
"""Method for sending a message to HipChat"""
|
|
||||||
|
|
||||||
params = {}
|
|
||||||
params['room_id'] = self.room
|
|
||||||
params['from'] = self.from_name[:15] # max length is 15
|
|
||||||
params['message'] = msg
|
|
||||||
params['message_format'] = msg_format
|
|
||||||
params['color'] = color
|
|
||||||
params['notify'] = int(self.allow_notify and notify)
|
|
||||||
|
|
||||||
url = ('%s?auth_token=%s' % (self.API_V1_URL, self.token))
|
|
||||||
try:
|
|
||||||
response = open_url(url, data=urlencode(params))
|
|
||||||
return response.read()
|
|
||||||
except Exception as ex:
|
|
||||||
self._display.warning('Could not submit message to hipchat: {0}'.format(ex))
|
|
||||||
|
|
||||||
def v2_playbook_on_play_start(self, play):
|
|
||||||
"""Display Playbook and play start messages"""
|
|
||||||
|
|
||||||
self.play = play
|
|
||||||
name = play.name
|
|
||||||
# This block sends information about a playbook when it starts
|
|
||||||
# The playbook object is not immediately available at
|
|
||||||
# playbook_on_start so we grab it via the play
|
|
||||||
#
|
|
||||||
# Displays info about playbook being started by a person on an
|
|
||||||
# inventory, as well as Tags, Skip Tags and Limits
|
|
||||||
if not self.printed_playbook:
|
|
||||||
self.playbook_name, dummy = os.path.splitext(os.path.basename(self.play.playbook.filename))
|
|
||||||
host_list = self.play.playbook.inventory.host_list
|
|
||||||
inventory = os.path.basename(os.path.realpath(host_list))
|
|
||||||
self.send_msg("%s: Playbook initiated by %s against %s" %
|
|
||||||
(self.playbook_name,
|
|
||||||
self.play.playbook.remote_user,
|
|
||||||
inventory), notify=True)
|
|
||||||
self.printed_playbook = True
|
|
||||||
subset = self.play.playbook.inventory._subset
|
|
||||||
skip_tags = self.play.playbook.skip_tags
|
|
||||||
self.send_msg("%s:\nTags: %s\nSkip Tags: %s\nLimit: %s" %
|
|
||||||
(self.playbook_name,
|
|
||||||
', '.join(self.play.playbook.only_tags),
|
|
||||||
', '.join(skip_tags) if skip_tags else None,
|
|
||||||
', '.join(subset) if subset else subset))
|
|
||||||
|
|
||||||
# This is where we actually say we are starting a play
|
|
||||||
self.send_msg("%s: Starting play: %s" %
|
|
||||||
(self.playbook_name, name))
|
|
||||||
|
|
||||||
def playbook_on_stats(self, stats):
|
|
||||||
"""Display info about playbook statistics"""
|
|
||||||
hosts = sorted(stats.processed.keys())
|
|
||||||
|
|
||||||
t = prettytable.PrettyTable(['Host', 'Ok', 'Changed', 'Unreachable',
|
|
||||||
'Failures'])
|
|
||||||
|
|
||||||
failures = False
|
|
||||||
unreachable = False
|
|
||||||
|
|
||||||
for h in hosts:
|
|
||||||
s = stats.summarize(h)
|
|
||||||
|
|
||||||
if s['failures'] > 0:
|
|
||||||
failures = True
|
|
||||||
if s['unreachable'] > 0:
|
|
||||||
unreachable = True
|
|
||||||
|
|
||||||
t.add_row([h] + [s[k] for k in ['ok', 'changed', 'unreachable',
|
|
||||||
'failures']])
|
|
||||||
|
|
||||||
self.send_msg("%s: Playbook complete" % self.playbook_name,
|
|
||||||
notify=True)
|
|
||||||
|
|
||||||
if failures or unreachable:
|
|
||||||
color = 'red'
|
|
||||||
self.send_msg("%s: Failures detected" % self.playbook_name,
|
|
||||||
color=color, notify=True)
|
|
||||||
else:
|
|
||||||
color = 'green'
|
|
||||||
|
|
||||||
self.send_msg("/code %s:\n%s" % (self.playbook_name, t), color=color)
|
|
||||||
@@ -4,45 +4,44 @@
|
|||||||
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
|
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||||
# SPDX-License-Identifier: GPL-3.0-or-later
|
# SPDX-License-Identifier: GPL-3.0-or-later
|
||||||
|
|
||||||
from __future__ import (absolute_import, division, print_function)
|
from __future__ import annotations
|
||||||
__metaclass__ = type
|
|
||||||
|
|
||||||
DOCUMENTATION = '''
|
DOCUMENTATION = r"""
|
||||||
author: Unknown (!UNKNOWN)
|
author: Unknown (!UNKNOWN)
|
||||||
name: jabber
|
name: jabber
|
||||||
type: notification
|
type: notification
|
||||||
short_description: post task events to a jabber server
|
short_description: post task events to a Jabber server
|
||||||
description:
|
description:
|
||||||
- The chatty part of ChatOps with a Hipchat server as a target.
|
- The chatty part of ChatOps with a Hipchat server as a target.
|
||||||
- This callback plugin sends status updates to a HipChat channel during playbook execution.
|
- This callback plugin sends status updates to a HipChat channel during playbook execution.
|
||||||
requirements:
|
requirements:
|
||||||
- xmpp (Python library U(https://github.com/ArchipelProject/xmpppy))
|
- xmpp (Python library U(https://github.com/ArchipelProject/xmpppy))
|
||||||
options:
|
options:
|
||||||
server:
|
server:
|
||||||
description: connection info to jabber server
|
description: Connection info to Jabber server.
|
||||||
type: str
|
type: str
|
||||||
required: true
|
required: true
|
||||||
env:
|
env:
|
||||||
- name: JABBER_SERV
|
- name: JABBER_SERV
|
||||||
user:
|
user:
|
||||||
description: Jabber user to authenticate as
|
description: Jabber user to authenticate as.
|
||||||
type: str
|
type: str
|
||||||
required: true
|
required: true
|
||||||
env:
|
env:
|
||||||
- name: JABBER_USER
|
- name: JABBER_USER
|
||||||
password:
|
password:
|
||||||
description: Password for the user to the jabber server
|
description: Password for the user to the Jabber server.
|
||||||
type: str
|
type: str
|
||||||
required: true
|
required: true
|
||||||
env:
|
env:
|
||||||
- name: JABBER_PASS
|
- name: JABBER_PASS
|
||||||
to:
|
to:
|
||||||
description: chat identifier that will receive the message
|
description: Chat identifier that will receive the message.
|
||||||
type: str
|
type: str
|
||||||
required: true
|
required: true
|
||||||
env:
|
env:
|
||||||
- name: JABBER_TO
|
- name: JABBER_TO
|
||||||
'''
|
"""
|
||||||
|
|
||||||
import os
|
import os
|
||||||
|
|
||||||
@@ -102,7 +101,7 @@ class CallbackModule(CallbackBase):
|
|||||||
"""Display Playbook and play start messages"""
|
"""Display Playbook and play start messages"""
|
||||||
self.play = play
|
self.play = play
|
||||||
name = play.name
|
name = play.name
|
||||||
self.send_msg("Ansible starting play: %s" % (name))
|
self.send_msg(f"Ansible starting play: {name}")
|
||||||
|
|
||||||
def playbook_on_stats(self, stats):
|
def playbook_on_stats(self, stats):
|
||||||
name = self.play
|
name = self.play
|
||||||
@@ -118,7 +117,7 @@ class CallbackModule(CallbackBase):
|
|||||||
|
|
||||||
if failures or unreachable:
|
if failures or unreachable:
|
||||||
out = self.debug
|
out = self.debug
|
||||||
self.send_msg("%s: Failures detected \n%s \nHost: %s\n Failed at:\n%s" % (name, self.task, h, out))
|
self.send_msg(f"{name}: Failures detected \n{self.task} \nHost: {h}\n Failed at:\n{out}")
|
||||||
else:
|
else:
|
||||||
out = self.debug
|
out = self.debug
|
||||||
self.send_msg("Great! \n Playbook %s completed:\n%s \n Last task debug:\n %s" % (name, s, out))
|
self.send_msg(f"Great! \n Playbook {name} completed:\n{s} \n Last task debug:\n {out}")
|
||||||
|
|||||||
@@ -4,30 +4,29 @@
|
|||||||
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
|
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||||
# SPDX-License-Identifier: GPL-3.0-or-later
|
# SPDX-License-Identifier: GPL-3.0-or-later
|
||||||
|
|
||||||
from __future__ import (absolute_import, division, print_function)
|
from __future__ import annotations
|
||||||
__metaclass__ = type
|
|
||||||
|
|
||||||
DOCUMENTATION = '''
|
DOCUMENTATION = r"""
|
||||||
author: Unknown (!UNKNOWN)
|
author: Unknown (!UNKNOWN)
|
||||||
name: log_plays
|
name: log_plays
|
||||||
type: notification
|
type: notification
|
||||||
short_description: write playbook output to log file
|
short_description: write playbook output to log file
|
||||||
description:
|
description:
|
||||||
- This callback writes playbook output to a file per host in the C(/var/log/ansible/hosts) directory.
|
- This callback writes playbook output to a file per host in the C(/var/log/ansible/hosts) directory.
|
||||||
requirements:
|
requirements:
|
||||||
- Whitelist in configuration
|
- Whitelist in configuration
|
||||||
- A writeable C(/var/log/ansible/hosts) directory by the user executing Ansible on the controller
|
- A writeable C(/var/log/ansible/hosts) directory by the user executing Ansible on the controller
|
||||||
options:
|
options:
|
||||||
log_folder:
|
log_folder:
|
||||||
default: /var/log/ansible/hosts
|
default: /var/log/ansible/hosts
|
||||||
description: The folder where log files will be created.
|
description: The folder where log files will be created.
|
||||||
type: str
|
type: str
|
||||||
env:
|
env:
|
||||||
- name: ANSIBLE_LOG_FOLDER
|
- name: ANSIBLE_LOG_FOLDER
|
||||||
ini:
|
ini:
|
||||||
- section: callback_log_plays
|
- section: callback_log_plays
|
||||||
key: log_folder
|
key: log_folder
|
||||||
'''
|
"""
|
||||||
|
|
||||||
import os
|
import os
|
||||||
import time
|
import time
|
||||||
@@ -35,7 +34,7 @@ import json
|
|||||||
|
|
||||||
from ansible.utils.path import makedirs_safe
|
from ansible.utils.path import makedirs_safe
|
||||||
from ansible.module_utils.common.text.converters import to_bytes
|
from ansible.module_utils.common.text.converters import to_bytes
|
||||||
from ansible.module_utils.common._collections_compat import MutableMapping
|
from collections.abc import MutableMapping
|
||||||
from ansible.parsing.ajson import AnsibleJSONEncoder
|
from ansible.parsing.ajson import AnsibleJSONEncoder
|
||||||
from ansible.plugins.callback import CallbackBase
|
from ansible.plugins.callback import CallbackBase
|
||||||
|
|
||||||
@@ -57,7 +56,10 @@ class CallbackModule(CallbackBase):
|
|||||||
CALLBACK_NEEDS_WHITELIST = True
|
CALLBACK_NEEDS_WHITELIST = True
|
||||||
|
|
||||||
TIME_FORMAT = "%b %d %Y %H:%M:%S"
|
TIME_FORMAT = "%b %d %Y %H:%M:%S"
|
||||||
MSG_FORMAT = "%(now)s - %(playbook)s - %(task_name)s - %(task_action)s - %(category)s - %(data)s\n\n"
|
|
||||||
|
@staticmethod
|
||||||
|
def _make_msg(now, playbook, task_name, task_action, category, data):
|
||||||
|
return f"{now} - {playbook} - {task_name} - {task_action} - {category} - {data}\n\n"
|
||||||
|
|
||||||
def __init__(self):
|
def __init__(self):
|
||||||
|
|
||||||
@@ -82,22 +84,12 @@ class CallbackModule(CallbackBase):
|
|||||||
invocation = data.pop('invocation', None)
|
invocation = data.pop('invocation', None)
|
||||||
data = json.dumps(data, cls=AnsibleJSONEncoder)
|
data = json.dumps(data, cls=AnsibleJSONEncoder)
|
||||||
if invocation is not None:
|
if invocation is not None:
|
||||||
data = json.dumps(invocation) + " => %s " % data
|
data = f"{json.dumps(invocation)} => {data} "
|
||||||
|
|
||||||
path = os.path.join(self.log_folder, result._host.get_name())
|
path = os.path.join(self.log_folder, result._host.get_name())
|
||||||
now = time.strftime(self.TIME_FORMAT, time.localtime())
|
now = time.strftime(self.TIME_FORMAT, time.localtime())
|
||||||
|
|
||||||
msg = to_bytes(
|
msg = to_bytes(self._make_msg(now, self.playbook, result._task.name, result._task.action, category, data))
|
||||||
self.MSG_FORMAT
|
|
||||||
% dict(
|
|
||||||
now=now,
|
|
||||||
playbook=self.playbook,
|
|
||||||
task_name=result._task.name,
|
|
||||||
task_action=result._task.action,
|
|
||||||
category=category,
|
|
||||||
data=data,
|
|
||||||
)
|
|
||||||
)
|
|
||||||
with open(path, "ab") as fd:
|
with open(path, "ab") as fd:
|
||||||
fd.write(msg)
|
fd.write(msg)
|
||||||
|
|
||||||
|
|||||||
@@ -3,44 +3,43 @@
|
|||||||
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
|
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||||
# SPDX-License-Identifier: GPL-3.0-or-later
|
# SPDX-License-Identifier: GPL-3.0-or-later
|
||||||
|
|
||||||
from __future__ import (absolute_import, division, print_function)
|
from __future__ import annotations
|
||||||
__metaclass__ = type
|
|
||||||
|
|
||||||
DOCUMENTATION = '''
|
DOCUMENTATION = r"""
|
||||||
name: loganalytics
|
name: loganalytics
|
||||||
type: notification
|
type: notification
|
||||||
short_description: Posts task results to Azure Log Analytics
|
short_description: Posts task results to Azure Log Analytics
|
||||||
author: "Cyrus Li (@zhcli) <cyrus1006@gmail.com>"
|
author: "Cyrus Li (@zhcli) <cyrus1006@gmail.com>"
|
||||||
description:
|
description:
|
||||||
- This callback plugin will post task results in JSON formatted to an Azure Log Analytics workspace.
|
- This callback plugin will post task results in JSON formatted to an Azure Log Analytics workspace.
|
||||||
- Credits to authors of splunk callback plugin.
|
- Credits to authors of splunk callback plugin.
|
||||||
version_added: "2.4.0"
|
version_added: "2.4.0"
|
||||||
requirements:
|
requirements:
|
||||||
- Whitelisting this callback plugin.
|
- Whitelisting this callback plugin.
|
||||||
- An Azure log analytics work space has been established.
|
- An Azure log analytics work space has been established.
|
||||||
options:
|
options:
|
||||||
workspace_id:
|
workspace_id:
|
||||||
description: Workspace ID of the Azure log analytics workspace.
|
description: Workspace ID of the Azure log analytics workspace.
|
||||||
type: str
|
type: str
|
||||||
required: true
|
required: true
|
||||||
env:
|
env:
|
||||||
- name: WORKSPACE_ID
|
- name: WORKSPACE_ID
|
||||||
ini:
|
ini:
|
||||||
- section: callback_loganalytics
|
- section: callback_loganalytics
|
||||||
key: workspace_id
|
key: workspace_id
|
||||||
shared_key:
|
shared_key:
|
||||||
description: Shared key to connect to Azure log analytics workspace.
|
description: Shared key to connect to Azure log analytics workspace.
|
||||||
type: str
|
type: str
|
||||||
required: true
|
required: true
|
||||||
env:
|
env:
|
||||||
- name: WORKSPACE_SHARED_KEY
|
- name: WORKSPACE_SHARED_KEY
|
||||||
ini:
|
ini:
|
||||||
- section: callback_loganalytics
|
- section: callback_loganalytics
|
||||||
key: shared_key
|
key: shared_key
|
||||||
'''
|
"""
|
||||||
|
|
||||||
EXAMPLES = '''
|
EXAMPLES = r"""
|
||||||
examples: |
|
examples: |-
|
||||||
Whitelist the plugin in ansible.cfg:
|
Whitelist the plugin in ansible.cfg:
|
||||||
[defaults]
|
[defaults]
|
||||||
callback_whitelist = community.general.loganalytics
|
callback_whitelist = community.general.loganalytics
|
||||||
@@ -51,7 +50,7 @@ examples: |
|
|||||||
[callback_loganalytics]
|
[callback_loganalytics]
|
||||||
workspace_id = 01234567-0123-0123-0123-01234567890a
|
workspace_id = 01234567-0123-0123-0123-01234567890a
|
||||||
shared_key = dZD0kCbKl3ehZG6LHFMuhtE0yHiFCmetzFMc2u+roXIUQuatqU924SsAAAAPemhjbGlAemhjbGktTUJQAQIDBA==
|
shared_key = dZD0kCbKl3ehZG6LHFMuhtE0yHiFCmetzFMc2u+roXIUQuatqU924SsAAAAPemhjbGlAemhjbGktTUJQAQIDBA==
|
||||||
'''
|
"""
|
||||||
|
|
||||||
import hashlib
|
import hashlib
|
||||||
import hmac
|
import hmac
|
||||||
@@ -84,18 +83,17 @@ class AzureLogAnalyticsSource(object):
|
|||||||
|
|
||||||
def __build_signature(self, date, workspace_id, shared_key, content_length):
|
def __build_signature(self, date, workspace_id, shared_key, content_length):
|
||||||
# Build authorisation signature for Azure log analytics API call
|
# Build authorisation signature for Azure log analytics API call
|
||||||
sigs = "POST\n{0}\napplication/json\nx-ms-date:{1}\n/api/logs".format(
|
sigs = f"POST\n{content_length}\napplication/json\nx-ms-date:{date}\n/api/logs"
|
||||||
str(content_length), date)
|
|
||||||
utf8_sigs = sigs.encode('utf-8')
|
utf8_sigs = sigs.encode('utf-8')
|
||||||
decoded_shared_key = base64.b64decode(shared_key)
|
decoded_shared_key = base64.b64decode(shared_key)
|
||||||
hmac_sha256_sigs = hmac.new(
|
hmac_sha256_sigs = hmac.new(
|
||||||
decoded_shared_key, utf8_sigs, digestmod=hashlib.sha256).digest()
|
decoded_shared_key, utf8_sigs, digestmod=hashlib.sha256).digest()
|
||||||
encoded_hash = base64.b64encode(hmac_sha256_sigs).decode('utf-8')
|
encoded_hash = base64.b64encode(hmac_sha256_sigs).decode('utf-8')
|
||||||
signature = "SharedKey {0}:{1}".format(workspace_id, encoded_hash)
|
signature = f"SharedKey {workspace_id}:{encoded_hash}"
|
||||||
return signature
|
return signature
|
||||||
|
|
||||||
def __build_workspace_url(self, workspace_id):
|
def __build_workspace_url(self, workspace_id):
|
||||||
return "https://{0}.ods.opinsights.azure.com/api/logs?api-version=2016-04-01".format(workspace_id)
|
return f"https://{workspace_id}.ods.opinsights.azure.com/api/logs?api-version=2016-04-01"
|
||||||
|
|
||||||
def __rfc1123date(self):
|
def __rfc1123date(self):
|
||||||
return now().strftime('%a, %d %b %Y %H:%M:%S GMT')
|
return now().strftime('%a, %d %b %Y %H:%M:%S GMT')
|
||||||
|
|||||||
@@ -3,59 +3,58 @@
|
|||||||
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
|
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||||
# SPDX-License-Identifier: GPL-3.0-or-later
|
# SPDX-License-Identifier: GPL-3.0-or-later
|
||||||
|
|
||||||
from __future__ import (absolute_import, division, print_function)
|
from __future__ import annotations
|
||||||
__metaclass__ = type
|
|
||||||
|
|
||||||
DOCUMENTATION = '''
|
DOCUMENTATION = r"""
|
||||||
author: Unknown (!UNKNOWN)
|
author: Unknown (!UNKNOWN)
|
||||||
name: logdna
|
name: logdna
|
||||||
type: notification
|
type: notification
|
||||||
short_description: Sends playbook logs to LogDNA
|
short_description: Sends playbook logs to LogDNA
|
||||||
description:
|
description:
|
||||||
- This callback will report logs from playbook actions, tasks, and events to LogDNA (U(https://app.logdna.com)).
|
- This callback will report logs from playbook actions, tasks, and events to LogDNA (U(https://app.logdna.com)).
|
||||||
requirements:
|
requirements:
|
||||||
- LogDNA Python Library (U(https://github.com/logdna/python))
|
- LogDNA Python Library (U(https://github.com/logdna/python))
|
||||||
- whitelisting in configuration
|
- whitelisting in configuration
|
||||||
options:
|
options:
|
||||||
conf_key:
|
conf_key:
|
||||||
required: true
|
required: true
|
||||||
description: LogDNA Ingestion Key.
|
description: LogDNA Ingestion Key.
|
||||||
type: string
|
type: string
|
||||||
env:
|
env:
|
||||||
- name: LOGDNA_INGESTION_KEY
|
- name: LOGDNA_INGESTION_KEY
|
||||||
ini:
|
ini:
|
||||||
- section: callback_logdna
|
- section: callback_logdna
|
||||||
key: conf_key
|
key: conf_key
|
||||||
plugin_ignore_errors:
|
plugin_ignore_errors:
|
||||||
required: false
|
required: false
|
||||||
description: Whether to ignore errors on failing or not.
|
description: Whether to ignore errors on failing or not.
|
||||||
type: boolean
|
type: boolean
|
||||||
env:
|
env:
|
||||||
- name: ANSIBLE_IGNORE_ERRORS
|
- name: ANSIBLE_IGNORE_ERRORS
|
||||||
ini:
|
ini:
|
||||||
- section: callback_logdna
|
- section: callback_logdna
|
||||||
key: plugin_ignore_errors
|
key: plugin_ignore_errors
|
||||||
default: false
|
default: false
|
||||||
conf_hostname:
|
conf_hostname:
|
||||||
required: false
|
required: false
|
||||||
description: Alternative Host Name; the current host name by default.
|
description: Alternative Host Name; the current host name by default.
|
||||||
type: string
|
type: string
|
||||||
env:
|
env:
|
||||||
- name: LOGDNA_HOSTNAME
|
- name: LOGDNA_HOSTNAME
|
||||||
ini:
|
ini:
|
||||||
- section: callback_logdna
|
- section: callback_logdna
|
||||||
key: conf_hostname
|
key: conf_hostname
|
||||||
conf_tags:
|
conf_tags:
|
||||||
required: false
|
required: false
|
||||||
description: Tags.
|
description: Tags.
|
||||||
type: string
|
type: string
|
||||||
env:
|
env:
|
||||||
- name: LOGDNA_TAGS
|
- name: LOGDNA_TAGS
|
||||||
ini:
|
ini:
|
||||||
- section: callback_logdna
|
- section: callback_logdna
|
||||||
key: conf_tags
|
key: conf_tags
|
||||||
default: ansible
|
default: ansible
|
||||||
'''
|
"""
|
||||||
|
|
||||||
import logging
|
import logging
|
||||||
import json
|
import json
|
||||||
@@ -73,7 +72,7 @@ except ImportError:
|
|||||||
|
|
||||||
# Getting MAC Address of system:
|
# Getting MAC Address of system:
|
||||||
def get_mac():
|
def get_mac():
|
||||||
mac = "%012x" % getnode()
|
mac = f"{getnode():012x}"
|
||||||
return ":".join(map(lambda index: mac[index:index + 2], range(int(len(mac) / 2))))
|
return ":".join(map(lambda index: mac[index:index + 2], range(int(len(mac) / 2))))
|
||||||
|
|
||||||
|
|
||||||
@@ -161,7 +160,7 @@ class CallbackModule(CallbackBase):
|
|||||||
if ninvalidKeys > 0:
|
if ninvalidKeys > 0:
|
||||||
for key in invalidKeys:
|
for key in invalidKeys:
|
||||||
del meta[key]
|
del meta[key]
|
||||||
meta['__errors'] = 'These keys have been sanitized: ' + ', '.join(invalidKeys)
|
meta['__errors'] = f"These keys have been sanitized: {', '.join(invalidKeys)}"
|
||||||
return meta
|
return meta
|
||||||
|
|
||||||
def sanitizeJSON(self, data):
|
def sanitizeJSON(self, data):
|
||||||
|
|||||||
@@ -3,82 +3,79 @@
|
|||||||
# Copyright (c) 2017 Ansible Project
|
# Copyright (c) 2017 Ansible Project
|
||||||
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
|
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||||
# SPDX-License-Identifier: GPL-3.0-or-later
|
# SPDX-License-Identifier: GPL-3.0-or-later
|
||||||
from __future__ import (absolute_import, division, print_function)
|
from __future__ import annotations
|
||||||
__metaclass__ = type
|
|
||||||
|
|
||||||
DOCUMENTATION = '''
|
DOCUMENTATION = r"""
|
||||||
author: Unknown (!UNKNOWN)
|
author: Unknown (!UNKNOWN)
|
||||||
name: logentries
|
name: logentries
|
||||||
type: notification
|
type: notification
|
||||||
short_description: Sends events to Logentries
|
short_description: Sends events to Logentries
|
||||||
|
description:
|
||||||
|
- This callback plugin will generate JSON objects and send them to Logentries using TCP for auditing/debugging purposes.
|
||||||
|
requirements:
|
||||||
|
- whitelisting in configuration
|
||||||
|
- certifi (Python library)
|
||||||
|
- flatdict (Python library), if you want to use the O(flatten) option
|
||||||
|
options:
|
||||||
|
api:
|
||||||
|
description: URI to the Logentries API.
|
||||||
|
type: str
|
||||||
|
env:
|
||||||
|
- name: LOGENTRIES_API
|
||||||
|
default: data.logentries.com
|
||||||
|
ini:
|
||||||
|
- section: callback_logentries
|
||||||
|
key: api
|
||||||
|
port:
|
||||||
|
description: HTTP port to use when connecting to the API.
|
||||||
|
type: int
|
||||||
|
env:
|
||||||
|
- name: LOGENTRIES_PORT
|
||||||
|
default: 80
|
||||||
|
ini:
|
||||||
|
- section: callback_logentries
|
||||||
|
key: port
|
||||||
|
tls_port:
|
||||||
|
description: Port to use when connecting to the API when TLS is enabled.
|
||||||
|
type: int
|
||||||
|
env:
|
||||||
|
- name: LOGENTRIES_TLS_PORT
|
||||||
|
default: 443
|
||||||
|
ini:
|
||||||
|
- section: callback_logentries
|
||||||
|
key: tls_port
|
||||||
|
token:
|
||||||
|
description: The logentries C(TCP token).
|
||||||
|
type: str
|
||||||
|
env:
|
||||||
|
- name: LOGENTRIES_ANSIBLE_TOKEN
|
||||||
|
required: true
|
||||||
|
ini:
|
||||||
|
- section: callback_logentries
|
||||||
|
key: token
|
||||||
|
use_tls:
|
||||||
description:
|
description:
|
||||||
- This callback plugin will generate JSON objects and send them to Logentries via TCP for auditing/debugging purposes.
|
- Toggle to decide whether to use TLS to encrypt the communications with the API server.
|
||||||
- Before 2.4, if you wanted to use an ini configuration, the file must be placed in the same directory as this plugin and named C(logentries.ini).
|
env:
|
||||||
- In 2.4 and above you can just put it in the main Ansible configuration file.
|
- name: LOGENTRIES_USE_TLS
|
||||||
requirements:
|
default: false
|
||||||
- whitelisting in configuration
|
type: boolean
|
||||||
- certifi (Python library)
|
ini:
|
||||||
- flatdict (Python library), if you want to use the O(flatten) option
|
- section: callback_logentries
|
||||||
options:
|
key: use_tls
|
||||||
api:
|
flatten:
|
||||||
description: URI to the Logentries API.
|
description: Flatten complex data structures into a single dictionary with complex keys.
|
||||||
type: str
|
type: boolean
|
||||||
env:
|
default: false
|
||||||
- name: LOGENTRIES_API
|
env:
|
||||||
default: data.logentries.com
|
- name: LOGENTRIES_FLATTEN
|
||||||
ini:
|
ini:
|
||||||
- section: callback_logentries
|
- section: callback_logentries
|
||||||
key: api
|
key: flatten
|
||||||
port:
|
"""
|
||||||
description: HTTP port to use when connecting to the API.
|
|
||||||
type: int
|
|
||||||
env:
|
|
||||||
- name: LOGENTRIES_PORT
|
|
||||||
default: 80
|
|
||||||
ini:
|
|
||||||
- section: callback_logentries
|
|
||||||
key: port
|
|
||||||
tls_port:
|
|
||||||
description: Port to use when connecting to the API when TLS is enabled.
|
|
||||||
type: int
|
|
||||||
env:
|
|
||||||
- name: LOGENTRIES_TLS_PORT
|
|
||||||
default: 443
|
|
||||||
ini:
|
|
||||||
- section: callback_logentries
|
|
||||||
key: tls_port
|
|
||||||
token:
|
|
||||||
description: The logentries C(TCP token).
|
|
||||||
type: str
|
|
||||||
env:
|
|
||||||
- name: LOGENTRIES_ANSIBLE_TOKEN
|
|
||||||
required: true
|
|
||||||
ini:
|
|
||||||
- section: callback_logentries
|
|
||||||
key: token
|
|
||||||
use_tls:
|
|
||||||
description:
|
|
||||||
- Toggle to decide whether to use TLS to encrypt the communications with the API server.
|
|
||||||
env:
|
|
||||||
- name: LOGENTRIES_USE_TLS
|
|
||||||
default: false
|
|
||||||
type: boolean
|
|
||||||
ini:
|
|
||||||
- section: callback_logentries
|
|
||||||
key: use_tls
|
|
||||||
flatten:
|
|
||||||
description: Flatten complex data structures into a single dictionary with complex keys.
|
|
||||||
type: boolean
|
|
||||||
default: false
|
|
||||||
env:
|
|
||||||
- name: LOGENTRIES_FLATTEN
|
|
||||||
ini:
|
|
||||||
- section: callback_logentries
|
|
||||||
key: flatten
|
|
||||||
'''
|
|
||||||
|
|
||||||
EXAMPLES = '''
|
EXAMPLES = r"""
|
||||||
examples: >
|
examples: >-
|
||||||
To enable, add this to your ansible.cfg file in the defaults block
|
To enable, add this to your ansible.cfg file in the defaults block
|
||||||
|
|
||||||
[defaults]
|
[defaults]
|
||||||
@@ -97,7 +94,7 @@ examples: >
|
|||||||
use_tls = true
|
use_tls = true
|
||||||
token = dd21fc88-f00a-43ff-b977-e3a4233c53af
|
token = dd21fc88-f00a-43ff-b977-e3a4233c53af
|
||||||
flatten = false
|
flatten = false
|
||||||
'''
|
"""
|
||||||
|
|
||||||
import os
|
import os
|
||||||
import socket
|
import socket
|
||||||
@@ -135,7 +132,7 @@ class PlainTextSocketAppender(object):
|
|||||||
# Error message displayed when an incorrect Token has been detected
|
# Error message displayed when an incorrect Token has been detected
|
||||||
self.INVALID_TOKEN = "\n\nIt appears the LOGENTRIES_TOKEN parameter you entered is incorrect!\n\n"
|
self.INVALID_TOKEN = "\n\nIt appears the LOGENTRIES_TOKEN parameter you entered is incorrect!\n\n"
|
||||||
# Unicode Line separator character \u2028
|
# Unicode Line separator character \u2028
|
||||||
self.LINE_SEP = u'\u2028'
|
self.LINE_SEP = '\u2028'
|
||||||
|
|
||||||
self._display = display
|
self._display = display
|
||||||
self._conn = None
|
self._conn = None
|
||||||
@@ -153,7 +150,7 @@ class PlainTextSocketAppender(object):
|
|||||||
self.open_connection()
|
self.open_connection()
|
||||||
return
|
return
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
self._display.vvvv(u"Unable to connect to Logentries: %s" % to_text(e))
|
self._display.vvvv(f"Unable to connect to Logentries: {e}")
|
||||||
|
|
||||||
root_delay *= 2
|
root_delay *= 2
|
||||||
if root_delay > self.MAX_DELAY:
|
if root_delay > self.MAX_DELAY:
|
||||||
@@ -162,7 +159,7 @@ class PlainTextSocketAppender(object):
|
|||||||
wait_for = root_delay + random.uniform(0, root_delay)
|
wait_for = root_delay + random.uniform(0, root_delay)
|
||||||
|
|
||||||
try:
|
try:
|
||||||
self._display.vvvv("sleeping %s before retry" % wait_for)
|
self._display.vvvv(f"sleeping {wait_for} before retry")
|
||||||
time.sleep(wait_for)
|
time.sleep(wait_for)
|
||||||
except KeyboardInterrupt:
|
except KeyboardInterrupt:
|
||||||
raise
|
raise
|
||||||
@@ -175,8 +172,8 @@ class PlainTextSocketAppender(object):
|
|||||||
# Replace newlines with Unicode line separator
|
# Replace newlines with Unicode line separator
|
||||||
# for multi-line events
|
# for multi-line events
|
||||||
data = to_text(data, errors='surrogate_or_strict')
|
data = to_text(data, errors='surrogate_or_strict')
|
||||||
multiline = data.replace(u'\n', self.LINE_SEP)
|
multiline = data.replace('\n', self.LINE_SEP)
|
||||||
multiline += u"\n"
|
multiline += "\n"
|
||||||
# Send data, reconnect if needed
|
# Send data, reconnect if needed
|
||||||
while True:
|
while True:
|
||||||
try:
|
try:
|
||||||
@@ -249,7 +246,7 @@ class CallbackModule(CallbackBase):
|
|||||||
self.use_tls = self.get_option('use_tls')
|
self.use_tls = self.get_option('use_tls')
|
||||||
self.flatten = self.get_option('flatten')
|
self.flatten = self.get_option('flatten')
|
||||||
except KeyError as e:
|
except KeyError as e:
|
||||||
self._display.warning(u"Missing option for Logentries callback plugin: %s" % to_text(e))
|
self._display.warning(f"Missing option for Logentries callback plugin: {e}")
|
||||||
self.disabled = True
|
self.disabled = True
|
||||||
|
|
||||||
try:
|
try:
|
||||||
@@ -268,10 +265,10 @@ class CallbackModule(CallbackBase):
|
|||||||
|
|
||||||
if not self.disabled:
|
if not self.disabled:
|
||||||
if self.use_tls:
|
if self.use_tls:
|
||||||
self._display.vvvv("Connecting to %s:%s with TLS" % (self.api_url, self.api_tls_port))
|
self._display.vvvv(f"Connecting to {self.api_url}:{self.api_tls_port} with TLS")
|
||||||
self._appender = TLSSocketAppender(display=self._display, LE_API=self.api_url, LE_TLS_PORT=self.api_tls_port)
|
self._appender = TLSSocketAppender(display=self._display, LE_API=self.api_url, LE_TLS_PORT=self.api_tls_port)
|
||||||
else:
|
else:
|
||||||
self._display.vvvv("Connecting to %s:%s" % (self.api_url, self.api_port))
|
self._display.vvvv(f"Connecting to {self.api_url}:{self.api_port}")
|
||||||
self._appender = PlainTextSocketAppender(display=self._display, LE_API=self.api_url, LE_PORT=self.api_port)
|
self._appender = PlainTextSocketAppender(display=self._display, LE_API=self.api_url, LE_PORT=self.api_port)
|
||||||
self._appender.reopen_connection()
|
self._appender.reopen_connection()
|
||||||
|
|
||||||
@@ -284,7 +281,7 @@ class CallbackModule(CallbackBase):
|
|||||||
|
|
||||||
def emit(self, record):
|
def emit(self, record):
|
||||||
msg = record.rstrip('\n')
|
msg = record.rstrip('\n')
|
||||||
msg = "{0} {1}".format(self.token, msg)
|
msg = f"{self.token} {msg}"
|
||||||
self._appender.put(msg)
|
self._appender.put(msg)
|
||||||
self._display.vvvv("Sent event to logentries")
|
self._display.vvvv("Sent event to logentries")
|
||||||
|
|
||||||
|
|||||||
@@ -4,98 +4,96 @@
|
|||||||
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
|
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||||
# SPDX-License-Identifier: GPL-3.0-or-later
|
# SPDX-License-Identifier: GPL-3.0-or-later
|
||||||
|
|
||||||
from __future__ import (absolute_import, division, print_function)
|
from __future__ import annotations
|
||||||
__metaclass__ = type
|
|
||||||
|
|
||||||
DOCUMENTATION = r'''
|
DOCUMENTATION = r"""
|
||||||
author: Yevhen Khmelenko (@ujenmr)
|
author: Yevhen Khmelenko (@ujenmr)
|
||||||
name: logstash
|
name: logstash
|
||||||
type: notification
|
type: notification
|
||||||
short_description: Sends events to Logstash
|
short_description: Sends events to Logstash
|
||||||
description:
|
description:
|
||||||
- This callback will report facts and task events to Logstash U(https://www.elastic.co/products/logstash).
|
- This callback will report facts and task events to Logstash U(https://www.elastic.co/products/logstash).
|
||||||
requirements:
|
requirements:
|
||||||
- whitelisting in configuration
|
- whitelisting in configuration
|
||||||
- logstash (Python library)
|
- logstash (Python library)
|
||||||
options:
|
options:
|
||||||
server:
|
server:
|
||||||
description: Address of the Logstash server.
|
description: Address of the Logstash server.
|
||||||
type: str
|
type: str
|
||||||
env:
|
env:
|
||||||
- name: LOGSTASH_SERVER
|
- name: LOGSTASH_SERVER
|
||||||
ini:
|
ini:
|
||||||
- section: callback_logstash
|
- section: callback_logstash
|
||||||
key: server
|
key: server
|
||||||
version_added: 1.0.0
|
version_added: 1.0.0
|
||||||
default: localhost
|
default: localhost
|
||||||
port:
|
port:
|
||||||
description: Port on which logstash is listening.
|
description: Port on which logstash is listening.
|
||||||
type: int
|
type: int
|
||||||
env:
|
env:
|
||||||
- name: LOGSTASH_PORT
|
- name: LOGSTASH_PORT
|
||||||
ini:
|
ini:
|
||||||
- section: callback_logstash
|
- section: callback_logstash
|
||||||
key: port
|
key: port
|
||||||
version_added: 1.0.0
|
version_added: 1.0.0
|
||||||
default: 5000
|
default: 5000
|
||||||
type:
|
type:
|
||||||
description: Message type.
|
description: Message type.
|
||||||
type: str
|
type: str
|
||||||
env:
|
env:
|
||||||
- name: LOGSTASH_TYPE
|
- name: LOGSTASH_TYPE
|
||||||
ini:
|
ini:
|
||||||
- section: callback_logstash
|
- section: callback_logstash
|
||||||
key: type
|
key: type
|
||||||
version_added: 1.0.0
|
version_added: 1.0.0
|
||||||
default: ansible
|
default: ansible
|
||||||
pre_command:
|
pre_command:
|
||||||
description: Executes command before run and its result is added to the C(ansible_pre_command_output) logstash field.
|
description: Executes command before run and its result is added to the C(ansible_pre_command_output) logstash field.
|
||||||
type: str
|
type: str
|
||||||
version_added: 2.0.0
|
version_added: 2.0.0
|
||||||
ini:
|
ini:
|
||||||
- section: callback_logstash
|
- section: callback_logstash
|
||||||
key: pre_command
|
key: pre_command
|
||||||
env:
|
env:
|
||||||
- name: LOGSTASH_PRE_COMMAND
|
- name: LOGSTASH_PRE_COMMAND
|
||||||
format_version:
|
format_version:
|
||||||
description: Logging format.
|
description: Logging format.
|
||||||
type: str
|
type: str
|
||||||
version_added: 2.0.0
|
version_added: 2.0.0
|
||||||
ini:
|
ini:
|
||||||
- section: callback_logstash
|
- section: callback_logstash
|
||||||
key: format_version
|
key: format_version
|
||||||
env:
|
env:
|
||||||
- name: LOGSTASH_FORMAT_VERSION
|
- name: LOGSTASH_FORMAT_VERSION
|
||||||
default: v1
|
default: v1
|
||||||
choices:
|
choices:
|
||||||
- v1
|
- v1
|
||||||
- v2
|
- v2
|
||||||
|
"""
|
||||||
|
|
||||||
'''
|
EXAMPLES = r"""
|
||||||
|
|
||||||
EXAMPLES = r'''
|
|
||||||
ansible.cfg: |
|
ansible.cfg: |
|
||||||
# Enable Callback plugin
|
# Enable Callback plugin
|
||||||
[defaults]
|
[defaults]
|
||||||
callback_whitelist = community.general.logstash
|
callback_whitelist = community.general.logstash
|
||||||
|
|
||||||
[callback_logstash]
|
[callback_logstash]
|
||||||
server = logstash.example.com
|
server = logstash.example.com
|
||||||
port = 5000
|
port = 5000
|
||||||
pre_command = git rev-parse HEAD
|
pre_command = git rev-parse HEAD
|
||||||
type = ansible
|
type = ansible
|
||||||
|
|
||||||
11-input-tcp.conf: |
|
11-input-tcp.conf: |-
|
||||||
# Enable Logstash TCP Input
|
# Enable Logstash TCP Input
|
||||||
input {
|
input {
|
||||||
tcp {
|
tcp {
|
||||||
port => 5000
|
port => 5000
|
||||||
codec => json
|
codec => json
|
||||||
add_field => { "[@metadata][beat]" => "notify" }
|
add_field => { "[@metadata][beat]" => "notify" }
|
||||||
add_field => { "[@metadata][type]" => "ansible" }
|
add_field => { "[@metadata][type]" => "ansible" }
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
'''
|
"""
|
||||||
|
|
||||||
import os
|
import os
|
||||||
import json
|
import json
|
||||||
@@ -129,9 +127,7 @@ class CallbackModule(CallbackBase):
|
|||||||
|
|
||||||
if not HAS_LOGSTASH:
|
if not HAS_LOGSTASH:
|
||||||
self.disabled = True
|
self.disabled = True
|
||||||
self._display.warning("The required python-logstash/python3-logstash is not installed. "
|
self._display.warning("The required python3-logstash is not installed.")
|
||||||
"pip install python-logstash for Python 2"
|
|
||||||
"pip install python3-logstash for Python 3")
|
|
||||||
|
|
||||||
self.start_time = now()
|
self.start_time = now()
|
||||||
|
|
||||||
|
|||||||
@@ -4,84 +4,82 @@
|
|||||||
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
|
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||||
# SPDX-License-Identifier: GPL-3.0-or-later
|
# SPDX-License-Identifier: GPL-3.0-or-later
|
||||||
|
|
||||||
from __future__ import (absolute_import, division, print_function)
|
from __future__ import annotations
|
||||||
__metaclass__ = type
|
|
||||||
|
|
||||||
DOCUMENTATION = '''
|
DOCUMENTATION = r"""
|
||||||
name: mail
|
name: mail
|
||||||
type: notification
|
type: notification
|
||||||
short_description: Sends failure events via email
|
short_description: Sends failure events through email
|
||||||
description:
|
description:
|
||||||
- This callback will report failures via email.
|
- This callback will report failures through email.
|
||||||
author:
|
author:
|
||||||
- Dag Wieers (@dagwieers)
|
- Dag Wieers (@dagwieers)
|
||||||
requirements:
|
requirements:
|
||||||
- whitelisting in configuration
|
- whitelisting in configuration
|
||||||
options:
|
options:
|
||||||
mta:
|
mta:
|
||||||
description:
|
description:
|
||||||
- Mail Transfer Agent, server that accepts SMTP.
|
- Mail Transfer Agent, server that accepts SMTP.
|
||||||
type: str
|
type: str
|
||||||
env:
|
env:
|
||||||
- name: SMTPHOST
|
- name: SMTPHOST
|
||||||
ini:
|
ini:
|
||||||
- section: callback_mail
|
- section: callback_mail
|
||||||
key: smtphost
|
key: smtphost
|
||||||
default: localhost
|
default: localhost
|
||||||
mtaport:
|
mtaport:
|
||||||
description:
|
description:
|
||||||
- Mail Transfer Agent Port.
|
- Mail Transfer Agent Port.
|
||||||
- Port at which server SMTP.
|
- Port at which server SMTP.
|
||||||
type: int
|
type: int
|
||||||
ini:
|
ini:
|
||||||
- section: callback_mail
|
- section: callback_mail
|
||||||
key: smtpport
|
key: smtpport
|
||||||
default: 25
|
default: 25
|
||||||
to:
|
to:
|
||||||
description:
|
description:
|
||||||
- Mail recipient.
|
- Mail recipient.
|
||||||
type: list
|
type: list
|
||||||
elements: str
|
elements: str
|
||||||
ini:
|
ini:
|
||||||
- section: callback_mail
|
- section: callback_mail
|
||||||
key: to
|
key: to
|
||||||
default: [root]
|
default: [root]
|
||||||
sender:
|
sender:
|
||||||
description:
|
description:
|
||||||
- Mail sender.
|
- Mail sender.
|
||||||
- This is required since community.general 6.0.0.
|
- This is required since community.general 6.0.0.
|
||||||
type: str
|
type: str
|
||||||
required: true
|
required: true
|
||||||
ini:
|
ini:
|
||||||
- section: callback_mail
|
- section: callback_mail
|
||||||
key: sender
|
key: sender
|
||||||
cc:
|
cc:
|
||||||
description:
|
description:
|
||||||
- CC'd recipients.
|
- CC'd recipients.
|
||||||
type: list
|
type: list
|
||||||
elements: str
|
elements: str
|
||||||
ini:
|
ini:
|
||||||
- section: callback_mail
|
- section: callback_mail
|
||||||
key: cc
|
key: cc
|
||||||
bcc:
|
bcc:
|
||||||
description:
|
description:
|
||||||
- BCC'd recipients.
|
- BCC'd recipients.
|
||||||
type: list
|
type: list
|
||||||
elements: str
|
elements: str
|
||||||
ini:
|
ini:
|
||||||
- section: callback_mail
|
- section: callback_mail
|
||||||
key: bcc
|
key: bcc
|
||||||
message_id_domain:
|
message_id_domain:
|
||||||
description:
|
description:
|
||||||
- The domain name to use for the L(Message-ID header, https://en.wikipedia.org/wiki/Message-ID).
|
- The domain name to use for the L(Message-ID header, https://en.wikipedia.org/wiki/Message-ID).
|
||||||
- The default is the hostname of the control node.
|
- The default is the hostname of the control node.
|
||||||
type: str
|
type: str
|
||||||
ini:
|
ini:
|
||||||
- section: callback_mail
|
- section: callback_mail
|
||||||
key: message_id_domain
|
key: message_id_domain
|
||||||
version_added: 8.2.0
|
version_added: 8.2.0
|
||||||
|
"""
|
||||||
'''
|
|
||||||
|
|
||||||
import json
|
import json
|
||||||
import os
|
import os
|
||||||
@@ -135,14 +133,14 @@ class CallbackModule(CallbackBase):
|
|||||||
if self.bcc:
|
if self.bcc:
|
||||||
bcc_addresses = email.utils.getaddresses(self.bcc)
|
bcc_addresses = email.utils.getaddresses(self.bcc)
|
||||||
|
|
||||||
content = 'Date: %s\n' % email.utils.formatdate()
|
content = f'Date: {email.utils.formatdate()}\n'
|
||||||
content += 'From: %s\n' % email.utils.formataddr(sender_address)
|
content += f'From: {email.utils.formataddr(sender_address)}\n'
|
||||||
if self.to:
|
if self.to:
|
||||||
content += 'To: %s\n' % ', '.join([email.utils.formataddr(pair) for pair in to_addresses])
|
content += f"To: {', '.join([email.utils.formataddr(pair) for pair in to_addresses])}\n"
|
||||||
if self.cc:
|
if self.cc:
|
||||||
content += 'Cc: %s\n' % ', '.join([email.utils.formataddr(pair) for pair in cc_addresses])
|
content += f"Cc: {', '.join([email.utils.formataddr(pair) for pair in cc_addresses])}\n"
|
||||||
content += 'Message-ID: %s\n' % email.utils.make_msgid(domain=self.get_option('message_id_domain'))
|
content += f"Message-ID: {email.utils.make_msgid(domain=self.get_option('message_id_domain'))}\n"
|
||||||
content += 'Subject: %s\n\n' % subject.strip()
|
content += f'Subject: {subject.strip()}\n\n'
|
||||||
content += body
|
content += body
|
||||||
|
|
||||||
addresses = to_addresses
|
addresses = to_addresses
|
||||||
@@ -159,23 +157,22 @@ class CallbackModule(CallbackBase):
|
|||||||
smtp.quit()
|
smtp.quit()
|
||||||
|
|
||||||
def subject_msg(self, multiline, failtype, linenr):
|
def subject_msg(self, multiline, failtype, linenr):
|
||||||
return '%s: %s' % (failtype, multiline.strip('\r\n').splitlines()[linenr])
|
msg = multiline.strip('\r\n').splitlines()[linenr]
|
||||||
|
return f'{failtype}: {msg}'
|
||||||
|
|
||||||
def indent(self, multiline, indent=8):
|
def indent(self, multiline, indent=8):
|
||||||
return re.sub('^', ' ' * indent, multiline, flags=re.MULTILINE)
|
return re.sub('^', ' ' * indent, multiline, flags=re.MULTILINE)
|
||||||
|
|
||||||
def body_blob(self, multiline, texttype):
|
def body_blob(self, multiline, texttype):
|
||||||
''' Turn some text output in a well-indented block for sending in a mail body '''
|
''' Turn some text output in a well-indented block for sending in a mail body '''
|
||||||
intro = 'with the following %s:\n\n' % texttype
|
intro = f'with the following {texttype}:\n\n'
|
||||||
blob = ''
|
blob = "\n".join(multiline.strip('\r\n').splitlines())
|
||||||
for line in multiline.strip('\r\n').splitlines():
|
return f"{intro}{self.indent(blob)}\n"
|
||||||
blob += '%s\n' % line
|
|
||||||
return intro + self.indent(blob) + '\n'
|
|
||||||
|
|
||||||
def mail_result(self, result, failtype):
|
def mail_result(self, result, failtype):
|
||||||
host = result._host.get_name()
|
host = result._host.get_name()
|
||||||
if not self.sender:
|
if not self.sender:
|
||||||
self.sender = '"Ansible: %s" <root>' % host
|
self.sender = f'"Ansible: {host}" <root>'
|
||||||
|
|
||||||
# Add subject
|
# Add subject
|
||||||
if self.itembody:
|
if self.itembody:
|
||||||
@@ -191,31 +188,32 @@ class CallbackModule(CallbackBase):
|
|||||||
elif result._result.get('exception'): # Unrelated exceptions are added to output :-/
|
elif result._result.get('exception'): # Unrelated exceptions are added to output :-/
|
||||||
subject = self.subject_msg(result._result['exception'], failtype, -1)
|
subject = self.subject_msg(result._result['exception'], failtype, -1)
|
||||||
else:
|
else:
|
||||||
subject = '%s: %s' % (failtype, result._task.name or result._task.action)
|
subject = f'{failtype}: {result._task.name or result._task.action}'
|
||||||
|
|
||||||
# Make playbook name visible (e.g. in Outlook/Gmail condensed view)
|
# Make playbook name visible (e.g. in Outlook/Gmail condensed view)
|
||||||
body = 'Playbook: %s\n' % os.path.basename(self.playbook._file_name)
|
body = f'Playbook: {os.path.basename(self.playbook._file_name)}\n'
|
||||||
if result._task.name:
|
if result._task.name:
|
||||||
body += 'Task: %s\n' % result._task.name
|
body += f'Task: {result._task.name}\n'
|
||||||
body += 'Module: %s\n' % result._task.action
|
body += f'Module: {result._task.action}\n'
|
||||||
body += 'Host: %s\n' % host
|
body += f'Host: {host}\n'
|
||||||
body += '\n'
|
body += '\n'
|
||||||
|
|
||||||
# Add task information (as much as possible)
|
# Add task information (as much as possible)
|
||||||
body += 'The following task failed:\n\n'
|
body += 'The following task failed:\n\n'
|
||||||
if 'invocation' in result._result:
|
if 'invocation' in result._result:
|
||||||
body += self.indent('%s: %s\n' % (result._task.action, json.dumps(result._result['invocation']['module_args'], indent=4)))
|
body += self.indent(f"{result._task.action}: {json.dumps(result._result['invocation']['module_args'], indent=4)}\n")
|
||||||
elif result._task.name:
|
elif result._task.name:
|
||||||
body += self.indent('%s (%s)\n' % (result._task.name, result._task.action))
|
body += self.indent(f'{result._task.name} ({result._task.action})\n')
|
||||||
else:
|
else:
|
||||||
body += self.indent('%s\n' % result._task.action)
|
body += self.indent(f'{result._task.action}\n')
|
||||||
body += '\n'
|
body += '\n'
|
||||||
|
|
||||||
# Add item / message
|
# Add item / message
|
||||||
if self.itembody:
|
if self.itembody:
|
||||||
body += self.itembody
|
body += self.itembody
|
||||||
elif result._result.get('failed_when_result') is True:
|
elif result._result.get('failed_when_result') is True:
|
||||||
body += "due to the following condition:\n\n" + self.indent('failed_when:\n- ' + '\n- '.join(result._task.failed_when)) + '\n\n'
|
fail_cond = self.indent('failed_when:\n- ' + '\n- '.join(result._task.failed_when))
|
||||||
|
body += f"due to the following condition:\n\n{fail_cond}\n\n"
|
||||||
elif result._result.get('msg'):
|
elif result._result.get('msg'):
|
||||||
body += self.body_blob(result._result['msg'], 'message')
|
body += self.body_blob(result._result['msg'], 'message')
|
||||||
|
|
||||||
@@ -228,13 +226,13 @@ class CallbackModule(CallbackBase):
|
|||||||
body += self.body_blob(result._result['exception'], 'exception')
|
body += self.body_blob(result._result['exception'], 'exception')
|
||||||
if result._result.get('warnings'):
|
if result._result.get('warnings'):
|
||||||
for i in range(len(result._result.get('warnings'))):
|
for i in range(len(result._result.get('warnings'))):
|
||||||
body += self.body_blob(result._result['warnings'][i], 'exception %d' % (i + 1))
|
body += self.body_blob(result._result['warnings'][i], f'exception {i + 1}')
|
||||||
if result._result.get('deprecations'):
|
if result._result.get('deprecations'):
|
||||||
for i in range(len(result._result.get('deprecations'))):
|
for i in range(len(result._result.get('deprecations'))):
|
||||||
body += self.body_blob(result._result['deprecations'][i], 'exception %d' % (i + 1))
|
body += self.body_blob(result._result['deprecations'][i], f'exception {i + 1}')
|
||||||
|
|
||||||
body += 'and a complete dump of the error:\n\n'
|
body += 'and a complete dump of the error:\n\n'
|
||||||
body += self.indent('%s: %s' % (failtype, json.dumps(result._result, cls=AnsibleJSONEncoder, indent=4)))
|
body += self.indent(f'{failtype}: {json.dumps(result._result, cls=AnsibleJSONEncoder, indent=4)}')
|
||||||
|
|
||||||
self.mail(subject=subject, body=body)
|
self.mail(subject=subject, body=body)
|
||||||
|
|
||||||
@@ -257,4 +255,4 @@ class CallbackModule(CallbackBase):
|
|||||||
def v2_runner_item_on_failed(self, result):
|
def v2_runner_item_on_failed(self, result):
|
||||||
# Pass item information to task failure
|
# Pass item information to task failure
|
||||||
self.itemsubject = result._result['msg']
|
self.itemsubject = result._result['msg']
|
||||||
self.itembody += self.body_blob(json.dumps(result._result, cls=AnsibleJSONEncoder, indent=4), "failed item dump '%(item)s'" % result._result)
|
self.itembody += self.body_blob(json.dumps(result._result, cls=AnsibleJSONEncoder, indent=4), f"failed item dump '{result._result['item']}'")
|
||||||
|
|||||||
@@ -4,70 +4,70 @@
|
|||||||
# SPDX-License-Identifier: GPL-3.0-or-later
|
# SPDX-License-Identifier: GPL-3.0-or-later
|
||||||
|
|
||||||
# Make coding more python3-ish
|
# Make coding more python3-ish
|
||||||
from __future__ import (absolute_import, division, print_function)
|
from __future__ import annotations
|
||||||
__metaclass__ = type
|
|
||||||
|
|
||||||
DOCUMENTATION = '''
|
DOCUMENTATION = r"""
|
||||||
name: nrdp
|
name: nrdp
|
||||||
type: notification
|
type: notification
|
||||||
author: "Remi VERCHERE (@rverchere)"
|
author: "Remi VERCHERE (@rverchere)"
|
||||||
short_description: Post task results to a Nagios server through nrdp
|
short_description: Post task results to a Nagios server through nrdp
|
||||||
description:
|
description:
|
||||||
- This callback send playbook result to Nagios.
|
- This callback send playbook result to Nagios.
|
||||||
- Nagios shall use NRDP to receive passive events.
|
- Nagios shall use NRDP to receive passive events.
|
||||||
- The passive check is sent to a dedicated host/service for Ansible.
|
- The passive check is sent to a dedicated host/service for Ansible.
|
||||||
options:
|
options:
|
||||||
url:
|
url:
|
||||||
description: URL of the nrdp server.
|
description: URL of the nrdp server.
|
||||||
required: true
|
required: true
|
||||||
env:
|
env:
|
||||||
- name : NRDP_URL
|
- name: NRDP_URL
|
||||||
ini:
|
ini:
|
||||||
- section: callback_nrdp
|
- section: callback_nrdp
|
||||||
key: url
|
key: url
|
||||||
type: string
|
type: string
|
||||||
validate_certs:
|
validate_certs:
|
||||||
description: Validate the SSL certificate of the nrdp server. (Used for HTTPS URLs.)
|
description: Validate the SSL certificate of the nrdp server. (Used for HTTPS URLs).
|
||||||
env:
|
env:
|
||||||
- name: NRDP_VALIDATE_CERTS
|
- name: NRDP_VALIDATE_CERTS
|
||||||
ini:
|
ini:
|
||||||
- section: callback_nrdp
|
- section: callback_nrdp
|
||||||
key: validate_nrdp_certs
|
key: validate_nrdp_certs
|
||||||
- section: callback_nrdp
|
- section: callback_nrdp
|
||||||
key: validate_certs
|
key: validate_certs
|
||||||
type: boolean
|
type: boolean
|
||||||
default: false
|
default: false
|
||||||
aliases: [ validate_nrdp_certs ]
|
aliases: [validate_nrdp_certs]
|
||||||
token:
|
token:
|
||||||
description: Token to be allowed to push nrdp events.
|
description: Token to be allowed to push nrdp events.
|
||||||
required: true
|
required: true
|
||||||
env:
|
env:
|
||||||
- name: NRDP_TOKEN
|
- name: NRDP_TOKEN
|
||||||
ini:
|
ini:
|
||||||
- section: callback_nrdp
|
- section: callback_nrdp
|
||||||
key: token
|
key: token
|
||||||
type: string
|
type: string
|
||||||
hostname:
|
hostname:
|
||||||
description: Hostname where the passive check is linked to.
|
description: Hostname where the passive check is linked to.
|
||||||
required: true
|
required: true
|
||||||
env:
|
env:
|
||||||
- name : NRDP_HOSTNAME
|
- name: NRDP_HOSTNAME
|
||||||
ini:
|
ini:
|
||||||
- section: callback_nrdp
|
- section: callback_nrdp
|
||||||
key: hostname
|
key: hostname
|
||||||
type: string
|
type: string
|
||||||
servicename:
|
servicename:
|
||||||
description: Service where the passive check is linked to.
|
description: Service where the passive check is linked to.
|
||||||
required: true
|
required: true
|
||||||
env:
|
env:
|
||||||
- name : NRDP_SERVICENAME
|
- name: NRDP_SERVICENAME
|
||||||
ini:
|
ini:
|
||||||
- section: callback_nrdp
|
- section: callback_nrdp
|
||||||
key: servicename
|
key: servicename
|
||||||
type: string
|
type: string
|
||||||
'''
|
"""
|
||||||
|
|
||||||
|
from urllib.parse import urlencode
|
||||||
|
|
||||||
from ansible.module_utils.six.moves.urllib.parse import urlencode
|
|
||||||
from ansible.module_utils.common.text.converters import to_bytes
|
from ansible.module_utils.common.text.converters import to_bytes
|
||||||
from ansible.module_utils.urls import open_url
|
from ansible.module_utils.urls import open_url
|
||||||
from ansible.plugins.callback import CallbackBase
|
from ansible.plugins.callback import CallbackBase
|
||||||
@@ -132,10 +132,10 @@ class CallbackModule(CallbackBase):
|
|||||||
xmldata = "<?xml version='1.0'?>\n"
|
xmldata = "<?xml version='1.0'?>\n"
|
||||||
xmldata += "<checkresults>\n"
|
xmldata += "<checkresults>\n"
|
||||||
xmldata += "<checkresult type='service'>\n"
|
xmldata += "<checkresult type='service'>\n"
|
||||||
xmldata += "<hostname>%s</hostname>\n" % self.hostname
|
xmldata += f"<hostname>{self.hostname}</hostname>\n"
|
||||||
xmldata += "<servicename>%s</servicename>\n" % self.servicename
|
xmldata += f"<servicename>{self.servicename}</servicename>\n"
|
||||||
xmldata += "<state>%d</state>\n" % state
|
xmldata += f"<state>{state}</state>\n"
|
||||||
xmldata += "<output>%s</output>\n" % msg
|
xmldata += f"<output>{msg}</output>\n"
|
||||||
xmldata += "</checkresult>\n"
|
xmldata += "</checkresult>\n"
|
||||||
xmldata += "</checkresults>\n"
|
xmldata += "</checkresults>\n"
|
||||||
|
|
||||||
@@ -152,7 +152,7 @@ class CallbackModule(CallbackBase):
|
|||||||
validate_certs=self.validate_nrdp_certs)
|
validate_certs=self.validate_nrdp_certs)
|
||||||
return response.read()
|
return response.read()
|
||||||
except Exception as ex:
|
except Exception as ex:
|
||||||
self._display.warning("NRDP callback cannot send result {0}".format(ex))
|
self._display.warning(f"NRDP callback cannot send result {ex}")
|
||||||
|
|
||||||
def v2_playbook_on_play_start(self, play):
|
def v2_playbook_on_play_start(self, play):
|
||||||
'''
|
'''
|
||||||
@@ -170,17 +170,16 @@ class CallbackModule(CallbackBase):
|
|||||||
critical = warning = 0
|
critical = warning = 0
|
||||||
for host in hosts:
|
for host in hosts:
|
||||||
stat = stats.summarize(host)
|
stat = stats.summarize(host)
|
||||||
gstats += "'%s_ok'=%d '%s_changed'=%d \
|
gstats += (
|
||||||
'%s_unreachable'=%d '%s_failed'=%d " % \
|
f"'{host}_ok'={stat['ok']} '{host}_changed'={stat['changed']} '{host}_unreachable'={stat['unreachable']} '{host}_failed'={stat['failures']} "
|
||||||
(host, stat['ok'], host, stat['changed'],
|
)
|
||||||
host, stat['unreachable'], host, stat['failures'])
|
|
||||||
# Critical when failed tasks or unreachable host
|
# Critical when failed tasks or unreachable host
|
||||||
critical += stat['failures']
|
critical += stat['failures']
|
||||||
critical += stat['unreachable']
|
critical += stat['unreachable']
|
||||||
# Warning when changed tasks
|
# Warning when changed tasks
|
||||||
warning += stat['changed']
|
warning += stat['changed']
|
||||||
|
|
||||||
msg = "%s | %s" % (name, gstats)
|
msg = f"{name} | {gstats}"
|
||||||
if critical:
|
if critical:
|
||||||
# Send Critical
|
# Send Critical
|
||||||
self._send_nrdp(self.CRITICAL, msg)
|
self._send_nrdp(self.CRITICAL, msg)
|
||||||
|
|||||||
@@ -4,19 +4,18 @@
|
|||||||
# SPDX-License-Identifier: GPL-3.0-or-later
|
# SPDX-License-Identifier: GPL-3.0-or-later
|
||||||
|
|
||||||
# Make coding more python3-ish
|
# Make coding more python3-ish
|
||||||
from __future__ import (absolute_import, division, print_function)
|
from __future__ import annotations
|
||||||
__metaclass__ = type
|
|
||||||
|
|
||||||
DOCUMENTATION = '''
|
DOCUMENTATION = r"""
|
||||||
author: Unknown (!UNKNOWN)
|
author: Unknown (!UNKNOWN)
|
||||||
name: 'null'
|
name: 'null'
|
||||||
type: stdout
|
type: stdout
|
||||||
requirements:
|
requirements:
|
||||||
- set as main display callback
|
- set as main display callback
|
||||||
short_description: Don't display stuff to screen
|
short_description: do not display stuff to screen
|
||||||
description:
|
description:
|
||||||
- This callback prevents outputting events to screen.
|
- This callback prevents outputting events to screen.
|
||||||
'''
|
"""
|
||||||
|
|
||||||
from ansible.plugins.callback import CallbackBase
|
from ansible.plugins.callback import CallbackBase
|
||||||
|
|
||||||
|
|||||||
@@ -3,122 +3,122 @@
|
|||||||
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
|
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||||
# SPDX-License-Identifier: GPL-3.0-or-later
|
# SPDX-License-Identifier: GPL-3.0-or-later
|
||||||
|
|
||||||
from __future__ import (absolute_import, division, print_function)
|
from __future__ import annotations
|
||||||
__metaclass__ = type
|
|
||||||
|
|
||||||
DOCUMENTATION = '''
|
DOCUMENTATION = r"""
|
||||||
author: Victor Martinez (@v1v) <VictorMartinezRubio@gmail.com>
|
author: Victor Martinez (@v1v) <VictorMartinezRubio@gmail.com>
|
||||||
name: opentelemetry
|
name: opentelemetry
|
||||||
type: notification
|
type: notification
|
||||||
short_description: Create distributed traces with OpenTelemetry
|
short_description: Create distributed traces with OpenTelemetry
|
||||||
version_added: 3.7.0
|
version_added: 3.7.0
|
||||||
|
description:
|
||||||
|
- This callback creates distributed traces for each Ansible task with OpenTelemetry.
|
||||||
|
- You can configure the OpenTelemetry exporter and SDK with environment variables.
|
||||||
|
- See U(https://opentelemetry-python.readthedocs.io/en/latest/exporter/otlp/otlp.html).
|
||||||
|
- See
|
||||||
|
U(https://opentelemetry-python.readthedocs.io/en/latest/sdk/environment_variables.html#opentelemetry-sdk-environment-variables).
|
||||||
|
options:
|
||||||
|
hide_task_arguments:
|
||||||
|
default: false
|
||||||
|
type: bool
|
||||||
description:
|
description:
|
||||||
- This callback creates distributed traces for each Ansible task with OpenTelemetry.
|
- Hide the arguments for a task.
|
||||||
- You can configure the OpenTelemetry exporter and SDK with environment variables.
|
env:
|
||||||
- See U(https://opentelemetry-python.readthedocs.io/en/latest/exporter/otlp/otlp.html).
|
- name: ANSIBLE_OPENTELEMETRY_HIDE_TASK_ARGUMENTS
|
||||||
- See U(https://opentelemetry-python.readthedocs.io/en/latest/sdk/environment_variables.html#opentelemetry-sdk-environment-variables).
|
ini:
|
||||||
options:
|
- section: callback_opentelemetry
|
||||||
hide_task_arguments:
|
key: hide_task_arguments
|
||||||
default: false
|
version_added: 5.3.0
|
||||||
type: bool
|
enable_from_environment:
|
||||||
description:
|
type: str
|
||||||
- Hide the arguments for a task.
|
description:
|
||||||
env:
|
- Whether to enable this callback only if the given environment variable exists and it is set to V(true).
|
||||||
- name: ANSIBLE_OPENTELEMETRY_HIDE_TASK_ARGUMENTS
|
- This is handy when you use Configuration as Code and want to send distributed traces if running in the CI rather when
|
||||||
ini:
|
running Ansible locally.
|
||||||
- section: callback_opentelemetry
|
- For such, it evaluates the given O(enable_from_environment) value as environment variable and if set to true this
|
||||||
key: hide_task_arguments
|
plugin will be enabled.
|
||||||
version_added: 5.3.0
|
env:
|
||||||
enable_from_environment:
|
- name: ANSIBLE_OPENTELEMETRY_ENABLE_FROM_ENVIRONMENT
|
||||||
type: str
|
ini:
|
||||||
description:
|
- section: callback_opentelemetry
|
||||||
- Whether to enable this callback only if the given environment variable exists and it is set to V(true).
|
key: enable_from_environment
|
||||||
- This is handy when you use Configuration as Code and want to send distributed traces
|
version_added: 5.3.0
|
||||||
if running in the CI rather when running Ansible locally.
|
version_added: 3.8.0
|
||||||
- For such, it evaluates the given O(enable_from_environment) value as environment variable
|
otel_service_name:
|
||||||
and if set to true this plugin will be enabled.
|
default: ansible
|
||||||
env:
|
type: str
|
||||||
- name: ANSIBLE_OPENTELEMETRY_ENABLE_FROM_ENVIRONMENT
|
description:
|
||||||
ini:
|
- The service name resource attribute.
|
||||||
- section: callback_opentelemetry
|
env:
|
||||||
key: enable_from_environment
|
- name: OTEL_SERVICE_NAME
|
||||||
version_added: 5.3.0
|
ini:
|
||||||
version_added: 3.8.0
|
- section: callback_opentelemetry
|
||||||
otel_service_name:
|
key: otel_service_name
|
||||||
default: ansible
|
version_added: 5.3.0
|
||||||
type: str
|
traceparent:
|
||||||
description:
|
default: None
|
||||||
- The service name resource attribute.
|
type: str
|
||||||
env:
|
description:
|
||||||
- name: OTEL_SERVICE_NAME
|
- The L(W3C Trace Context header traceparent,https://www.w3.org/TR/trace-context-1/#traceparent-header).
|
||||||
ini:
|
env:
|
||||||
- section: callback_opentelemetry
|
- name: TRACEPARENT
|
||||||
key: otel_service_name
|
disable_logs:
|
||||||
version_added: 5.3.0
|
default: false
|
||||||
traceparent:
|
type: bool
|
||||||
default: None
|
description:
|
||||||
type: str
|
- Disable sending logs.
|
||||||
description:
|
env:
|
||||||
- The L(W3C Trace Context header traceparent,https://www.w3.org/TR/trace-context-1/#traceparent-header).
|
- name: ANSIBLE_OPENTELEMETRY_DISABLE_LOGS
|
||||||
env:
|
ini:
|
||||||
- name: TRACEPARENT
|
- section: callback_opentelemetry
|
||||||
disable_logs:
|
key: disable_logs
|
||||||
default: false
|
version_added: 5.8.0
|
||||||
type: bool
|
disable_attributes_in_logs:
|
||||||
description:
|
default: false
|
||||||
- Disable sending logs.
|
type: bool
|
||||||
env:
|
description:
|
||||||
- name: ANSIBLE_OPENTELEMETRY_DISABLE_LOGS
|
- Disable populating span attributes to the logs.
|
||||||
ini:
|
env:
|
||||||
- section: callback_opentelemetry
|
- name: ANSIBLE_OPENTELEMETRY_DISABLE_ATTRIBUTES_IN_LOGS
|
||||||
key: disable_logs
|
ini:
|
||||||
version_added: 5.8.0
|
- section: callback_opentelemetry
|
||||||
disable_attributes_in_logs:
|
key: disable_attributes_in_logs
|
||||||
default: false
|
version_added: 7.1.0
|
||||||
type: bool
|
store_spans_in_file:
|
||||||
description:
|
type: str
|
||||||
- Disable populating span attributes to the logs.
|
description:
|
||||||
env:
|
- It stores the exported spans in the given file.
|
||||||
- name: ANSIBLE_OPENTELEMETRY_DISABLE_ATTRIBUTES_IN_LOGS
|
env:
|
||||||
ini:
|
- name: ANSIBLE_OPENTELEMETRY_STORE_SPANS_IN_FILE
|
||||||
- section: callback_opentelemetry
|
ini:
|
||||||
key: disable_attributes_in_logs
|
- section: callback_opentelemetry
|
||||||
version_added: 7.1.0
|
key: store_spans_in_file
|
||||||
store_spans_in_file:
|
version_added: 9.0.0
|
||||||
type: str
|
otel_exporter_otlp_traces_protocol:
|
||||||
description:
|
type: str
|
||||||
- It stores the exported spans in the given file
|
description:
|
||||||
env:
|
- E(OTEL_EXPORTER_OTLP_TRACES_PROTOCOL) represents the transport protocol for spans.
|
||||||
- name: ANSIBLE_OPENTELEMETRY_STORE_SPANS_IN_FILE
|
- See
|
||||||
ini:
|
U(https://opentelemetry-python.readthedocs.io/en/latest/sdk/environment_variables.html#envvar-OTEL_EXPORTER_OTLP_TRACES_PROTOCOL).
|
||||||
- section: callback_opentelemetry
|
default: grpc
|
||||||
key: store_spans_in_file
|
choices:
|
||||||
version_added: 9.0.0
|
- grpc
|
||||||
otel_exporter_otlp_traces_protocol:
|
- http/protobuf
|
||||||
type: str
|
env:
|
||||||
description:
|
- name: OTEL_EXPORTER_OTLP_TRACES_PROTOCOL
|
||||||
- E(OTEL_EXPORTER_OTLP_TRACES_PROTOCOL) represents the the transport protocol for spans.
|
ini:
|
||||||
- See
|
- section: callback_opentelemetry
|
||||||
U(https://opentelemetry-python.readthedocs.io/en/latest/sdk/environment_variables.html#envvar-OTEL_EXPORTER_OTLP_TRACES_PROTOCOL).
|
key: otel_exporter_otlp_traces_protocol
|
||||||
default: grpc
|
version_added: 9.0.0
|
||||||
choices:
|
requirements:
|
||||||
- grpc
|
- opentelemetry-api (Python library)
|
||||||
- http/protobuf
|
- opentelemetry-exporter-otlp (Python library)
|
||||||
env:
|
- opentelemetry-sdk (Python library)
|
||||||
- name: OTEL_EXPORTER_OTLP_TRACES_PROTOCOL
|
"""
|
||||||
ini:
|
|
||||||
- section: callback_opentelemetry
|
|
||||||
key: otel_exporter_otlp_traces_protocol
|
|
||||||
version_added: 9.0.0
|
|
||||||
requirements:
|
|
||||||
- opentelemetry-api (Python library)
|
|
||||||
- opentelemetry-exporter-otlp (Python library)
|
|
||||||
- opentelemetry-sdk (Python library)
|
|
||||||
'''
|
|
||||||
|
|
||||||
|
|
||||||
EXAMPLES = '''
|
EXAMPLES = r"""
|
||||||
examples: |
|
examples: |-
|
||||||
Enable the plugin in ansible.cfg:
|
Enable the plugin in ansible.cfg:
|
||||||
[defaults]
|
[defaults]
|
||||||
callbacks_enabled = community.general.opentelemetry
|
callbacks_enabled = community.general.opentelemetry
|
||||||
@@ -130,22 +130,19 @@ examples: |
|
|||||||
export OTEL_EXPORTER_OTLP_HEADERS="authorization=Bearer your_otel_token"
|
export OTEL_EXPORTER_OTLP_HEADERS="authorization=Bearer your_otel_token"
|
||||||
export OTEL_SERVICE_NAME=your_service_name
|
export OTEL_SERVICE_NAME=your_service_name
|
||||||
export ANSIBLE_OPENTELEMETRY_ENABLED=true
|
export ANSIBLE_OPENTELEMETRY_ENABLED=true
|
||||||
'''
|
"""
|
||||||
|
|
||||||
import getpass
|
import getpass
|
||||||
import json
|
import json
|
||||||
import os
|
import os
|
||||||
import socket
|
import socket
|
||||||
import sys
|
|
||||||
import time
|
|
||||||
import uuid
|
import uuid
|
||||||
|
|
||||||
from collections import OrderedDict
|
from collections import OrderedDict
|
||||||
from os.path import basename
|
from os.path import basename
|
||||||
|
from time import time_ns
|
||||||
|
from urllib.parse import urlparse
|
||||||
|
|
||||||
from ansible.errors import AnsibleError
|
from ansible.errors import AnsibleError
|
||||||
from ansible.module_utils.six import raise_from
|
|
||||||
from ansible.module_utils.six.moves.urllib.parse import urlparse
|
|
||||||
from ansible.plugins.callback import CallbackBase
|
from ansible.plugins.callback import CallbackBase
|
||||||
|
|
||||||
try:
|
try:
|
||||||
@@ -164,31 +161,12 @@ try:
|
|||||||
from opentelemetry.sdk.trace.export.in_memory_span_exporter import (
|
from opentelemetry.sdk.trace.export.in_memory_span_exporter import (
|
||||||
InMemorySpanExporter
|
InMemorySpanExporter
|
||||||
)
|
)
|
||||||
# Support for opentelemetry-api <= 1.12
|
|
||||||
try:
|
|
||||||
from opentelemetry.util._time import _time_ns
|
|
||||||
except ImportError as imp_exc:
|
|
||||||
OTEL_LIBRARY_TIME_NS_ERROR = imp_exc
|
|
||||||
else:
|
|
||||||
OTEL_LIBRARY_TIME_NS_ERROR = None
|
|
||||||
|
|
||||||
except ImportError as imp_exc:
|
except ImportError as imp_exc:
|
||||||
OTEL_LIBRARY_IMPORT_ERROR = imp_exc
|
OTEL_LIBRARY_IMPORT_ERROR = imp_exc
|
||||||
OTEL_LIBRARY_TIME_NS_ERROR = imp_exc
|
|
||||||
else:
|
else:
|
||||||
OTEL_LIBRARY_IMPORT_ERROR = None
|
OTEL_LIBRARY_IMPORT_ERROR = None
|
||||||
|
|
||||||
|
|
||||||
if sys.version_info >= (3, 7):
|
|
||||||
time_ns = time.time_ns
|
|
||||||
elif not OTEL_LIBRARY_TIME_NS_ERROR:
|
|
||||||
time_ns = _time_ns
|
|
||||||
else:
|
|
||||||
def time_ns():
|
|
||||||
# Support versions older than 3.7 with opentelemetry-api > 1.12
|
|
||||||
return int(time.time() * 1e9)
|
|
||||||
|
|
||||||
|
|
||||||
class TaskData:
|
class TaskData:
|
||||||
"""
|
"""
|
||||||
Data about an individual task.
|
Data about an individual task.
|
||||||
@@ -209,7 +187,7 @@ class TaskData:
|
|||||||
if host.uuid in self.host_data:
|
if host.uuid in self.host_data:
|
||||||
if host.status == 'included':
|
if host.status == 'included':
|
||||||
# concatenate task include output from multiple items
|
# concatenate task include output from multiple items
|
||||||
host.result = '%s\n%s' % (self.host_data[host.uuid].result, host.result)
|
host.result = f'{self.host_data[host.uuid].result}\n{host.result}'
|
||||||
else:
|
else:
|
||||||
return
|
return
|
||||||
|
|
||||||
@@ -347,7 +325,7 @@ class OpenTelemetrySource(object):
|
|||||||
def update_span_data(self, task_data, host_data, span, disable_logs, disable_attributes_in_logs):
|
def update_span_data(self, task_data, host_data, span, disable_logs, disable_attributes_in_logs):
|
||||||
""" update the span with the given TaskData and HostData """
|
""" update the span with the given TaskData and HostData """
|
||||||
|
|
||||||
name = '[%s] %s: %s' % (host_data.name, task_data.play, task_data.name)
|
name = f'[{host_data.name}] {task_data.play}: {task_data.name}'
|
||||||
|
|
||||||
message = 'success'
|
message = 'success'
|
||||||
res = {}
|
res = {}
|
||||||
@@ -470,7 +448,7 @@ class OpenTelemetrySource(object):
|
|||||||
def get_error_message_from_results(results, action):
|
def get_error_message_from_results(results, action):
|
||||||
for result in results:
|
for result in results:
|
||||||
if result.get('failed', False):
|
if result.get('failed', False):
|
||||||
return ('{0}({1}) - {2}').format(action, result.get('item', 'none'), OpenTelemetrySource.get_error_message(result))
|
return f"{action}({result.get('item', 'none')}) - {OpenTelemetrySource.get_error_message(result)}"
|
||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
def _last_line(text):
|
def _last_line(text):
|
||||||
@@ -482,14 +460,14 @@ class OpenTelemetrySource(object):
|
|||||||
message = result.get('msg', 'failed')
|
message = result.get('msg', 'failed')
|
||||||
exception = result.get('exception')
|
exception = result.get('exception')
|
||||||
stderr = result.get('stderr')
|
stderr = result.get('stderr')
|
||||||
return ('message: "{0}"\nexception: "{1}"\nstderr: "{2}"').format(message, exception, stderr)
|
return f"message: \"{message}\"\nexception: \"{exception}\"\nstderr: \"{stderr}\""
|
||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
def enrich_error_message_from_results(results, action):
|
def enrich_error_message_from_results(results, action):
|
||||||
message = ""
|
message = ""
|
||||||
for result in results:
|
for result in results:
|
||||||
if result.get('failed', False):
|
if result.get('failed', False):
|
||||||
message = ('{0}({1}) - {2}\n{3}').format(action, result.get('item', 'none'), OpenTelemetrySource.enrich_error_message(result), message)
|
message = f"{action}({result.get('item', 'none')}) - {OpenTelemetrySource.enrich_error_message(result)}\n{message}"
|
||||||
return message
|
return message
|
||||||
|
|
||||||
|
|
||||||
@@ -519,9 +497,9 @@ class CallbackModule(CallbackBase):
|
|||||||
self.otel_exporter_otlp_traces_protocol = None
|
self.otel_exporter_otlp_traces_protocol = None
|
||||||
|
|
||||||
if OTEL_LIBRARY_IMPORT_ERROR:
|
if OTEL_LIBRARY_IMPORT_ERROR:
|
||||||
raise_from(
|
raise AnsibleError(
|
||||||
AnsibleError('The `opentelemetry-api`, `opentelemetry-exporter-otlp` or `opentelemetry-sdk` must be installed to use this plugin'),
|
'The `opentelemetry-api`, `opentelemetry-exporter-otlp` or `opentelemetry-sdk` must be installed to use this plugin'
|
||||||
OTEL_LIBRARY_IMPORT_ERROR)
|
) from OTEL_LIBRARY_IMPORT_ERROR
|
||||||
|
|
||||||
self.tasks_data = OrderedDict()
|
self.tasks_data = OrderedDict()
|
||||||
|
|
||||||
@@ -535,8 +513,9 @@ class CallbackModule(CallbackBase):
|
|||||||
environment_variable = self.get_option('enable_from_environment')
|
environment_variable = self.get_option('enable_from_environment')
|
||||||
if environment_variable is not None and os.environ.get(environment_variable, 'false').lower() != 'true':
|
if environment_variable is not None and os.environ.get(environment_variable, 'false').lower() != 'true':
|
||||||
self.disabled = True
|
self.disabled = True
|
||||||
self._display.warning("The `enable_from_environment` option has been set and {0} is not enabled. "
|
self._display.warning(
|
||||||
"Disabling the `opentelemetry` callback plugin.".format(environment_variable))
|
f"The `enable_from_environment` option has been set and {environment_variable} is not enabled. Disabling the `opentelemetry` callback plugin."
|
||||||
|
)
|
||||||
|
|
||||||
self.hide_task_arguments = self.get_option('hide_task_arguments')
|
self.hide_task_arguments = self.get_option('hide_task_arguments')
|
||||||
|
|
||||||
|
|||||||
64
plugins/callback/print_task.py
Normal file
64
plugins/callback/print_task.py
Normal file
@@ -0,0 +1,64 @@
|
|||||||
|
# -*- coding: utf-8 -*-
|
||||||
|
# Copyright (c) 2025, Max Mitschke <maxmitschke@fastmail.com>
|
||||||
|
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||||
|
# SPDX-License-Identifier: GPL-3.0-or-later
|
||||||
|
|
||||||
|
from __future__ import (absolute_import, division, print_function)
|
||||||
|
__metaclass__ = type
|
||||||
|
|
||||||
|
DOCUMENTATION = r'''
|
||||||
|
name: print_task
|
||||||
|
type: aggregate
|
||||||
|
short_description: Prints playbook task snippet to job output
|
||||||
|
description:
|
||||||
|
- This plugin prints the currently executing playbook task to the job output.
|
||||||
|
version_added: 10.7.0
|
||||||
|
requirements:
|
||||||
|
- enable in configuration
|
||||||
|
'''
|
||||||
|
|
||||||
|
EXAMPLES = r'''
|
||||||
|
ansible.cfg: >
|
||||||
|
# Enable plugin
|
||||||
|
[defaults]
|
||||||
|
callbacks_enabled=community.general.print_task
|
||||||
|
'''
|
||||||
|
|
||||||
|
from yaml import load, dump
|
||||||
|
|
||||||
|
try:
|
||||||
|
from yaml import CSafeDumper as SafeDumper
|
||||||
|
from yaml import CSafeLoader as SafeLoader
|
||||||
|
except ImportError:
|
||||||
|
from yaml import SafeDumper, SafeLoader
|
||||||
|
|
||||||
|
from ansible.plugins.callback import CallbackBase
|
||||||
|
|
||||||
|
|
||||||
|
class CallbackModule(CallbackBase):
|
||||||
|
"""
|
||||||
|
This callback module tells you how long your plays ran for.
|
||||||
|
"""
|
||||||
|
CALLBACK_VERSION = 2.0
|
||||||
|
CALLBACK_TYPE = 'aggregate'
|
||||||
|
CALLBACK_NAME = 'community.general.print_task'
|
||||||
|
|
||||||
|
CALLBACK_NEEDS_ENABLED = True
|
||||||
|
|
||||||
|
def __init__(self):
|
||||||
|
super(CallbackModule, self).__init__()
|
||||||
|
self._printed_message = False
|
||||||
|
|
||||||
|
def _print_task(self, task):
|
||||||
|
if hasattr(task, '_ds'):
|
||||||
|
task_snippet = load(str([task._ds.copy()]), Loader=SafeLoader)
|
||||||
|
task_yaml = dump(task_snippet, sort_keys=False, Dumper=SafeDumper)
|
||||||
|
self._display.display(f"\n{task_yaml}\n")
|
||||||
|
self._printed_message = True
|
||||||
|
|
||||||
|
def v2_playbook_on_task_start(self, task, is_conditional):
|
||||||
|
self._printed_message = False
|
||||||
|
|
||||||
|
def v2_runner_on_start(self, host, task):
|
||||||
|
if not self._printed_message:
|
||||||
|
self._print_task(task)
|
||||||
@@ -5,20 +5,19 @@
|
|||||||
# SPDX-License-Identifier: GPL-3.0-or-later
|
# SPDX-License-Identifier: GPL-3.0-or-later
|
||||||
|
|
||||||
# Make coding more python3-ish
|
# Make coding more python3-ish
|
||||||
from __future__ import (absolute_import, division, print_function)
|
from __future__ import annotations
|
||||||
__metaclass__ = type
|
|
||||||
|
|
||||||
DOCUMENTATION = '''
|
DOCUMENTATION = r"""
|
||||||
author: Unknown (!UNKNOWN)
|
author: Unknown (!UNKNOWN)
|
||||||
name: say
|
name: say
|
||||||
type: notification
|
type: notification
|
||||||
requirements:
|
requirements:
|
||||||
- whitelisting in configuration
|
- whitelisting in configuration
|
||||||
- the C(/usr/bin/say) command line program (standard on macOS) or C(espeak) command line program
|
- the C(/usr/bin/say) command line program (standard on macOS) or C(espeak) command line program
|
||||||
short_description: notify using software speech synthesizer
|
short_description: notify using software speech synthesizer
|
||||||
description:
|
description:
|
||||||
- This plugin will use the C(say) or C(espeak) program to "speak" about play events.
|
- This plugin will use the C(say) or C(espeak) program to "speak" about play events.
|
||||||
'''
|
"""
|
||||||
|
|
||||||
import platform
|
import platform
|
||||||
import subprocess
|
import subprocess
|
||||||
@@ -50,7 +49,7 @@ class CallbackModule(CallbackBase):
|
|||||||
self.synthesizer = get_bin_path('say')
|
self.synthesizer = get_bin_path('say')
|
||||||
if platform.system() != 'Darwin':
|
if platform.system() != 'Darwin':
|
||||||
# 'say' binary available, it might be GNUstep tool which doesn't support 'voice' parameter
|
# 'say' binary available, it might be GNUstep tool which doesn't support 'voice' parameter
|
||||||
self._display.warning("'say' executable found but system is '%s': ignoring voice parameter" % platform.system())
|
self._display.warning(f"'say' executable found but system is '{platform.system()}': ignoring voice parameter")
|
||||||
else:
|
else:
|
||||||
self.FAILED_VOICE = 'Zarvox'
|
self.FAILED_VOICE = 'Zarvox'
|
||||||
self.REGULAR_VOICE = 'Trinoids'
|
self.REGULAR_VOICE = 'Trinoids'
|
||||||
@@ -69,7 +68,7 @@ class CallbackModule(CallbackBase):
|
|||||||
# ansible will not call any callback if disabled is set to True
|
# ansible will not call any callback if disabled is set to True
|
||||||
if not self.synthesizer:
|
if not self.synthesizer:
|
||||||
self.disabled = True
|
self.disabled = True
|
||||||
self._display.warning("Unable to find either 'say' or 'espeak' executable, plugin %s disabled" % os.path.basename(__file__))
|
self._display.warning(f"Unable to find either 'say' or 'espeak' executable, plugin {os.path.basename(__file__)} disabled")
|
||||||
|
|
||||||
def say(self, msg, voice):
|
def say(self, msg, voice):
|
||||||
cmd = [self.synthesizer, msg]
|
cmd = [self.synthesizer, msg]
|
||||||
@@ -78,7 +77,7 @@ class CallbackModule(CallbackBase):
|
|||||||
subprocess.call(cmd)
|
subprocess.call(cmd)
|
||||||
|
|
||||||
def runner_on_failed(self, host, res, ignore_errors=False):
|
def runner_on_failed(self, host, res, ignore_errors=False):
|
||||||
self.say("Failure on host %s" % host, self.FAILED_VOICE)
|
self.say(f"Failure on host {host}", self.FAILED_VOICE)
|
||||||
|
|
||||||
def runner_on_ok(self, host, res):
|
def runner_on_ok(self, host, res):
|
||||||
self.say("pew", self.LASER_VOICE)
|
self.say("pew", self.LASER_VOICE)
|
||||||
@@ -87,13 +86,13 @@ class CallbackModule(CallbackBase):
|
|||||||
self.say("pew", self.LASER_VOICE)
|
self.say("pew", self.LASER_VOICE)
|
||||||
|
|
||||||
def runner_on_unreachable(self, host, res):
|
def runner_on_unreachable(self, host, res):
|
||||||
self.say("Failure on host %s" % host, self.FAILED_VOICE)
|
self.say(f"Failure on host {host}", self.FAILED_VOICE)
|
||||||
|
|
||||||
def runner_on_async_ok(self, host, res, jid):
|
def runner_on_async_ok(self, host, res, jid):
|
||||||
self.say("pew", self.LASER_VOICE)
|
self.say("pew", self.LASER_VOICE)
|
||||||
|
|
||||||
def runner_on_async_failed(self, host, res, jid):
|
def runner_on_async_failed(self, host, res, jid):
|
||||||
self.say("Failure on host %s" % host, self.FAILED_VOICE)
|
self.say(f"Failure on host {host}", self.FAILED_VOICE)
|
||||||
|
|
||||||
def playbook_on_start(self):
|
def playbook_on_start(self):
|
||||||
self.say("Running Playbook", self.REGULAR_VOICE)
|
self.say("Running Playbook", self.REGULAR_VOICE)
|
||||||
@@ -103,15 +102,15 @@ class CallbackModule(CallbackBase):
|
|||||||
|
|
||||||
def playbook_on_task_start(self, name, is_conditional):
|
def playbook_on_task_start(self, name, is_conditional):
|
||||||
if not is_conditional:
|
if not is_conditional:
|
||||||
self.say("Starting task: %s" % name, self.REGULAR_VOICE)
|
self.say(f"Starting task: {name}", self.REGULAR_VOICE)
|
||||||
else:
|
else:
|
||||||
self.say("Notifying task: %s" % name, self.REGULAR_VOICE)
|
self.say(f"Notifying task: {name}", self.REGULAR_VOICE)
|
||||||
|
|
||||||
def playbook_on_setup(self):
|
def playbook_on_setup(self):
|
||||||
self.say("Gathering facts", self.REGULAR_VOICE)
|
self.say("Gathering facts", self.REGULAR_VOICE)
|
||||||
|
|
||||||
def playbook_on_play_start(self, name):
|
def playbook_on_play_start(self, name):
|
||||||
self.say("Starting play: %s" % name, self.HAPPY_VOICE)
|
self.say(f"Starting play: {name}", self.HAPPY_VOICE)
|
||||||
|
|
||||||
def playbook_on_stats(self, stats):
|
def playbook_on_stats(self, stats):
|
||||||
self.say("Play complete", self.HAPPY_VOICE)
|
self.say("Play complete", self.HAPPY_VOICE)
|
||||||
|
|||||||
@@ -4,38 +4,37 @@
|
|||||||
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
|
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||||
# SPDX-License-Identifier: GPL-3.0-or-later
|
# SPDX-License-Identifier: GPL-3.0-or-later
|
||||||
|
|
||||||
from __future__ import (absolute_import, division, print_function)
|
from __future__ import annotations
|
||||||
__metaclass__ = type
|
|
||||||
|
|
||||||
DOCUMENTATION = '''
|
DOCUMENTATION = r"""
|
||||||
author: Unknown (!UNKNOWN)
|
author: Unknown (!UNKNOWN)
|
||||||
name: selective
|
name: selective
|
||||||
type: stdout
|
type: stdout
|
||||||
requirements:
|
requirements:
|
||||||
- set as main display callback
|
- set as main display callback
|
||||||
short_description: only print certain tasks
|
short_description: only print certain tasks
|
||||||
description:
|
description:
|
||||||
- This callback only prints tasks that have been tagged with C(print_action) or that have failed.
|
- This callback only prints tasks that have been tagged with C(print_action) or that have failed. This allows operators
|
||||||
This allows operators to focus on the tasks that provide value only.
|
to focus on the tasks that provide value only.
|
||||||
- Tasks that are not printed are placed with a C(.).
|
- Tasks that are not printed are placed with a C(.).
|
||||||
- If you increase verbosity all tasks are printed.
|
- If you increase verbosity all tasks are printed.
|
||||||
options:
|
options:
|
||||||
nocolor:
|
nocolor:
|
||||||
default: false
|
default: false
|
||||||
description: This setting allows suppressing colorizing output.
|
description: This setting allows suppressing colorizing output.
|
||||||
env:
|
env:
|
||||||
- name: ANSIBLE_NOCOLOR
|
- name: ANSIBLE_NOCOLOR
|
||||||
- name: ANSIBLE_SELECTIVE_DONT_COLORIZE
|
- name: ANSIBLE_SELECTIVE_DONT_COLORIZE
|
||||||
ini:
|
ini:
|
||||||
- section: defaults
|
- section: defaults
|
||||||
key: nocolor
|
key: nocolor
|
||||||
type: boolean
|
type: boolean
|
||||||
'''
|
"""
|
||||||
|
|
||||||
EXAMPLES = """
|
EXAMPLES = r"""
|
||||||
- ansible.builtin.debug: msg="This will not be printed"
|
- ansible.builtin.debug: msg="This will not be printed"
|
||||||
- ansible.builtin.debug: msg="But this will"
|
- ansible.builtin.debug: msg="But this will"
|
||||||
tags: [print_action]
|
tags: [print_action]
|
||||||
"""
|
"""
|
||||||
|
|
||||||
import difflib
|
import difflib
|
||||||
@@ -48,13 +47,13 @@ from ansible.module_utils.common.text.converters import to_text
|
|||||||
DONT_COLORIZE = False
|
DONT_COLORIZE = False
|
||||||
COLORS = {
|
COLORS = {
|
||||||
'normal': '\033[0m',
|
'normal': '\033[0m',
|
||||||
'ok': '\033[{0}m'.format(C.COLOR_CODES[C.COLOR_OK]),
|
'ok': f'\x1b[{C.COLOR_CODES[C.COLOR_OK]}m',
|
||||||
'bold': '\033[1m',
|
'bold': '\033[1m',
|
||||||
'not_so_bold': '\033[1m\033[34m',
|
'not_so_bold': '\033[1m\033[34m',
|
||||||
'changed': '\033[{0}m'.format(C.COLOR_CODES[C.COLOR_CHANGED]),
|
'changed': f'\x1b[{C.COLOR_CODES[C.COLOR_CHANGED]}m',
|
||||||
'failed': '\033[{0}m'.format(C.COLOR_CODES[C.COLOR_ERROR]),
|
'failed': f'\x1b[{C.COLOR_CODES[C.COLOR_ERROR]}m',
|
||||||
'endc': '\033[0m',
|
'endc': '\033[0m',
|
||||||
'skipped': '\033[{0}m'.format(C.COLOR_CODES[C.COLOR_SKIP]),
|
'skipped': f'\x1b[{C.COLOR_CODES[C.COLOR_SKIP]}m',
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
@@ -73,7 +72,7 @@ def colorize(msg, color):
|
|||||||
if DONT_COLORIZE:
|
if DONT_COLORIZE:
|
||||||
return msg
|
return msg
|
||||||
else:
|
else:
|
||||||
return '{0}{1}{2}'.format(COLORS[color], msg, COLORS['endc'])
|
return f"{COLORS[color]}{msg}{COLORS['endc']}"
|
||||||
|
|
||||||
|
|
||||||
class CallbackModule(CallbackBase):
|
class CallbackModule(CallbackBase):
|
||||||
@@ -106,15 +105,15 @@ class CallbackModule(CallbackBase):
|
|||||||
line_length = 120
|
line_length = 120
|
||||||
if self.last_skipped:
|
if self.last_skipped:
|
||||||
print()
|
print()
|
||||||
line = "# {0} ".format(task_name)
|
line = f"# {task_name} "
|
||||||
msg = colorize("{0}{1}".format(line, '*' * (line_length - len(line))), 'bold')
|
msg = colorize(f"{line}{'*' * (line_length - len(line))}", 'bold')
|
||||||
print(msg)
|
print(msg)
|
||||||
|
|
||||||
def _indent_text(self, text, indent_level):
|
def _indent_text(self, text, indent_level):
|
||||||
lines = text.splitlines()
|
lines = text.splitlines()
|
||||||
result_lines = []
|
result_lines = []
|
||||||
for l in lines:
|
for l in lines:
|
||||||
result_lines.append("{0}{1}".format(' ' * indent_level, l))
|
result_lines.append(f"{' ' * indent_level}{l}")
|
||||||
return '\n'.join(result_lines)
|
return '\n'.join(result_lines)
|
||||||
|
|
||||||
def _print_diff(self, diff, indent_level):
|
def _print_diff(self, diff, indent_level):
|
||||||
@@ -147,19 +146,19 @@ class CallbackModule(CallbackBase):
|
|||||||
change_string = colorize('FAILED!!!', color)
|
change_string = colorize('FAILED!!!', color)
|
||||||
else:
|
else:
|
||||||
color = 'changed' if changed else 'ok'
|
color = 'changed' if changed else 'ok'
|
||||||
change_string = colorize("changed={0}".format(changed), color)
|
change_string = colorize(f"changed={changed}", color)
|
||||||
|
|
||||||
msg = colorize(msg, color)
|
msg = colorize(msg, color)
|
||||||
|
|
||||||
line_length = 120
|
line_length = 120
|
||||||
spaces = ' ' * (40 - len(name) - indent_level)
|
spaces = ' ' * (40 - len(name) - indent_level)
|
||||||
line = "{0} * {1}{2}- {3}".format(' ' * indent_level, name, spaces, change_string)
|
line = f"{' ' * indent_level} * {name}{spaces}- {change_string}"
|
||||||
|
|
||||||
if len(msg) < 50:
|
if len(msg) < 50:
|
||||||
line += ' -- {0}'.format(msg)
|
line += f' -- {msg}'
|
||||||
print("{0} {1}---------".format(line, '-' * (line_length - len(line))))
|
print(f"{line} {'-' * (line_length - len(line))}---------")
|
||||||
else:
|
else:
|
||||||
print("{0} {1}".format(line, '-' * (line_length - len(line))))
|
print(f"{line} {'-' * (line_length - len(line))}")
|
||||||
print(self._indent_text(msg, indent_level + 4))
|
print(self._indent_text(msg, indent_level + 4))
|
||||||
|
|
||||||
if diff:
|
if diff:
|
||||||
@@ -209,7 +208,7 @@ class CallbackModule(CallbackBase):
|
|||||||
stderr = [r.get('exception', None), r.get('module_stderr', None)]
|
stderr = [r.get('exception', None), r.get('module_stderr', None)]
|
||||||
stderr = "\n".join([e for e in stderr if e]).strip()
|
stderr = "\n".join([e for e in stderr if e]).strip()
|
||||||
|
|
||||||
self._print_host_or_item(r['item'],
|
self._print_host_or_item(r[r['ansible_loop_var']],
|
||||||
r.get('changed', False),
|
r.get('changed', False),
|
||||||
to_text(r.get('msg', '')),
|
to_text(r.get('msg', '')),
|
||||||
r.get('diff', None),
|
r.get('diff', None),
|
||||||
@@ -239,8 +238,10 @@ class CallbackModule(CallbackBase):
|
|||||||
else:
|
else:
|
||||||
color = 'ok'
|
color = 'ok'
|
||||||
|
|
||||||
msg = '{0} : ok={1}\tchanged={2}\tfailed={3}\tunreachable={4}\trescued={5}\tignored={6}'.format(
|
msg = (
|
||||||
host, s['ok'], s['changed'], s['failures'], s['unreachable'], s['rescued'], s['ignored'])
|
f"{host} : ok={s['ok']}\tchanged={s['changed']}\tfailed={s['failures']}\tunreachable="
|
||||||
|
f"{s['unreachable']}\trescued={s['rescued']}\tignored={s['ignored']}"
|
||||||
|
)
|
||||||
print(colorize(msg, color))
|
print(colorize(msg, color))
|
||||||
|
|
||||||
def v2_runner_on_skipped(self, result, **kwargs):
|
def v2_runner_on_skipped(self, result, **kwargs):
|
||||||
@@ -252,17 +253,15 @@ class CallbackModule(CallbackBase):
|
|||||||
line_length = 120
|
line_length = 120
|
||||||
spaces = ' ' * (31 - len(result._host.name) - 4)
|
spaces = ' ' * (31 - len(result._host.name) - 4)
|
||||||
|
|
||||||
line = " * {0}{1}- {2}".format(colorize(result._host.name, 'not_so_bold'),
|
line = f" * {colorize(result._host.name, 'not_so_bold')}{spaces}- {colorize('skipped', 'skipped')}"
|
||||||
spaces,
|
|
||||||
colorize("skipped", 'skipped'),)
|
|
||||||
|
|
||||||
reason = result._result.get('skipped_reason', '') or \
|
reason = result._result.get('skipped_reason', '') or \
|
||||||
result._result.get('skip_reason', '')
|
result._result.get('skip_reason', '')
|
||||||
if len(reason) < 50:
|
if len(reason) < 50:
|
||||||
line += ' -- {0}'.format(reason)
|
line += f' -- {reason}'
|
||||||
print("{0} {1}---------".format(line, '-' * (line_length - len(line))))
|
print(f"{line} {'-' * (line_length - len(line))}---------")
|
||||||
else:
|
else:
|
||||||
print("{0} {1}".format(line, '-' * (line_length - len(line))))
|
print(f"{line} {'-' * (line_length - len(line))}")
|
||||||
print(self._indent_text(reason, 8))
|
print(self._indent_text(reason, 8))
|
||||||
print(reason)
|
print(reason)
|
||||||
|
|
||||||
|
|||||||
@@ -5,64 +5,67 @@
|
|||||||
# SPDX-License-Identifier: GPL-3.0-or-later
|
# SPDX-License-Identifier: GPL-3.0-or-later
|
||||||
|
|
||||||
# Make coding more python3-ish
|
# Make coding more python3-ish
|
||||||
from __future__ import (absolute_import, division, print_function)
|
from __future__ import annotations
|
||||||
__metaclass__ = type
|
|
||||||
|
|
||||||
DOCUMENTATION = '''
|
DOCUMENTATION = r"""
|
||||||
author: Unknown (!UNKNOWN)
|
author: Unknown (!UNKNOWN)
|
||||||
name: slack
|
name: slack
|
||||||
type: notification
|
type: notification
|
||||||
requirements:
|
requirements:
|
||||||
- whitelist in configuration
|
- whitelist in configuration
|
||||||
- prettytable (python library)
|
- prettytable (python library)
|
||||||
short_description: Sends play events to a Slack channel
|
short_description: Sends play events to a Slack channel
|
||||||
|
description:
|
||||||
|
- This is an ansible callback plugin that sends status updates to a Slack channel during playbook execution.
|
||||||
|
options:
|
||||||
|
http_agent:
|
||||||
description:
|
description:
|
||||||
- This is an ansible callback plugin that sends status updates to a Slack channel during playbook execution.
|
- HTTP user agent to use for requests to Slack.
|
||||||
options:
|
type: string
|
||||||
webhook_url:
|
version_added: "10.5.0"
|
||||||
required: true
|
webhook_url:
|
||||||
description: Slack Webhook URL.
|
required: true
|
||||||
type: str
|
description: Slack Webhook URL.
|
||||||
env:
|
type: str
|
||||||
- name: SLACK_WEBHOOK_URL
|
env:
|
||||||
ini:
|
- name: SLACK_WEBHOOK_URL
|
||||||
- section: callback_slack
|
ini:
|
||||||
key: webhook_url
|
- section: callback_slack
|
||||||
channel:
|
key: webhook_url
|
||||||
default: "#ansible"
|
channel:
|
||||||
description: Slack room to post in.
|
default: "#ansible"
|
||||||
type: str
|
description: Slack room to post in.
|
||||||
env:
|
type: str
|
||||||
- name: SLACK_CHANNEL
|
env:
|
||||||
ini:
|
- name: SLACK_CHANNEL
|
||||||
- section: callback_slack
|
ini:
|
||||||
key: channel
|
- section: callback_slack
|
||||||
username:
|
key: channel
|
||||||
description: Username to post as.
|
username:
|
||||||
type: str
|
description: Username to post as.
|
||||||
env:
|
type: str
|
||||||
- name: SLACK_USERNAME
|
env:
|
||||||
default: ansible
|
- name: SLACK_USERNAME
|
||||||
ini:
|
default: ansible
|
||||||
- section: callback_slack
|
ini:
|
||||||
key: username
|
- section: callback_slack
|
||||||
validate_certs:
|
key: username
|
||||||
description: Validate the SSL certificate of the Slack server for HTTPS URLs.
|
validate_certs:
|
||||||
env:
|
description: Validate the SSL certificate of the Slack server for HTTPS URLs.
|
||||||
- name: SLACK_VALIDATE_CERTS
|
env:
|
||||||
ini:
|
- name: SLACK_VALIDATE_CERTS
|
||||||
- section: callback_slack
|
ini:
|
||||||
key: validate_certs
|
- section: callback_slack
|
||||||
default: true
|
key: validate_certs
|
||||||
type: bool
|
default: true
|
||||||
'''
|
type: bool
|
||||||
|
"""
|
||||||
|
|
||||||
import json
|
import json
|
||||||
import os
|
import os
|
||||||
import uuid
|
import uuid
|
||||||
|
|
||||||
from ansible import context
|
from ansible import context
|
||||||
from ansible.module_utils.common.text.converters import to_text
|
|
||||||
from ansible.module_utils.urls import open_url
|
from ansible.module_utils.urls import open_url
|
||||||
from ansible.plugins.callback import CallbackBase
|
from ansible.plugins.callback import CallbackBase
|
||||||
|
|
||||||
@@ -108,7 +111,7 @@ class CallbackModule(CallbackBase):
|
|||||||
self.username = self.get_option('username')
|
self.username = self.get_option('username')
|
||||||
self.show_invocation = (self._display.verbosity > 1)
|
self.show_invocation = (self._display.verbosity > 1)
|
||||||
self.validate_certs = self.get_option('validate_certs')
|
self.validate_certs = self.get_option('validate_certs')
|
||||||
|
self.http_agent = self.get_option('http_agent')
|
||||||
if self.webhook_url is None:
|
if self.webhook_url is None:
|
||||||
self.disabled = True
|
self.disabled = True
|
||||||
self._display.warning('Slack Webhook URL was not provided. The '
|
self._display.warning('Slack Webhook URL was not provided. The '
|
||||||
@@ -134,18 +137,22 @@ class CallbackModule(CallbackBase):
|
|||||||
self._display.debug(data)
|
self._display.debug(data)
|
||||||
self._display.debug(self.webhook_url)
|
self._display.debug(self.webhook_url)
|
||||||
try:
|
try:
|
||||||
response = open_url(self.webhook_url, data=data, validate_certs=self.validate_certs,
|
response = open_url(
|
||||||
headers=headers)
|
self.webhook_url,
|
||||||
|
data=data,
|
||||||
|
validate_certs=self.validate_certs,
|
||||||
|
headers=headers,
|
||||||
|
http_agent=self.http_agent,
|
||||||
|
)
|
||||||
return response.read()
|
return response.read()
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
self._display.warning(u'Could not submit message to Slack: %s' %
|
self._display.warning(f'Could not submit message to Slack: {e}')
|
||||||
to_text(e))
|
|
||||||
|
|
||||||
def v2_playbook_on_start(self, playbook):
|
def v2_playbook_on_start(self, playbook):
|
||||||
self.playbook_name = os.path.basename(playbook._file_name)
|
self.playbook_name = os.path.basename(playbook._file_name)
|
||||||
|
|
||||||
title = [
|
title = [
|
||||||
'*Playbook initiated* (_%s_)' % self.guid
|
f'*Playbook initiated* (_{self.guid}_)'
|
||||||
]
|
]
|
||||||
|
|
||||||
invocation_items = []
|
invocation_items = []
|
||||||
@@ -156,23 +163,23 @@ class CallbackModule(CallbackBase):
|
|||||||
subset = context.CLIARGS['subset']
|
subset = context.CLIARGS['subset']
|
||||||
inventory = [os.path.abspath(i) for i in context.CLIARGS['inventory']]
|
inventory = [os.path.abspath(i) for i in context.CLIARGS['inventory']]
|
||||||
|
|
||||||
invocation_items.append('Inventory: %s' % ', '.join(inventory))
|
invocation_items.append(f"Inventory: {', '.join(inventory)}")
|
||||||
if tags and tags != ['all']:
|
if tags and tags != ['all']:
|
||||||
invocation_items.append('Tags: %s' % ', '.join(tags))
|
invocation_items.append(f"Tags: {', '.join(tags)}")
|
||||||
if skip_tags:
|
if skip_tags:
|
||||||
invocation_items.append('Skip Tags: %s' % ', '.join(skip_tags))
|
invocation_items.append(f"Skip Tags: {', '.join(skip_tags)}")
|
||||||
if subset:
|
if subset:
|
||||||
invocation_items.append('Limit: %s' % subset)
|
invocation_items.append(f'Limit: {subset}')
|
||||||
if extra_vars:
|
if extra_vars:
|
||||||
invocation_items.append('Extra Vars: %s' %
|
invocation_items.append(f"Extra Vars: {' '.join(extra_vars)}")
|
||||||
' '.join(extra_vars))
|
|
||||||
|
|
||||||
title.append('by *%s*' % context.CLIARGS['remote_user'])
|
title.append(f"by *{context.CLIARGS['remote_user']}*")
|
||||||
|
|
||||||
title.append('\n\n*%s*' % self.playbook_name)
|
title.append(f'\n\n*{self.playbook_name}*')
|
||||||
msg_items = [' '.join(title)]
|
msg_items = [' '.join(title)]
|
||||||
if invocation_items:
|
if invocation_items:
|
||||||
msg_items.append('```\n%s\n```' % '\n'.join(invocation_items))
|
_inv_item = '\n'.join(invocation_items)
|
||||||
|
msg_items.append(f'```\n{_inv_item}\n```')
|
||||||
|
|
||||||
msg = '\n'.join(msg_items)
|
msg = '\n'.join(msg_items)
|
||||||
|
|
||||||
@@ -192,8 +199,8 @@ class CallbackModule(CallbackBase):
|
|||||||
def v2_playbook_on_play_start(self, play):
|
def v2_playbook_on_play_start(self, play):
|
||||||
"""Display Play start messages"""
|
"""Display Play start messages"""
|
||||||
|
|
||||||
name = play.name or 'Play name not specified (%s)' % play._uuid
|
name = play.name or f'Play name not specified ({play._uuid})'
|
||||||
msg = '*Starting play* (_%s_)\n\n*%s*' % (self.guid, name)
|
msg = f'*Starting play* (_{self.guid}_)\n\n*{name}*'
|
||||||
attachments = [
|
attachments = [
|
||||||
{
|
{
|
||||||
'fallback': msg,
|
'fallback': msg,
|
||||||
@@ -228,7 +235,7 @@ class CallbackModule(CallbackBase):
|
|||||||
|
|
||||||
attachments = []
|
attachments = []
|
||||||
msg_items = [
|
msg_items = [
|
||||||
'*Playbook Complete* (_%s_)' % self.guid
|
f'*Playbook Complete* (_{self.guid}_)'
|
||||||
]
|
]
|
||||||
if failures or unreachable:
|
if failures or unreachable:
|
||||||
color = 'danger'
|
color = 'danger'
|
||||||
@@ -237,7 +244,7 @@ class CallbackModule(CallbackBase):
|
|||||||
color = 'good'
|
color = 'good'
|
||||||
msg_items.append('\n*Success!*')
|
msg_items.append('\n*Success!*')
|
||||||
|
|
||||||
msg_items.append('```\n%s\n```' % t)
|
msg_items.append(f'```\n{t}\n```')
|
||||||
|
|
||||||
msg = '\n'.join(msg_items)
|
msg = '\n'.join(msg_items)
|
||||||
|
|
||||||
|
|||||||
@@ -3,76 +3,75 @@
|
|||||||
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
|
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||||
# SPDX-License-Identifier: GPL-3.0-or-later
|
# SPDX-License-Identifier: GPL-3.0-or-later
|
||||||
|
|
||||||
from __future__ import (absolute_import, division, print_function)
|
from __future__ import annotations
|
||||||
__metaclass__ = type
|
|
||||||
|
|
||||||
DOCUMENTATION = '''
|
DOCUMENTATION = r"""
|
||||||
name: splunk
|
name: splunk
|
||||||
type: notification
|
type: notification
|
||||||
short_description: Sends task result events to Splunk HTTP Event Collector
|
short_description: Sends task result events to Splunk HTTP Event Collector
|
||||||
author: "Stuart Hirst (!UNKNOWN) <support@convergingdata.com>"
|
author: "Stuart Hirst (!UNKNOWN) <support@convergingdata.com>"
|
||||||
|
description:
|
||||||
|
- This callback plugin will send task results as JSON formatted events to a Splunk HTTP collector.
|
||||||
|
- The companion Splunk Monitoring & Diagnostics App is available here U(https://splunkbase.splunk.com/app/4023/).
|
||||||
|
- Credit to "Ryan Currah (@ryancurrah)" for original source upon which this is based.
|
||||||
|
requirements:
|
||||||
|
- Whitelisting this callback plugin
|
||||||
|
- 'Create a HTTP Event Collector in Splunk'
|
||||||
|
- 'Define the URL and token in C(ansible.cfg)'
|
||||||
|
options:
|
||||||
|
url:
|
||||||
|
description: URL to the Splunk HTTP collector source.
|
||||||
|
type: str
|
||||||
|
env:
|
||||||
|
- name: SPLUNK_URL
|
||||||
|
ini:
|
||||||
|
- section: callback_splunk
|
||||||
|
key: url
|
||||||
|
authtoken:
|
||||||
|
description: Token to authenticate the connection to the Splunk HTTP collector.
|
||||||
|
type: str
|
||||||
|
env:
|
||||||
|
- name: SPLUNK_AUTHTOKEN
|
||||||
|
ini:
|
||||||
|
- section: callback_splunk
|
||||||
|
key: authtoken
|
||||||
|
validate_certs:
|
||||||
|
description: Whether to validate certificates for connections to HEC. It is not recommended to set to V(false) except
|
||||||
|
when you are sure that nobody can intercept the connection between this plugin and HEC, as setting it to V(false) allows
|
||||||
|
man-in-the-middle attacks!
|
||||||
|
env:
|
||||||
|
- name: SPLUNK_VALIDATE_CERTS
|
||||||
|
ini:
|
||||||
|
- section: callback_splunk
|
||||||
|
key: validate_certs
|
||||||
|
type: bool
|
||||||
|
default: true
|
||||||
|
version_added: '1.0.0'
|
||||||
|
include_milliseconds:
|
||||||
|
description: Whether to include milliseconds as part of the generated timestamp field in the event sent to the Splunk
|
||||||
|
HTTP collector.
|
||||||
|
env:
|
||||||
|
- name: SPLUNK_INCLUDE_MILLISECONDS
|
||||||
|
ini:
|
||||||
|
- section: callback_splunk
|
||||||
|
key: include_milliseconds
|
||||||
|
type: bool
|
||||||
|
default: false
|
||||||
|
version_added: 2.0.0
|
||||||
|
batch:
|
||||||
description:
|
description:
|
||||||
- This callback plugin will send task results as JSON formatted events to a Splunk HTTP collector.
|
- Correlation ID which can be set across multiple playbook executions.
|
||||||
- The companion Splunk Monitoring & Diagnostics App is available here U(https://splunkbase.splunk.com/app/4023/).
|
env:
|
||||||
- Credit to "Ryan Currah (@ryancurrah)" for original source upon which this is based.
|
- name: SPLUNK_BATCH
|
||||||
requirements:
|
ini:
|
||||||
- Whitelisting this callback plugin
|
- section: callback_splunk
|
||||||
- 'Create a HTTP Event Collector in Splunk'
|
key: batch
|
||||||
- 'Define the URL and token in C(ansible.cfg)'
|
type: str
|
||||||
options:
|
version_added: 3.3.0
|
||||||
url:
|
"""
|
||||||
description: URL to the Splunk HTTP collector source.
|
|
||||||
type: str
|
|
||||||
env:
|
|
||||||
- name: SPLUNK_URL
|
|
||||||
ini:
|
|
||||||
- section: callback_splunk
|
|
||||||
key: url
|
|
||||||
authtoken:
|
|
||||||
description: Token to authenticate the connection to the Splunk HTTP collector.
|
|
||||||
type: str
|
|
||||||
env:
|
|
||||||
- name: SPLUNK_AUTHTOKEN
|
|
||||||
ini:
|
|
||||||
- section: callback_splunk
|
|
||||||
key: authtoken
|
|
||||||
validate_certs:
|
|
||||||
description: Whether to validate certificates for connections to HEC. It is not recommended to set to
|
|
||||||
V(false) except when you are sure that nobody can intercept the connection
|
|
||||||
between this plugin and HEC, as setting it to V(false) allows man-in-the-middle attacks!
|
|
||||||
env:
|
|
||||||
- name: SPLUNK_VALIDATE_CERTS
|
|
||||||
ini:
|
|
||||||
- section: callback_splunk
|
|
||||||
key: validate_certs
|
|
||||||
type: bool
|
|
||||||
default: true
|
|
||||||
version_added: '1.0.0'
|
|
||||||
include_milliseconds:
|
|
||||||
description: Whether to include milliseconds as part of the generated timestamp field in the event
|
|
||||||
sent to the Splunk HTTP collector.
|
|
||||||
env:
|
|
||||||
- name: SPLUNK_INCLUDE_MILLISECONDS
|
|
||||||
ini:
|
|
||||||
- section: callback_splunk
|
|
||||||
key: include_milliseconds
|
|
||||||
type: bool
|
|
||||||
default: false
|
|
||||||
version_added: 2.0.0
|
|
||||||
batch:
|
|
||||||
description:
|
|
||||||
- Correlation ID which can be set across multiple playbook executions.
|
|
||||||
env:
|
|
||||||
- name: SPLUNK_BATCH
|
|
||||||
ini:
|
|
||||||
- section: callback_splunk
|
|
||||||
key: batch
|
|
||||||
type: str
|
|
||||||
version_added: 3.3.0
|
|
||||||
'''
|
|
||||||
|
|
||||||
EXAMPLES = '''
|
EXAMPLES = r"""
|
||||||
examples: >
|
examples: >-
|
||||||
To enable, add this to your ansible.cfg file in the defaults block
|
To enable, add this to your ansible.cfg file in the defaults block
|
||||||
[defaults]
|
[defaults]
|
||||||
callback_whitelist = community.general.splunk
|
callback_whitelist = community.general.splunk
|
||||||
@@ -83,7 +82,7 @@ examples: >
|
|||||||
[callback_splunk]
|
[callback_splunk]
|
||||||
url = http://mysplunkinstance.datapaas.io:8088/services/collector/event
|
url = http://mysplunkinstance.datapaas.io:8088/services/collector/event
|
||||||
authtoken = f23blad6-5965-4537-bf69-5b5a545blabla88
|
authtoken = f23blad6-5965-4537-bf69-5b5a545blabla88
|
||||||
'''
|
"""
|
||||||
|
|
||||||
import json
|
import json
|
||||||
import uuid
|
import uuid
|
||||||
@@ -153,15 +152,14 @@ class SplunkHTTPCollectorSource(object):
|
|||||||
data['ansible_result'] = result._result
|
data['ansible_result'] = result._result
|
||||||
|
|
||||||
# This wraps the json payload in and outer json event needed by Splunk
|
# This wraps the json payload in and outer json event needed by Splunk
|
||||||
jsondata = json.dumps(data, cls=AnsibleJSONEncoder, sort_keys=True)
|
jsondata = json.dumps({"event": data}, cls=AnsibleJSONEncoder, sort_keys=True)
|
||||||
jsondata = '{"event":' + jsondata + "}"
|
|
||||||
|
|
||||||
open_url(
|
open_url(
|
||||||
url,
|
url,
|
||||||
jsondata,
|
jsondata,
|
||||||
headers={
|
headers={
|
||||||
'Content-type': 'application/json',
|
'Content-type': 'application/json',
|
||||||
'Authorization': 'Splunk ' + authtoken
|
'Authorization': f"Splunk {authtoken}"
|
||||||
},
|
},
|
||||||
method='POST',
|
method='POST',
|
||||||
validate_certs=validate_certs
|
validate_certs=validate_certs
|
||||||
|
|||||||
@@ -3,10 +3,9 @@
|
|||||||
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
|
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||||
# SPDX-License-Identifier: GPL-3.0-or-later
|
# SPDX-License-Identifier: GPL-3.0-or-later
|
||||||
|
|
||||||
from __future__ import (absolute_import, division, print_function)
|
from __future__ import annotations
|
||||||
__metaclass__ = type
|
|
||||||
|
|
||||||
DOCUMENTATION = r'''
|
DOCUMENTATION = r"""
|
||||||
name: sumologic
|
name: sumologic
|
||||||
type: notification
|
type: notification
|
||||||
short_description: Sends task result events to Sumologic
|
short_description: Sends task result events to Sumologic
|
||||||
@@ -15,8 +14,8 @@ description:
|
|||||||
- This callback plugin will send task results as JSON formatted events to a Sumologic HTTP collector source.
|
- This callback plugin will send task results as JSON formatted events to a Sumologic HTTP collector source.
|
||||||
requirements:
|
requirements:
|
||||||
- Whitelisting this callback plugin
|
- Whitelisting this callback plugin
|
||||||
- 'Create a HTTP collector source in Sumologic and specify a custom timestamp format of V(yyyy-MM-dd HH:mm:ss ZZZZ) and a custom timestamp locator
|
- 'Create a HTTP collector source in Sumologic and specify a custom timestamp format of V(yyyy-MM-dd HH:mm:ss ZZZZ) and
|
||||||
of V("timestamp": "(.*\)")'
|
a custom timestamp locator of V("timestamp": "(.*\)")'
|
||||||
options:
|
options:
|
||||||
url:
|
url:
|
||||||
description: URL to the Sumologic HTTP collector source.
|
description: URL to the Sumologic HTTP collector source.
|
||||||
@@ -26,10 +25,10 @@ options:
|
|||||||
ini:
|
ini:
|
||||||
- section: callback_sumologic
|
- section: callback_sumologic
|
||||||
key: url
|
key: url
|
||||||
'''
|
"""
|
||||||
|
|
||||||
EXAMPLES = '''
|
EXAMPLES = r"""
|
||||||
examples: |
|
examples: |-
|
||||||
To enable, add this to your ansible.cfg file in the defaults block
|
To enable, add this to your ansible.cfg file in the defaults block
|
||||||
[defaults]
|
[defaults]
|
||||||
callback_whitelist = community.general.sumologic
|
callback_whitelist = community.general.sumologic
|
||||||
@@ -40,7 +39,7 @@ examples: |
|
|||||||
Set the ansible.cfg variable in the callback_sumologic block
|
Set the ansible.cfg variable in the callback_sumologic block
|
||||||
[callback_sumologic]
|
[callback_sumologic]
|
||||||
url = https://endpoint1.collection.us2.sumologic.com/receiver/v1/http/R8moSv1d8EW9LAUFZJ6dbxCFxwLH6kfCdcBfddlfxCbLuL-BN5twcTpMk__pYy_cDmp==
|
url = https://endpoint1.collection.us2.sumologic.com/receiver/v1/http/R8moSv1d8EW9LAUFZJ6dbxCFxwLH6kfCdcBfddlfxCbLuL-BN5twcTpMk__pYy_cDmp==
|
||||||
'''
|
"""
|
||||||
|
|
||||||
import json
|
import json
|
||||||
import uuid
|
import uuid
|
||||||
|
|||||||
@@ -4,57 +4,56 @@
|
|||||||
# SPDX-License-Identifier: GPL-3.0-or-later
|
# SPDX-License-Identifier: GPL-3.0-or-later
|
||||||
|
|
||||||
# Make coding more python3-ish
|
# Make coding more python3-ish
|
||||||
from __future__ import (absolute_import, division, print_function)
|
from __future__ import annotations
|
||||||
__metaclass__ = type
|
|
||||||
|
|
||||||
DOCUMENTATION = '''
|
DOCUMENTATION = r"""
|
||||||
author: Unknown (!UNKNOWN)
|
author: Unknown (!UNKNOWN)
|
||||||
name: syslog_json
|
name: syslog_json
|
||||||
type: notification
|
type: notification
|
||||||
requirements:
|
requirements:
|
||||||
- whitelist in configuration
|
- whitelist in configuration
|
||||||
short_description: sends JSON events to syslog
|
short_description: sends JSON events to syslog
|
||||||
description:
|
description:
|
||||||
- This plugin logs ansible-playbook and ansible runs to a syslog server in JSON format.
|
- This plugin logs ansible-playbook and ansible runs to a syslog server in JSON format.
|
||||||
options:
|
options:
|
||||||
server:
|
server:
|
||||||
description: Syslog server that will receive the event.
|
description: Syslog server that will receive the event.
|
||||||
type: str
|
type: str
|
||||||
env:
|
env:
|
||||||
- name: SYSLOG_SERVER
|
- name: SYSLOG_SERVER
|
||||||
default: localhost
|
default: localhost
|
||||||
ini:
|
ini:
|
||||||
- section: callback_syslog_json
|
- section: callback_syslog_json
|
||||||
key: syslog_server
|
key: syslog_server
|
||||||
port:
|
port:
|
||||||
description: Port on which the syslog server is listening.
|
description: Port on which the syslog server is listening.
|
||||||
type: int
|
type: int
|
||||||
env:
|
env:
|
||||||
- name: SYSLOG_PORT
|
- name: SYSLOG_PORT
|
||||||
default: 514
|
default: 514
|
||||||
ini:
|
ini:
|
||||||
- section: callback_syslog_json
|
- section: callback_syslog_json
|
||||||
key: syslog_port
|
key: syslog_port
|
||||||
facility:
|
facility:
|
||||||
description: Syslog facility to log as.
|
description: Syslog facility to log as.
|
||||||
type: str
|
type: str
|
||||||
env:
|
env:
|
||||||
- name: SYSLOG_FACILITY
|
- name: SYSLOG_FACILITY
|
||||||
default: user
|
default: user
|
||||||
ini:
|
ini:
|
||||||
- section: callback_syslog_json
|
- section: callback_syslog_json
|
||||||
key: syslog_facility
|
key: syslog_facility
|
||||||
setup:
|
setup:
|
||||||
description: Log setup tasks.
|
description: Log setup tasks.
|
||||||
env:
|
env:
|
||||||
- name: ANSIBLE_SYSLOG_SETUP
|
- name: ANSIBLE_SYSLOG_SETUP
|
||||||
type: bool
|
type: bool
|
||||||
default: true
|
default: true
|
||||||
ini:
|
ini:
|
||||||
- section: callback_syslog_json
|
- section: callback_syslog_json
|
||||||
key: syslog_setup
|
key: syslog_setup
|
||||||
version_added: 4.5.0
|
version_added: 4.5.0
|
||||||
'''
|
"""
|
||||||
|
|
||||||
import logging
|
import logging
|
||||||
import logging.handlers
|
import logging.handlers
|
||||||
|
|||||||
@@ -5,51 +5,49 @@
|
|||||||
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
|
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||||
# SPDX-License-Identifier: GPL-3.0-or-later
|
# SPDX-License-Identifier: GPL-3.0-or-later
|
||||||
|
|
||||||
from __future__ import absolute_import, division, print_function
|
from __future__ import annotations
|
||||||
|
|
||||||
__metaclass__ = type
|
|
||||||
|
|
||||||
DOCUMENTATION = r"""
|
DOCUMENTATION = r"""
|
||||||
name: timestamp
|
name: timestamp
|
||||||
type: stdout
|
type: stdout
|
||||||
short_description: Adds simple timestamp for each header
|
short_description: Adds simple timestamp for each header
|
||||||
version_added: 9.0.0
|
version_added: 9.0.0
|
||||||
description:
|
description:
|
||||||
- This callback adds simple timestamp for each header.
|
- This callback adds simple timestamp for each header.
|
||||||
author: kurokobo (@kurokobo)
|
author: kurokobo (@kurokobo)
|
||||||
options:
|
options:
|
||||||
timezone:
|
timezone:
|
||||||
description:
|
description:
|
||||||
- Timezone to use for the timestamp in IANA time zone format.
|
- Timezone to use for the timestamp in IANA time zone format.
|
||||||
- For example C(America/New_York), C(Asia/Tokyo)). Ignored on Python < 3.9.
|
- For example V(America/New_York), V(Asia/Tokyo)). Ignored on Python < 3.9.
|
||||||
ini:
|
ini:
|
||||||
- section: callback_timestamp
|
- section: callback_timestamp
|
||||||
key: timezone
|
key: timezone
|
||||||
env:
|
env:
|
||||||
- name: ANSIBLE_CALLBACK_TIMESTAMP_TIMEZONE
|
- name: ANSIBLE_CALLBACK_TIMESTAMP_TIMEZONE
|
||||||
type: string
|
type: string
|
||||||
format_string:
|
format_string:
|
||||||
description:
|
description:
|
||||||
- Format of the timestamp shown to user in 1989 C standard format.
|
- Format of the timestamp shown to user in 1989 C standard format.
|
||||||
- >
|
- Refer to L(the Python documentation,https://docs.python.org/3/library/datetime.html#strftime-and-strptime-format-codes)
|
||||||
Refer to L(the Python documentation,https://docs.python.org/3/library/datetime.html#strftime-and-strptime-format-codes)
|
for the available format codes.
|
||||||
for the available format codes.
|
ini:
|
||||||
ini:
|
- section: callback_timestamp
|
||||||
- section: callback_timestamp
|
key: format_string
|
||||||
key: format_string
|
env:
|
||||||
env:
|
- name: ANSIBLE_CALLBACK_TIMESTAMP_FORMAT_STRING
|
||||||
- name: ANSIBLE_CALLBACK_TIMESTAMP_FORMAT_STRING
|
default: "%H:%M:%S"
|
||||||
default: "%H:%M:%S"
|
type: string
|
||||||
type: string
|
seealso:
|
||||||
seealso:
|
- plugin: ansible.posix.profile_tasks
|
||||||
- plugin: ansible.posix.profile_tasks
|
plugin_type: callback
|
||||||
plugin_type: callback
|
description: >-
|
||||||
description: >
|
You can use P(ansible.posix.profile_tasks#callback) callback plugin to time individual tasks and overall execution time
|
||||||
You can use P(ansible.posix.profile_tasks#callback) callback plugin to time individual tasks and overall execution time
|
with detailed timestamps.
|
||||||
with detailed timestamps.
|
extends_documentation_fragment:
|
||||||
extends_documentation_fragment:
|
- ansible.builtin.default_callback
|
||||||
- ansible.builtin.default_callback
|
- ansible.builtin.result_format_callback
|
||||||
- ansible.builtin.result_format_callback
|
|
||||||
"""
|
"""
|
||||||
|
|
||||||
|
|
||||||
@@ -85,7 +83,7 @@ def banner(self, msg, color=None, cows=True):
|
|||||||
msg = to_text(msg)
|
msg = to_text(msg)
|
||||||
if self.b_cowsay and cows:
|
if self.b_cowsay and cows:
|
||||||
try:
|
try:
|
||||||
self.banner_cowsay("%s @ %s" % (msg, timestamp))
|
self.banner_cowsay(f"{msg} @ {timestamp}")
|
||||||
return
|
return
|
||||||
except OSError:
|
except OSError:
|
||||||
self.warning("somebody cleverly deleted cowsay or something during the PB run. heh.")
|
self.warning("somebody cleverly deleted cowsay or something during the PB run. heh.")
|
||||||
@@ -98,7 +96,7 @@ def banner(self, msg, color=None, cows=True):
|
|||||||
if star_len <= 3:
|
if star_len <= 3:
|
||||||
star_len = 3
|
star_len = 3
|
||||||
stars = "*" * star_len
|
stars = "*" * star_len
|
||||||
self.display("\n%s %s %s" % (msg, stars, timestamp), color=color)
|
self.display(f"\n{msg} {stars} {timestamp}", color=color)
|
||||||
|
|
||||||
|
|
||||||
class CallbackModule(Default):
|
class CallbackModule(Default):
|
||||||
|
|||||||
@@ -5,21 +5,20 @@
|
|||||||
# SPDX-License-Identifier: GPL-3.0-or-later
|
# SPDX-License-Identifier: GPL-3.0-or-later
|
||||||
|
|
||||||
# Make coding more python3-ish
|
# Make coding more python3-ish
|
||||||
from __future__ import (absolute_import, division, print_function)
|
from __future__ import annotations
|
||||||
__metaclass__ = type
|
|
||||||
|
|
||||||
DOCUMENTATION = '''
|
DOCUMENTATION = r"""
|
||||||
name: unixy
|
name: unixy
|
||||||
type: stdout
|
type: stdout
|
||||||
author: Al Bowles (@akatch)
|
author: Al Bowles (@akatch)
|
||||||
short_description: condensed Ansible output
|
short_description: condensed Ansible output
|
||||||
description:
|
description:
|
||||||
- Consolidated Ansible output in the style of LINUX/UNIX startup logs.
|
- Consolidated Ansible output in the style of LINUX/UNIX startup logs.
|
||||||
extends_documentation_fragment:
|
extends_documentation_fragment:
|
||||||
- default_callback
|
- default_callback
|
||||||
requirements:
|
requirements:
|
||||||
- set as stdout in configuration
|
- set as stdout in configuration
|
||||||
'''
|
"""
|
||||||
|
|
||||||
from os.path import basename
|
from os.path import basename
|
||||||
from ansible import constants as C
|
from ansible import constants as C
|
||||||
@@ -67,24 +66,24 @@ class CallbackModule(CallbackModule_default):
|
|||||||
|
|
||||||
def _process_result_output(self, result, msg):
|
def _process_result_output(self, result, msg):
|
||||||
task_host = result._host.get_name()
|
task_host = result._host.get_name()
|
||||||
task_result = "%s %s" % (task_host, msg)
|
task_result = f"{task_host} {msg}"
|
||||||
|
|
||||||
if self._run_is_verbose(result):
|
if self._run_is_verbose(result):
|
||||||
task_result = "%s %s: %s" % (task_host, msg, self._dump_results(result._result, indent=4))
|
task_result = f"{task_host} {msg}: {self._dump_results(result._result, indent=4)}"
|
||||||
return task_result
|
return task_result
|
||||||
|
|
||||||
if self.delegated_vars:
|
if self.delegated_vars:
|
||||||
task_delegate_host = self.delegated_vars['ansible_host']
|
task_delegate_host = self.delegated_vars['ansible_host']
|
||||||
task_result = "%s -> %s %s" % (task_host, task_delegate_host, msg)
|
task_result = f"{task_host} -> {task_delegate_host} {msg}"
|
||||||
|
|
||||||
if result._result.get('msg') and result._result.get('msg') != "All items completed":
|
if result._result.get('msg') and result._result.get('msg') != "All items completed":
|
||||||
task_result += " | msg: " + to_text(result._result.get('msg'))
|
task_result += f" | msg: {to_text(result._result.get('msg'))}"
|
||||||
|
|
||||||
if result._result.get('stdout'):
|
if result._result.get('stdout'):
|
||||||
task_result += " | stdout: " + result._result.get('stdout')
|
task_result += f" | stdout: {result._result.get('stdout')}"
|
||||||
|
|
||||||
if result._result.get('stderr'):
|
if result._result.get('stderr'):
|
||||||
task_result += " | stderr: " + result._result.get('stderr')
|
task_result += f" | stderr: {result._result.get('stderr')}"
|
||||||
|
|
||||||
return task_result
|
return task_result
|
||||||
|
|
||||||
@@ -92,30 +91,30 @@ class CallbackModule(CallbackModule_default):
|
|||||||
self._get_task_display_name(task)
|
self._get_task_display_name(task)
|
||||||
if self.task_display_name is not None:
|
if self.task_display_name is not None:
|
||||||
if task.check_mode and self.get_option('check_mode_markers'):
|
if task.check_mode and self.get_option('check_mode_markers'):
|
||||||
self._display.display("%s (check mode)..." % self.task_display_name)
|
self._display.display(f"{self.task_display_name} (check mode)...")
|
||||||
else:
|
else:
|
||||||
self._display.display("%s..." % self.task_display_name)
|
self._display.display(f"{self.task_display_name}...")
|
||||||
|
|
||||||
def v2_playbook_on_handler_task_start(self, task):
|
def v2_playbook_on_handler_task_start(self, task):
|
||||||
self._get_task_display_name(task)
|
self._get_task_display_name(task)
|
||||||
if self.task_display_name is not None:
|
if self.task_display_name is not None:
|
||||||
if task.check_mode and self.get_option('check_mode_markers'):
|
if task.check_mode and self.get_option('check_mode_markers'):
|
||||||
self._display.display("%s (via handler in check mode)... " % self.task_display_name)
|
self._display.display(f"{self.task_display_name} (via handler in check mode)... ")
|
||||||
else:
|
else:
|
||||||
self._display.display("%s (via handler)... " % self.task_display_name)
|
self._display.display(f"{self.task_display_name} (via handler)... ")
|
||||||
|
|
||||||
def v2_playbook_on_play_start(self, play):
|
def v2_playbook_on_play_start(self, play):
|
||||||
name = play.get_name().strip()
|
name = play.get_name().strip()
|
||||||
if play.check_mode and self.get_option('check_mode_markers'):
|
if play.check_mode and self.get_option('check_mode_markers'):
|
||||||
if name and play.hosts:
|
if name and play.hosts:
|
||||||
msg = u"\n- %s (in check mode) on hosts: %s -" % (name, ",".join(play.hosts))
|
msg = f"\n- {name} (in check mode) on hosts: {','.join(play.hosts)} -"
|
||||||
else:
|
else:
|
||||||
msg = u"- check mode -"
|
msg = "- check mode -"
|
||||||
else:
|
else:
|
||||||
if name and play.hosts:
|
if name and play.hosts:
|
||||||
msg = u"\n- %s on hosts: %s -" % (name, ",".join(play.hosts))
|
msg = f"\n- {name} on hosts: {','.join(play.hosts)} -"
|
||||||
else:
|
else:
|
||||||
msg = u"---"
|
msg = "---"
|
||||||
|
|
||||||
self._display.display(msg)
|
self._display.display(msg)
|
||||||
|
|
||||||
@@ -126,7 +125,7 @@ class CallbackModule(CallbackModule_default):
|
|||||||
msg = "skipped"
|
msg = "skipped"
|
||||||
|
|
||||||
task_result = self._process_result_output(result, msg)
|
task_result = self._process_result_output(result, msg)
|
||||||
self._display.display(" " + task_result, display_color)
|
self._display.display(f" {task_result}", display_color)
|
||||||
else:
|
else:
|
||||||
return
|
return
|
||||||
|
|
||||||
@@ -136,10 +135,10 @@ class CallbackModule(CallbackModule_default):
|
|||||||
msg = "failed"
|
msg = "failed"
|
||||||
item_value = self._get_item_label(result._result)
|
item_value = self._get_item_label(result._result)
|
||||||
if item_value:
|
if item_value:
|
||||||
msg += " | item: %s" % (item_value,)
|
msg += f" | item: {item_value}"
|
||||||
|
|
||||||
task_result = self._process_result_output(result, msg)
|
task_result = self._process_result_output(result, msg)
|
||||||
self._display.display(" " + task_result, display_color, stderr=self.get_option('display_failed_stderr'))
|
self._display.display(f" {task_result}", display_color, stderr=self.get_option('display_failed_stderr'))
|
||||||
|
|
||||||
def v2_runner_on_ok(self, result, msg="ok", display_color=C.COLOR_OK):
|
def v2_runner_on_ok(self, result, msg="ok", display_color=C.COLOR_OK):
|
||||||
self._preprocess_result(result)
|
self._preprocess_result(result)
|
||||||
@@ -149,13 +148,13 @@ class CallbackModule(CallbackModule_default):
|
|||||||
msg = "done"
|
msg = "done"
|
||||||
item_value = self._get_item_label(result._result)
|
item_value = self._get_item_label(result._result)
|
||||||
if item_value:
|
if item_value:
|
||||||
msg += " | item: %s" % (item_value,)
|
msg += f" | item: {item_value}"
|
||||||
display_color = C.COLOR_CHANGED
|
display_color = C.COLOR_CHANGED
|
||||||
task_result = self._process_result_output(result, msg)
|
task_result = self._process_result_output(result, msg)
|
||||||
self._display.display(" " + task_result, display_color)
|
self._display.display(f" {task_result}", display_color)
|
||||||
elif self.get_option('display_ok_hosts'):
|
elif self.get_option('display_ok_hosts'):
|
||||||
task_result = self._process_result_output(result, msg)
|
task_result = self._process_result_output(result, msg)
|
||||||
self._display.display(" " + task_result, display_color)
|
self._display.display(f" {task_result}", display_color)
|
||||||
|
|
||||||
def v2_runner_item_on_skipped(self, result):
|
def v2_runner_item_on_skipped(self, result):
|
||||||
self.v2_runner_on_skipped(result)
|
self.v2_runner_on_skipped(result)
|
||||||
@@ -173,7 +172,7 @@ class CallbackModule(CallbackModule_default):
|
|||||||
display_color = C.COLOR_UNREACHABLE
|
display_color = C.COLOR_UNREACHABLE
|
||||||
task_result = self._process_result_output(result, msg)
|
task_result = self._process_result_output(result, msg)
|
||||||
|
|
||||||
self._display.display(" " + task_result, display_color, stderr=self.get_option('display_failed_stderr'))
|
self._display.display(f" {task_result}", display_color, stderr=self.get_option('display_failed_stderr'))
|
||||||
|
|
||||||
def v2_on_file_diff(self, result):
|
def v2_on_file_diff(self, result):
|
||||||
if result._task.loop and 'results' in result._result:
|
if result._task.loop and 'results' in result._result:
|
||||||
@@ -195,25 +194,17 @@ class CallbackModule(CallbackModule_default):
|
|||||||
# TODO how else can we display these?
|
# TODO how else can we display these?
|
||||||
t = stats.summarize(h)
|
t = stats.summarize(h)
|
||||||
|
|
||||||
self._display.display(u" %s : %s %s %s %s %s %s" % (
|
self._display.display(
|
||||||
hostcolor(h, t),
|
f" {hostcolor(h, t)} : {colorize('ok', t['ok'], C.COLOR_OK)} {colorize('changed', t['changed'], C.COLOR_CHANGED)} "
|
||||||
colorize(u'ok', t['ok'], C.COLOR_OK),
|
f"{colorize('unreachable', t['unreachable'], C.COLOR_UNREACHABLE)} {colorize('failed', t['failures'], C.COLOR_ERROR)} "
|
||||||
colorize(u'changed', t['changed'], C.COLOR_CHANGED),
|
f"{colorize('rescued', t['rescued'], C.COLOR_OK)} {colorize('ignored', t['ignored'], C.COLOR_WARN)}",
|
||||||
colorize(u'unreachable', t['unreachable'], C.COLOR_UNREACHABLE),
|
|
||||||
colorize(u'failed', t['failures'], C.COLOR_ERROR),
|
|
||||||
colorize(u'rescued', t['rescued'], C.COLOR_OK),
|
|
||||||
colorize(u'ignored', t['ignored'], C.COLOR_WARN)),
|
|
||||||
screen_only=True
|
screen_only=True
|
||||||
)
|
)
|
||||||
|
|
||||||
self._display.display(u" %s : %s %s %s %s %s %s" % (
|
self._display.display(
|
||||||
hostcolor(h, t, False),
|
f" {hostcolor(h, t, False)} : {colorize('ok', t['ok'], None)} {colorize('changed', t['changed'], None)} "
|
||||||
colorize(u'ok', t['ok'], None),
|
f"{colorize('unreachable', t['unreachable'], None)} {colorize('failed', t['failures'], None)} {colorize('rescued', t['rescued'], None)} "
|
||||||
colorize(u'changed', t['changed'], None),
|
f"{colorize('ignored', t['ignored'], None)}",
|
||||||
colorize(u'unreachable', t['unreachable'], None),
|
|
||||||
colorize(u'failed', t['failures'], None),
|
|
||||||
colorize(u'rescued', t['rescued'], None),
|
|
||||||
colorize(u'ignored', t['ignored'], None)),
|
|
||||||
log_only=True
|
log_only=True
|
||||||
)
|
)
|
||||||
if stats.custom and self.get_option('show_custom_stats'):
|
if stats.custom and self.get_option('show_custom_stats'):
|
||||||
@@ -223,12 +214,14 @@ class CallbackModule(CallbackModule_default):
|
|||||||
for k in sorted(stats.custom.keys()):
|
for k in sorted(stats.custom.keys()):
|
||||||
if k == '_run':
|
if k == '_run':
|
||||||
continue
|
continue
|
||||||
self._display.display('\t%s: %s' % (k, self._dump_results(stats.custom[k], indent=1).replace('\n', '')))
|
stat_val = self._dump_results(stats.custom[k], indent=1).replace('\n', '')
|
||||||
|
self._display.display(f'\t{k}: {stat_val}')
|
||||||
|
|
||||||
# print per run custom stats
|
# print per run custom stats
|
||||||
if '_run' in stats.custom:
|
if '_run' in stats.custom:
|
||||||
self._display.display("", screen_only=True)
|
self._display.display("", screen_only=True)
|
||||||
self._display.display('\tRUN: %s' % self._dump_results(stats.custom['_run'], indent=1).replace('\n', ''))
|
stat_val_run = self._dump_results(stats.custom['_run'], indent=1).replace('\n', '')
|
||||||
|
self._display.display(f'\tRUN: {stat_val_run}')
|
||||||
self._display.display("", screen_only=True)
|
self._display.display("", screen_only=True)
|
||||||
|
|
||||||
def v2_playbook_on_no_hosts_matched(self):
|
def v2_playbook_on_no_hosts_matched(self):
|
||||||
@@ -239,23 +232,23 @@ class CallbackModule(CallbackModule_default):
|
|||||||
|
|
||||||
def v2_playbook_on_start(self, playbook):
|
def v2_playbook_on_start(self, playbook):
|
||||||
if context.CLIARGS['check'] and self.get_option('check_mode_markers'):
|
if context.CLIARGS['check'] and self.get_option('check_mode_markers'):
|
||||||
self._display.display("Executing playbook %s in check mode" % basename(playbook._file_name))
|
self._display.display(f"Executing playbook {basename(playbook._file_name)} in check mode")
|
||||||
else:
|
else:
|
||||||
self._display.display("Executing playbook %s" % basename(playbook._file_name))
|
self._display.display(f"Executing playbook {basename(playbook._file_name)}")
|
||||||
|
|
||||||
# show CLI arguments
|
# show CLI arguments
|
||||||
if self._display.verbosity > 3:
|
if self._display.verbosity > 3:
|
||||||
if context.CLIARGS.get('args'):
|
if context.CLIARGS.get('args'):
|
||||||
self._display.display('Positional arguments: %s' % ' '.join(context.CLIARGS['args']),
|
self._display.display(f"Positional arguments: {' '.join(context.CLIARGS['args'])}",
|
||||||
color=C.COLOR_VERBOSE, screen_only=True)
|
color=C.COLOR_VERBOSE, screen_only=True)
|
||||||
|
|
||||||
for argument in (a for a in context.CLIARGS if a != 'args'):
|
for argument in (a for a in context.CLIARGS if a != 'args'):
|
||||||
val = context.CLIARGS[argument]
|
val = context.CLIARGS[argument]
|
||||||
if val:
|
if val:
|
||||||
self._display.vvvv('%s: %s' % (argument, val))
|
self._display.vvvv(f'{argument}: {val}')
|
||||||
|
|
||||||
def v2_runner_retry(self, result):
|
def v2_runner_retry(self, result):
|
||||||
msg = " Retrying... (%d of %d)" % (result._result['attempts'], result._result['retries'])
|
msg = f" Retrying... ({result._result['attempts']} of {result._result['retries']})"
|
||||||
if self._run_is_verbose(result):
|
if self._run_is_verbose(result):
|
||||||
msg += "Result was: %s" % self._dump_results(result._result)
|
msg += f"Result was: {self._dump_results(result._result)}"
|
||||||
self._display.display(msg, color=C.COLOR_DEBUG)
|
self._display.display(msg, color=C.COLOR_DEBUG)
|
||||||
|
|||||||
@@ -4,40 +4,42 @@
|
|||||||
# SPDX-License-Identifier: GPL-3.0-or-later
|
# SPDX-License-Identifier: GPL-3.0-or-later
|
||||||
|
|
||||||
# Make coding more python3-ish
|
# Make coding more python3-ish
|
||||||
from __future__ import (absolute_import, division, print_function)
|
from __future__ import annotations
|
||||||
__metaclass__ = type
|
|
||||||
|
|
||||||
DOCUMENTATION = '''
|
DOCUMENTATION = r"""
|
||||||
author: Unknown (!UNKNOWN)
|
author: Unknown (!UNKNOWN)
|
||||||
name: yaml
|
name: yaml
|
||||||
type: stdout
|
type: stdout
|
||||||
short_description: YAML-ized Ansible screen output
|
short_description: YAML-ized Ansible screen output
|
||||||
description:
|
deprecated:
|
||||||
- Ansible output that can be quite a bit easier to read than the
|
removed_in: 12.0.0
|
||||||
default JSON formatting.
|
why: Starting in ansible-core 2.13, the P(ansible.builtin.default#callback) callback has support for printing output in
|
||||||
extends_documentation_fragment:
|
YAML format.
|
||||||
- default_callback
|
alternative: Use O(ansible.builtin.default#callback:result_format=yaml).
|
||||||
requirements:
|
description:
|
||||||
- set as stdout in configuration
|
- Ansible output that can be quite a bit easier to read than the default JSON formatting.
|
||||||
seealso:
|
extends_documentation_fragment:
|
||||||
- plugin: ansible.builtin.default
|
- default_callback
|
||||||
plugin_type: callback
|
requirements:
|
||||||
description: >
|
- set as stdout in configuration
|
||||||
There is a parameter O(ansible.builtin.default#callback:result_format) in P(ansible.builtin.default#callback)
|
seealso:
|
||||||
that allows you to change the output format to YAML.
|
- plugin: ansible.builtin.default
|
||||||
notes:
|
plugin_type: callback
|
||||||
- >
|
description: >-
|
||||||
With ansible-core 2.13 or newer, you can instead specify V(yaml) for the parameter O(ansible.builtin.default#callback:result_format)
|
There is a parameter O(ansible.builtin.default#callback:result_format) in P(ansible.builtin.default#callback) that allows
|
||||||
in P(ansible.builtin.default#callback).
|
you to change the output format to YAML.
|
||||||
'''
|
notes:
|
||||||
|
- With ansible-core 2.13 or newer, you can instead specify V(yaml) for the parameter O(ansible.builtin.default#callback:result_format)
|
||||||
|
in P(ansible.builtin.default#callback).
|
||||||
|
"""
|
||||||
|
|
||||||
import yaml
|
import yaml
|
||||||
import json
|
import json
|
||||||
import re
|
import re
|
||||||
import string
|
import string
|
||||||
|
from collections.abc import Mapping, Sequence
|
||||||
|
|
||||||
from ansible.module_utils.common.text.converters import to_text
|
from ansible.module_utils.common.text.converters import to_text
|
||||||
from ansible.parsing.yaml.dumper import AnsibleDumper
|
|
||||||
from ansible.plugins.callback import strip_internal_keys, module_response_deepcopy
|
from ansible.plugins.callback import strip_internal_keys, module_response_deepcopy
|
||||||
from ansible.plugins.callback.default import CallbackModule as Default
|
from ansible.plugins.callback.default import CallbackModule as Default
|
||||||
|
|
||||||
@@ -45,35 +47,83 @@ from ansible.plugins.callback.default import CallbackModule as Default
|
|||||||
# from http://stackoverflow.com/a/15423007/115478
|
# from http://stackoverflow.com/a/15423007/115478
|
||||||
def should_use_block(value):
|
def should_use_block(value):
|
||||||
"""Returns true if string should be in block format"""
|
"""Returns true if string should be in block format"""
|
||||||
for c in u"\u000a\u000d\u001c\u001d\u001e\u0085\u2028\u2029":
|
for c in "\u000a\u000d\u001c\u001d\u001e\u0085\u2028\u2029":
|
||||||
if c in value:
|
if c in value:
|
||||||
return True
|
return True
|
||||||
return False
|
return False
|
||||||
|
|
||||||
|
|
||||||
class MyDumper(AnsibleDumper):
|
def adjust_str_value_for_block(value):
|
||||||
def represent_scalar(self, tag, value, style=None):
|
# we care more about readable than accuracy, so...
|
||||||
"""Uses block style for multi-line strings"""
|
# ...no trailing space
|
||||||
if style is None:
|
value = value.rstrip()
|
||||||
if should_use_block(value):
|
# ...and non-printable characters
|
||||||
style = '|'
|
value = ''.join(x for x in value if x in string.printable or ord(x) >= 0xA0)
|
||||||
# we care more about readable than accuracy, so...
|
# ...tabs prevent blocks from expanding
|
||||||
# ...no trailing space
|
value = value.expandtabs()
|
||||||
value = value.rstrip()
|
# ...and odd bits of whitespace
|
||||||
# ...and non-printable characters
|
value = re.sub(r'[\x0b\x0c\r]', '', value)
|
||||||
value = ''.join(x for x in value if x in string.printable or ord(x) >= 0xA0)
|
# ...as does trailing space
|
||||||
# ...tabs prevent blocks from expanding
|
value = re.sub(r' +\n', '\n', value)
|
||||||
value = value.expandtabs()
|
return value
|
||||||
# ...and odd bits of whitespace
|
|
||||||
value = re.sub(r'[\x0b\x0c\r]', '', value)
|
|
||||||
# ...as does trailing space
|
def create_string_node(tag, value, style, default_style):
|
||||||
value = re.sub(r' +\n', '\n', value)
|
if style is None:
|
||||||
else:
|
if should_use_block(value):
|
||||||
style = self.default_style
|
style = '|'
|
||||||
node = yaml.representer.ScalarNode(tag, value, style=style)
|
value = adjust_str_value_for_block(value)
|
||||||
if self.alias_key is not None:
|
else:
|
||||||
self.represented_objects[self.alias_key] = node
|
style = default_style
|
||||||
return node
|
return yaml.representer.ScalarNode(tag, value, style=style)
|
||||||
|
|
||||||
|
|
||||||
|
try:
|
||||||
|
from ansible.module_utils.common.yaml import HAS_LIBYAML
|
||||||
|
# import below was added in https://github.com/ansible/ansible/pull/85039,
|
||||||
|
# first contained in ansible-core 2.19.0b2:
|
||||||
|
from ansible.utils.vars import transform_to_native_types
|
||||||
|
|
||||||
|
if HAS_LIBYAML:
|
||||||
|
from yaml.cyaml import CSafeDumper as SafeDumper
|
||||||
|
else:
|
||||||
|
from yaml import SafeDumper
|
||||||
|
|
||||||
|
class MyDumper(SafeDumper):
|
||||||
|
def represent_scalar(self, tag, value, style=None):
|
||||||
|
"""Uses block style for multi-line strings"""
|
||||||
|
node = create_string_node(tag, value, style, self.default_style)
|
||||||
|
if self.alias_key is not None:
|
||||||
|
self.represented_objects[self.alias_key] = node
|
||||||
|
return node
|
||||||
|
|
||||||
|
except ImportError:
|
||||||
|
# In case transform_to_native_types cannot be imported, we either have ansible-core 2.19.0b1
|
||||||
|
# (or some random commit from the devel or stable-2.19 branch after merging the DT changes
|
||||||
|
# and before transform_to_native_types was added), or we have a version without the DT changes.
|
||||||
|
|
||||||
|
# Here we simply assume we have a version without the DT changes, and thus can continue as
|
||||||
|
# with ansible-core 2.18 and before.
|
||||||
|
|
||||||
|
transform_to_native_types = None
|
||||||
|
|
||||||
|
from ansible.parsing.yaml.dumper import AnsibleDumper
|
||||||
|
|
||||||
|
class MyDumper(AnsibleDumper): # pylint: disable=inherit-non-class
|
||||||
|
def represent_scalar(self, tag, value, style=None):
|
||||||
|
"""Uses block style for multi-line strings"""
|
||||||
|
node = create_string_node(tag, value, style, self.default_style)
|
||||||
|
if self.alias_key is not None:
|
||||||
|
self.represented_objects[self.alias_key] = node
|
||||||
|
return node
|
||||||
|
|
||||||
|
|
||||||
|
def transform_recursively(value, transform):
|
||||||
|
if isinstance(value, Mapping):
|
||||||
|
return {transform(k): transform(v) for k, v in value.items()}
|
||||||
|
if isinstance(value, Sequence) and not isinstance(value, (str, bytes)):
|
||||||
|
return [transform(e) for e in value]
|
||||||
|
return transform(value)
|
||||||
|
|
||||||
|
|
||||||
class CallbackModule(Default):
|
class CallbackModule(Default):
|
||||||
@@ -113,11 +163,11 @@ class CallbackModule(Default):
|
|||||||
|
|
||||||
# put changed and skipped into a header line
|
# put changed and skipped into a header line
|
||||||
if 'changed' in abridged_result:
|
if 'changed' in abridged_result:
|
||||||
dumped += 'changed=' + str(abridged_result['changed']).lower() + ' '
|
dumped += f"changed={str(abridged_result['changed']).lower()} "
|
||||||
del abridged_result['changed']
|
del abridged_result['changed']
|
||||||
|
|
||||||
if 'skipped' in abridged_result:
|
if 'skipped' in abridged_result:
|
||||||
dumped += 'skipped=' + str(abridged_result['skipped']).lower() + ' '
|
dumped += f"skipped={str(abridged_result['skipped']).lower()} "
|
||||||
del abridged_result['skipped']
|
del abridged_result['skipped']
|
||||||
|
|
||||||
# if we already have stdout, we don't need stdout_lines
|
# if we already have stdout, we don't need stdout_lines
|
||||||
@@ -130,6 +180,8 @@ class CallbackModule(Default):
|
|||||||
|
|
||||||
if abridged_result:
|
if abridged_result:
|
||||||
dumped += '\n'
|
dumped += '\n'
|
||||||
|
if transform_to_native_types is not None:
|
||||||
|
abridged_result = transform_recursively(abridged_result, lambda v: transform_to_native_types(v, redact=False))
|
||||||
dumped += to_text(yaml.dump(abridged_result, allow_unicode=True, width=1000, Dumper=MyDumper, default_flow_style=False))
|
dumped += to_text(yaml.dump(abridged_result, allow_unicode=True, width=1000, Dumper=MyDumper, default_flow_style=False))
|
||||||
|
|
||||||
# indent by a couple of spaces
|
# indent by a couple of spaces
|
||||||
|
|||||||
@@ -7,94 +7,80 @@
|
|||||||
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
|
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||||
# SPDX-License-Identifier: GPL-3.0-or-later
|
# SPDX-License-Identifier: GPL-3.0-or-later
|
||||||
|
|
||||||
from __future__ import (absolute_import, division, print_function)
|
from __future__ import annotations
|
||||||
__metaclass__ = type
|
|
||||||
|
|
||||||
DOCUMENTATION = '''
|
DOCUMENTATION = r"""
|
||||||
author: Maykel Moya (!UNKNOWN) <mmoya@speedyrails.com>
|
author: Maykel Moya (!UNKNOWN) <mmoya@speedyrails.com>
|
||||||
name: chroot
|
name: chroot
|
||||||
short_description: Interact with local chroot
|
short_description: Interact with local chroot
|
||||||
|
description:
|
||||||
|
- Run commands or put/fetch files to an existing chroot on the Ansible controller.
|
||||||
|
options:
|
||||||
|
remote_addr:
|
||||||
description:
|
description:
|
||||||
- Run commands or put/fetch files to an existing chroot on the Ansible controller.
|
- The path of the chroot you want to access.
|
||||||
options:
|
type: string
|
||||||
remote_addr:
|
default: inventory_hostname
|
||||||
description:
|
vars:
|
||||||
- The path of the chroot you want to access.
|
- name: inventory_hostname
|
||||||
type: string
|
- name: ansible_host
|
||||||
default: inventory_hostname
|
executable:
|
||||||
vars:
|
description:
|
||||||
- name: inventory_hostname
|
- User specified executable shell.
|
||||||
- name: ansible_host
|
type: string
|
||||||
executable:
|
ini:
|
||||||
description:
|
- section: defaults
|
||||||
- User specified executable shell
|
key: executable
|
||||||
type: string
|
env:
|
||||||
ini:
|
- name: ANSIBLE_EXECUTABLE
|
||||||
- section: defaults
|
vars:
|
||||||
key: executable
|
- name: ansible_executable
|
||||||
env:
|
default: /bin/sh
|
||||||
- name: ANSIBLE_EXECUTABLE
|
chroot_exe:
|
||||||
vars:
|
description:
|
||||||
- name: ansible_executable
|
- User specified chroot binary.
|
||||||
default: /bin/sh
|
type: string
|
||||||
chroot_exe:
|
ini:
|
||||||
description:
|
- section: chroot_connection
|
||||||
- User specified chroot binary
|
key: exe
|
||||||
type: string
|
env:
|
||||||
ini:
|
- name: ANSIBLE_CHROOT_EXE
|
||||||
- section: chroot_connection
|
vars:
|
||||||
key: exe
|
- name: ansible_chroot_exe
|
||||||
env:
|
default: chroot
|
||||||
- name: ANSIBLE_CHROOT_EXE
|
disable_root_check:
|
||||||
vars:
|
description:
|
||||||
- name: ansible_chroot_exe
|
- Do not check that the user is not root.
|
||||||
default: chroot
|
ini:
|
||||||
disable_root_check:
|
- section: chroot_connection
|
||||||
description:
|
key: disable_root_check
|
||||||
- Do not check that the user is not root.
|
env:
|
||||||
ini:
|
- name: ANSIBLE_CHROOT_DISABLE_ROOT_CHECK
|
||||||
- section: chroot_connection
|
vars:
|
||||||
key: disable_root_check
|
- name: ansible_chroot_disable_root_check
|
||||||
env:
|
default: false
|
||||||
- name: ANSIBLE_CHROOT_DISABLE_ROOT_CHECK
|
type: bool
|
||||||
vars:
|
version_added: 7.3.0
|
||||||
- name: ansible_chroot_disable_root_check
|
"""
|
||||||
default: false
|
|
||||||
type: bool
|
|
||||||
version_added: 7.3.0
|
|
||||||
'''
|
|
||||||
|
|
||||||
EXAMPLES = r"""
|
EXAMPLES = r"""
|
||||||
# Plugin requires root privileges for chroot, -E preserves your env (and location of ~/.ansible):
|
|
||||||
# sudo -E ansible-playbook ...
|
|
||||||
#
|
|
||||||
# Static inventory file
|
|
||||||
# [chroots]
|
|
||||||
# /path/to/debootstrap
|
|
||||||
# /path/to/feboostrap
|
|
||||||
# /path/to/lxc-image
|
|
||||||
# /path/to/chroot
|
|
||||||
|
|
||||||
# playbook
|
|
||||||
---
|
|
||||||
- hosts: chroots
|
- hosts: chroots
|
||||||
connection: community.general.chroot
|
connection: community.general.chroot
|
||||||
tasks:
|
tasks:
|
||||||
- debug:
|
- debug:
|
||||||
msg: "This is coming from chroot environment"
|
msg: "This is coming from chroot environment"
|
||||||
|
|
||||||
"""
|
"""
|
||||||
|
|
||||||
import os
|
import os
|
||||||
import os.path
|
import os.path
|
||||||
import subprocess
|
import subprocess
|
||||||
import traceback
|
import traceback
|
||||||
|
from shlex import quote as shlex_quote
|
||||||
|
|
||||||
from ansible.errors import AnsibleError
|
from ansible.errors import AnsibleError
|
||||||
from ansible.module_utils.basic import is_executable
|
from ansible.module_utils.basic import is_executable
|
||||||
from ansible.module_utils.common.process import get_bin_path
|
from ansible.module_utils.common.process import get_bin_path
|
||||||
from ansible.module_utils.six.moves import shlex_quote
|
from ansible.module_utils.common.text.converters import to_bytes
|
||||||
from ansible.module_utils.common.text.converters import to_bytes, to_native
|
|
||||||
from ansible.plugins.connection import ConnectionBase, BUFSIZE
|
from ansible.plugins.connection import ConnectionBase, BUFSIZE
|
||||||
from ansible.utils.display import Display
|
from ansible.utils.display import Display
|
||||||
|
|
||||||
@@ -120,15 +106,15 @@ class Connection(ConnectionBase):
|
|||||||
|
|
||||||
# do some trivial checks for ensuring 'host' is actually a chroot'able dir
|
# do some trivial checks for ensuring 'host' is actually a chroot'able dir
|
||||||
if not os.path.isdir(self.chroot):
|
if not os.path.isdir(self.chroot):
|
||||||
raise AnsibleError("%s is not a directory" % self.chroot)
|
raise AnsibleError(f"{self.chroot} is not a directory")
|
||||||
|
|
||||||
chrootsh = os.path.join(self.chroot, 'bin/sh')
|
chrootsh = os.path.join(self.chroot, 'bin/sh')
|
||||||
# Want to check for a usable bourne shell inside the chroot.
|
# Want to check for a usable bourne shell inside the chroot.
|
||||||
# is_executable() == True is sufficient. For symlinks it
|
# is_executable() == True is sufficient. For symlinks it
|
||||||
# gets really complicated really fast. So we punt on finding that
|
# gets really complicated really fast. So we punt on finding that
|
||||||
# out. As long as it's a symlink we assume that it will work
|
# out. As long as it is a symlink we assume that it will work
|
||||||
if not (is_executable(chrootsh) or (os.path.lexists(chrootsh) and os.path.islink(chrootsh))):
|
if not (is_executable(chrootsh) or (os.path.lexists(chrootsh) and os.path.islink(chrootsh))):
|
||||||
raise AnsibleError("%s does not look like a chrootable dir (/bin/sh missing)" % self.chroot)
|
raise AnsibleError(f"{self.chroot} does not look like a chrootable dir (/bin/sh missing)")
|
||||||
|
|
||||||
def _connect(self):
|
def _connect(self):
|
||||||
""" connect to the chroot """
|
""" connect to the chroot """
|
||||||
@@ -143,7 +129,7 @@ class Connection(ConnectionBase):
|
|||||||
try:
|
try:
|
||||||
self.chroot_cmd = get_bin_path(self.get_option('chroot_exe'))
|
self.chroot_cmd = get_bin_path(self.get_option('chroot_exe'))
|
||||||
except ValueError as e:
|
except ValueError as e:
|
||||||
raise AnsibleError(to_native(e))
|
raise AnsibleError(str(e))
|
||||||
|
|
||||||
super(Connection, self)._connect()
|
super(Connection, self)._connect()
|
||||||
if not self._connected:
|
if not self._connected:
|
||||||
@@ -161,7 +147,7 @@ class Connection(ConnectionBase):
|
|||||||
executable = self.get_option('executable')
|
executable = self.get_option('executable')
|
||||||
local_cmd = [self.chroot_cmd, self.chroot, executable, '-c', cmd]
|
local_cmd = [self.chroot_cmd, self.chroot, executable, '-c', cmd]
|
||||||
|
|
||||||
display.vvv("EXEC %s" % local_cmd, host=self.chroot)
|
display.vvv(f"EXEC {local_cmd}", host=self.chroot)
|
||||||
local_cmd = [to_bytes(i, errors='surrogate_or_strict') for i in local_cmd]
|
local_cmd = [to_bytes(i, errors='surrogate_or_strict') for i in local_cmd]
|
||||||
p = subprocess.Popen(local_cmd, shell=False, stdin=stdin,
|
p = subprocess.Popen(local_cmd, shell=False, stdin=stdin,
|
||||||
stdout=subprocess.PIPE, stderr=subprocess.PIPE)
|
stdout=subprocess.PIPE, stderr=subprocess.PIPE)
|
||||||
@@ -186,7 +172,7 @@ class Connection(ConnectionBase):
|
|||||||
exist in any given chroot. So for now we're choosing "/" instead.
|
exist in any given chroot. So for now we're choosing "/" instead.
|
||||||
This also happens to be the former default.
|
This also happens to be the former default.
|
||||||
|
|
||||||
Can revisit using $HOME instead if it's a problem
|
Can revisit using $HOME instead if it is a problem
|
||||||
"""
|
"""
|
||||||
if not remote_path.startswith(os.path.sep):
|
if not remote_path.startswith(os.path.sep):
|
||||||
remote_path = os.path.join(os.path.sep, remote_path)
|
remote_path = os.path.join(os.path.sep, remote_path)
|
||||||
@@ -195,7 +181,7 @@ class Connection(ConnectionBase):
|
|||||||
def put_file(self, in_path, out_path):
|
def put_file(self, in_path, out_path):
|
||||||
""" transfer a file from local to chroot """
|
""" transfer a file from local to chroot """
|
||||||
super(Connection, self).put_file(in_path, out_path)
|
super(Connection, self).put_file(in_path, out_path)
|
||||||
display.vvv("PUT %s TO %s" % (in_path, out_path), host=self.chroot)
|
display.vvv(f"PUT {in_path} TO {out_path}", host=self.chroot)
|
||||||
|
|
||||||
out_path = shlex_quote(self._prefix_login_path(out_path))
|
out_path = shlex_quote(self._prefix_login_path(out_path))
|
||||||
try:
|
try:
|
||||||
@@ -205,27 +191,27 @@ class Connection(ConnectionBase):
|
|||||||
else:
|
else:
|
||||||
count = ''
|
count = ''
|
||||||
try:
|
try:
|
||||||
p = self._buffered_exec_command('dd of=%s bs=%s%s' % (out_path, BUFSIZE, count), stdin=in_file)
|
p = self._buffered_exec_command(f'dd of={out_path} bs={BUFSIZE}{count}', stdin=in_file)
|
||||||
except OSError:
|
except OSError:
|
||||||
raise AnsibleError("chroot connection requires dd command in the chroot")
|
raise AnsibleError("chroot connection requires dd command in the chroot")
|
||||||
try:
|
try:
|
||||||
stdout, stderr = p.communicate()
|
stdout, stderr = p.communicate()
|
||||||
except Exception:
|
except Exception:
|
||||||
traceback.print_exc()
|
traceback.print_exc()
|
||||||
raise AnsibleError("failed to transfer file %s to %s" % (in_path, out_path))
|
raise AnsibleError(f"failed to transfer file {in_path} to {out_path}")
|
||||||
if p.returncode != 0:
|
if p.returncode != 0:
|
||||||
raise AnsibleError("failed to transfer file %s to %s:\n%s\n%s" % (in_path, out_path, stdout, stderr))
|
raise AnsibleError(f"failed to transfer file {in_path} to {out_path}:\n{stdout}\n{stderr}")
|
||||||
except IOError:
|
except IOError:
|
||||||
raise AnsibleError("file or module does not exist at: %s" % in_path)
|
raise AnsibleError(f"file or module does not exist at: {in_path}")
|
||||||
|
|
||||||
def fetch_file(self, in_path, out_path):
|
def fetch_file(self, in_path, out_path):
|
||||||
""" fetch a file from chroot to local """
|
""" fetch a file from chroot to local """
|
||||||
super(Connection, self).fetch_file(in_path, out_path)
|
super(Connection, self).fetch_file(in_path, out_path)
|
||||||
display.vvv("FETCH %s TO %s" % (in_path, out_path), host=self.chroot)
|
display.vvv(f"FETCH {in_path} TO {out_path}", host=self.chroot)
|
||||||
|
|
||||||
in_path = shlex_quote(self._prefix_login_path(in_path))
|
in_path = shlex_quote(self._prefix_login_path(in_path))
|
||||||
try:
|
try:
|
||||||
p = self._buffered_exec_command('dd if=%s bs=%s' % (in_path, BUFSIZE))
|
p = self._buffered_exec_command(f'dd if={in_path} bs={BUFSIZE}')
|
||||||
except OSError:
|
except OSError:
|
||||||
raise AnsibleError("chroot connection requires dd command in the chroot")
|
raise AnsibleError("chroot connection requires dd command in the chroot")
|
||||||
|
|
||||||
@@ -237,10 +223,10 @@ class Connection(ConnectionBase):
|
|||||||
chunk = p.stdout.read(BUFSIZE)
|
chunk = p.stdout.read(BUFSIZE)
|
||||||
except Exception:
|
except Exception:
|
||||||
traceback.print_exc()
|
traceback.print_exc()
|
||||||
raise AnsibleError("failed to transfer file %s to %s" % (in_path, out_path))
|
raise AnsibleError(f"failed to transfer file {in_path} to {out_path}")
|
||||||
stdout, stderr = p.communicate()
|
stdout, stderr = p.communicate()
|
||||||
if p.returncode != 0:
|
if p.returncode != 0:
|
||||||
raise AnsibleError("failed to transfer file %s to %s:\n%s\n%s" % (in_path, out_path, stdout, stderr))
|
raise AnsibleError(f"failed to transfer file {in_path} to {out_path}:\n{stdout}\n{stderr}")
|
||||||
|
|
||||||
def close(self):
|
def close(self):
|
||||||
""" terminate the connection; nothing to do here """
|
""" terminate the connection; nothing to do here """
|
||||||
|
|||||||
@@ -6,27 +6,26 @@
|
|||||||
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
|
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||||
# SPDX-License-Identifier: GPL-3.0-or-later
|
# SPDX-License-Identifier: GPL-3.0-or-later
|
||||||
|
|
||||||
from __future__ import (absolute_import, division, print_function)
|
from __future__ import annotations
|
||||||
__metaclass__ = type
|
|
||||||
|
|
||||||
DOCUMENTATION = '''
|
DOCUMENTATION = r"""
|
||||||
author: Michael Scherer (@mscherer) <misc@zarb.org>
|
author: Michael Scherer (@mscherer) <misc@zarb.org>
|
||||||
name: funcd
|
name: funcd
|
||||||
short_description: Use funcd to connect to target
|
short_description: Use funcd to connect to target
|
||||||
|
description:
|
||||||
|
- This transport permits you to use Ansible over Func.
|
||||||
|
- For people who have already setup func and that wish to play with ansible, this permit to move gradually to ansible without
|
||||||
|
having to redo completely the setup of the network.
|
||||||
|
options:
|
||||||
|
remote_addr:
|
||||||
description:
|
description:
|
||||||
- This transport permits you to use Ansible over Func.
|
- The path of the chroot you want to access.
|
||||||
- For people who have already setup func and that wish to play with ansible,
|
type: string
|
||||||
this permit to move gradually to ansible without having to redo completely the setup of the network.
|
default: inventory_hostname
|
||||||
options:
|
vars:
|
||||||
remote_addr:
|
- name: ansible_host
|
||||||
description:
|
- name: ansible_func_host
|
||||||
- The path of the chroot you want to access.
|
"""
|
||||||
type: string
|
|
||||||
default: inventory_hostname
|
|
||||||
vars:
|
|
||||||
- name: ansible_host
|
|
||||||
- name: ansible_func_host
|
|
||||||
'''
|
|
||||||
|
|
||||||
HAVE_FUNC = False
|
HAVE_FUNC = False
|
||||||
try:
|
try:
|
||||||
@@ -72,7 +71,7 @@ class Connection(ConnectionBase):
|
|||||||
raise AnsibleError("Internal Error: this module does not support optimized module pipelining")
|
raise AnsibleError("Internal Error: this module does not support optimized module pipelining")
|
||||||
|
|
||||||
# totally ignores privilege escalation
|
# totally ignores privilege escalation
|
||||||
display.vvv("EXEC %s" % cmd, host=self.host)
|
display.vvv(f"EXEC {cmd}", host=self.host)
|
||||||
p = self.client.command.run(cmd)[self.host]
|
p = self.client.command.run(cmd)[self.host]
|
||||||
return p[0], p[1], p[2]
|
return p[0], p[1], p[2]
|
||||||
|
|
||||||
@@ -87,14 +86,14 @@ class Connection(ConnectionBase):
|
|||||||
""" transfer a file from local to remote """
|
""" transfer a file from local to remote """
|
||||||
|
|
||||||
out_path = self._normalize_path(out_path, '/')
|
out_path = self._normalize_path(out_path, '/')
|
||||||
display.vvv("PUT %s TO %s" % (in_path, out_path), host=self.host)
|
display.vvv(f"PUT {in_path} TO {out_path}", host=self.host)
|
||||||
self.client.local.copyfile.send(in_path, out_path)
|
self.client.local.copyfile.send(in_path, out_path)
|
||||||
|
|
||||||
def fetch_file(self, in_path, out_path):
|
def fetch_file(self, in_path, out_path):
|
||||||
""" fetch a file from remote to local """
|
""" fetch a file from remote to local """
|
||||||
|
|
||||||
in_path = self._normalize_path(in_path, '/')
|
in_path = self._normalize_path(in_path, '/')
|
||||||
display.vvv("FETCH %s TO %s" % (in_path, out_path), host=self.host)
|
display.vvv(f"FETCH {in_path} TO {out_path}", host=self.host)
|
||||||
# need to use a tmp dir due to difference of semantic for getfile
|
# need to use a tmp dir due to difference of semantic for getfile
|
||||||
# ( who take a # directory as destination) and fetch_file, who
|
# ( who take a # directory as destination) and fetch_file, who
|
||||||
# take a file directly
|
# take a file directly
|
||||||
|
|||||||
@@ -5,50 +5,74 @@
|
|||||||
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
|
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||||
# SPDX-License-Identifier: GPL-3.0-or-later
|
# SPDX-License-Identifier: GPL-3.0-or-later
|
||||||
|
|
||||||
from __future__ import (absolute_import, division, print_function)
|
from __future__ import annotations
|
||||||
__metaclass__ = type
|
|
||||||
|
|
||||||
DOCUMENTATION = """
|
DOCUMENTATION = r"""
|
||||||
author: Stéphane Graber (@stgraber)
|
author: Stéphane Graber (@stgraber)
|
||||||
name: incus
|
name: incus
|
||||||
short_description: Run tasks in Incus instances via the Incus CLI.
|
short_description: Run tasks in Incus instances using the Incus CLI
|
||||||
|
description:
|
||||||
|
- Run commands or put/fetch files to an existing Incus instance using Incus CLI.
|
||||||
|
version_added: "8.2.0"
|
||||||
|
options:
|
||||||
|
remote_addr:
|
||||||
description:
|
description:
|
||||||
- Run commands or put/fetch files to an existing Incus instance using Incus CLI.
|
- The instance identifier.
|
||||||
version_added: "8.2.0"
|
type: string
|
||||||
options:
|
default: inventory_hostname
|
||||||
remote_addr:
|
vars:
|
||||||
description:
|
- name: inventory_hostname
|
||||||
- The instance identifier.
|
- name: ansible_host
|
||||||
type: string
|
- name: ansible_incus_host
|
||||||
default: inventory_hostname
|
executable:
|
||||||
vars:
|
description:
|
||||||
- name: inventory_hostname
|
- The shell to use for execution inside the instance.
|
||||||
- name: ansible_host
|
type: string
|
||||||
- name: ansible_incus_host
|
default: /bin/sh
|
||||||
executable:
|
vars:
|
||||||
description:
|
- name: ansible_executable
|
||||||
- The shell to use for execution inside the instance.
|
- name: ansible_incus_executable
|
||||||
type: string
|
incus_become_method:
|
||||||
default: /bin/sh
|
description:
|
||||||
vars:
|
- Become command used to switch to a non-root user.
|
||||||
- name: ansible_executable
|
- Is only used when O(remote_user) is not V(root).
|
||||||
- name: ansible_incus_executable
|
type: str
|
||||||
remote:
|
default: /bin/su
|
||||||
description:
|
vars:
|
||||||
- The name of the Incus remote to use (per C(incus remote list)).
|
- name: incus_become_method
|
||||||
- Remotes are used to access multiple servers from a single client.
|
version_added: 10.4.0
|
||||||
type: string
|
remote:
|
||||||
default: local
|
description:
|
||||||
vars:
|
- The name of the Incus remote to use (per C(incus remote list)).
|
||||||
- name: ansible_incus_remote
|
- Remotes are used to access multiple servers from a single client.
|
||||||
project:
|
type: string
|
||||||
description:
|
default: local
|
||||||
- The name of the Incus project to use (per C(incus project list)).
|
vars:
|
||||||
- Projects are used to divide the instances running on a server.
|
- name: ansible_incus_remote
|
||||||
type: string
|
remote_user:
|
||||||
default: default
|
description:
|
||||||
vars:
|
- User to login/authenticate as.
|
||||||
- name: ansible_incus_project
|
- Can be set from the CLI via the C(--user) or C(-u) options.
|
||||||
|
type: string
|
||||||
|
default: root
|
||||||
|
vars:
|
||||||
|
- name: ansible_user
|
||||||
|
env:
|
||||||
|
- name: ANSIBLE_REMOTE_USER
|
||||||
|
ini:
|
||||||
|
- section: defaults
|
||||||
|
key: remote_user
|
||||||
|
keyword:
|
||||||
|
- name: remote_user
|
||||||
|
version_added: 10.4.0
|
||||||
|
project:
|
||||||
|
description:
|
||||||
|
- The name of the Incus project to use (per C(incus project list)).
|
||||||
|
- Projects are used to divide the instances running on a server.
|
||||||
|
type: string
|
||||||
|
default: default
|
||||||
|
vars:
|
||||||
|
- name: ansible_incus_project
|
||||||
"""
|
"""
|
||||||
|
|
||||||
import os
|
import os
|
||||||
@@ -56,7 +80,7 @@ from subprocess import call, Popen, PIPE
|
|||||||
|
|
||||||
from ansible.errors import AnsibleError, AnsibleConnectionFailure, AnsibleFileNotFound
|
from ansible.errors import AnsibleError, AnsibleConnectionFailure, AnsibleFileNotFound
|
||||||
from ansible.module_utils.common.process import get_bin_path
|
from ansible.module_utils.common.process import get_bin_path
|
||||||
from ansible.module_utils._text import to_bytes, to_text
|
from ansible.module_utils.common.text.converters import to_bytes, to_text
|
||||||
from ansible.plugins.connection import ConnectionBase
|
from ansible.plugins.connection import ConnectionBase
|
||||||
|
|
||||||
|
|
||||||
@@ -65,7 +89,6 @@ class Connection(ConnectionBase):
|
|||||||
|
|
||||||
transport = "incus"
|
transport = "incus"
|
||||||
has_pipelining = True
|
has_pipelining = True
|
||||||
default_user = 'root'
|
|
||||||
|
|
||||||
def __init__(self, play_context, new_stdin, *args, **kwargs):
|
def __init__(self, play_context, new_stdin, *args, **kwargs):
|
||||||
super(Connection, self).__init__(play_context, new_stdin, *args, **kwargs)
|
super(Connection, self).__init__(play_context, new_stdin, *args, **kwargs)
|
||||||
@@ -80,10 +103,34 @@ class Connection(ConnectionBase):
|
|||||||
super(Connection, self)._connect()
|
super(Connection, self)._connect()
|
||||||
|
|
||||||
if not self._connected:
|
if not self._connected:
|
||||||
self._display.vvv(u"ESTABLISH Incus CONNECTION FOR USER: root",
|
self._display.vvv(f"ESTABLISH Incus CONNECTION FOR USER: {self.get_option('remote_user')}",
|
||||||
host=self._instance())
|
host=self._instance())
|
||||||
self._connected = True
|
self._connected = True
|
||||||
|
|
||||||
|
def _build_command(self, cmd) -> str:
|
||||||
|
"""build the command to execute on the incus host"""
|
||||||
|
|
||||||
|
exec_cmd = [
|
||||||
|
self._incus_cmd,
|
||||||
|
"--project", self.get_option("project"),
|
||||||
|
"exec",
|
||||||
|
f"{self.get_option('remote')}:{self._instance()}",
|
||||||
|
"--"]
|
||||||
|
|
||||||
|
if self.get_option("remote_user") != "root":
|
||||||
|
self._display.vvv(
|
||||||
|
f"INFO: Running as non-root user: {self.get_option('remote_user')}, \
|
||||||
|
trying to run 'incus exec' with become method: {self.get_option('incus_become_method')}",
|
||||||
|
host=self._instance(),
|
||||||
|
)
|
||||||
|
exec_cmd.extend(
|
||||||
|
[self.get_option("incus_become_method"), self.get_option("remote_user"), "-c"]
|
||||||
|
)
|
||||||
|
|
||||||
|
exec_cmd.extend([self.get_option("executable"), "-c", cmd])
|
||||||
|
|
||||||
|
return exec_cmd
|
||||||
|
|
||||||
def _instance(self):
|
def _instance(self):
|
||||||
# Return only the leading part of the FQDN as the instance name
|
# Return only the leading part of the FQDN as the instance name
|
||||||
# as Incus instance names cannot be a FQDN.
|
# as Incus instance names cannot be a FQDN.
|
||||||
@@ -93,16 +140,11 @@ class Connection(ConnectionBase):
|
|||||||
""" execute a command on the Incus host """
|
""" execute a command on the Incus host """
|
||||||
super(Connection, self).exec_command(cmd, in_data=in_data, sudoable=sudoable)
|
super(Connection, self).exec_command(cmd, in_data=in_data, sudoable=sudoable)
|
||||||
|
|
||||||
self._display.vvv(u"EXEC {0}".format(cmd),
|
self._display.vvv(f"EXEC {cmd}",
|
||||||
host=self._instance())
|
host=self._instance())
|
||||||
|
|
||||||
local_cmd = [
|
local_cmd = self._build_command(cmd)
|
||||||
self._incus_cmd,
|
self._display.vvvvv(f"EXEC {local_cmd}", host=self._instance())
|
||||||
"--project", self.get_option("project"),
|
|
||||||
"exec",
|
|
||||||
"%s:%s" % (self.get_option("remote"), self._instance()),
|
|
||||||
"--",
|
|
||||||
self._play_context.executable, "-c", cmd]
|
|
||||||
|
|
||||||
local_cmd = [to_bytes(i, errors='surrogate_or_strict') for i in local_cmd]
|
local_cmd = [to_bytes(i, errors='surrogate_or_strict') for i in local_cmd]
|
||||||
in_data = to_bytes(in_data, errors='surrogate_or_strict', nonstring='passthru')
|
in_data = to_bytes(in_data, errors='surrogate_or_strict', nonstring='passthru')
|
||||||
@@ -113,34 +155,96 @@ class Connection(ConnectionBase):
|
|||||||
stdout = to_text(stdout)
|
stdout = to_text(stdout)
|
||||||
stderr = to_text(stderr)
|
stderr = to_text(stderr)
|
||||||
|
|
||||||
if stderr == "Error: Instance is not running.\n":
|
if stderr.startswith("Error: ") and stderr.rstrip().endswith(
|
||||||
raise AnsibleConnectionFailure("instance not running: %s" %
|
": Instance is not running"
|
||||||
self._instance())
|
):
|
||||||
|
raise AnsibleConnectionFailure(
|
||||||
|
f"instance not running: {self._instance()} (remote={self.get_option('remote')}, project={self.get_option('project')})"
|
||||||
|
)
|
||||||
|
|
||||||
if stderr == "Error: Instance not found\n":
|
if stderr.startswith("Error: ") and stderr.rstrip().endswith(
|
||||||
raise AnsibleConnectionFailure("instance not found: %s" %
|
": Instance not found"
|
||||||
self._instance())
|
):
|
||||||
|
raise AnsibleConnectionFailure(
|
||||||
|
f"instance not found: {self._instance()} (remote={self.get_option('remote')}, project={self.get_option('project')})"
|
||||||
|
)
|
||||||
|
|
||||||
|
if (
|
||||||
|
stderr.startswith("Error: ")
|
||||||
|
and ": User does not have permission " in stderr
|
||||||
|
):
|
||||||
|
raise AnsibleConnectionFailure(
|
||||||
|
f"instance access denied: {self._instance()} (remote={self.get_option('remote')}, project={self.get_option('project')})"
|
||||||
|
)
|
||||||
|
|
||||||
|
if (
|
||||||
|
stderr.startswith("Error: ")
|
||||||
|
and ": User does not have entitlement " in stderr
|
||||||
|
):
|
||||||
|
raise AnsibleConnectionFailure(
|
||||||
|
f"instance access denied: {self._instance()} (remote={self.get_option('remote')}, project={self.get_option('project')})"
|
||||||
|
)
|
||||||
|
|
||||||
return process.returncode, stdout, stderr
|
return process.returncode, stdout, stderr
|
||||||
|
|
||||||
|
def _get_remote_uid_gid(self) -> tuple[int, int]:
|
||||||
|
"""Get the user and group ID of 'remote_user' from the instance."""
|
||||||
|
|
||||||
|
rc, uid_out, err = self.exec_command("/bin/id -u")
|
||||||
|
if rc != 0:
|
||||||
|
raise AnsibleError(
|
||||||
|
f"Failed to get remote uid for user {self.get_option('remote_user')}: {err}"
|
||||||
|
)
|
||||||
|
uid = uid_out.strip()
|
||||||
|
|
||||||
|
rc, gid_out, err = self.exec_command("/bin/id -g")
|
||||||
|
if rc != 0:
|
||||||
|
raise AnsibleError(
|
||||||
|
f"Failed to get remote gid for user {self.get_option('remote_user')}: {err}"
|
||||||
|
)
|
||||||
|
gid = gid_out.strip()
|
||||||
|
|
||||||
|
return int(uid), int(gid)
|
||||||
|
|
||||||
def put_file(self, in_path, out_path):
|
def put_file(self, in_path, out_path):
|
||||||
""" put a file from local to Incus """
|
""" put a file from local to Incus """
|
||||||
super(Connection, self).put_file(in_path, out_path)
|
super(Connection, self).put_file(in_path, out_path)
|
||||||
|
|
||||||
self._display.vvv(u"PUT {0} TO {1}".format(in_path, out_path),
|
self._display.vvv(f"PUT {in_path} TO {out_path}",
|
||||||
host=self._instance())
|
host=self._instance())
|
||||||
|
|
||||||
if not os.path.isfile(to_bytes(in_path, errors='surrogate_or_strict')):
|
if not os.path.isfile(to_bytes(in_path, errors='surrogate_or_strict')):
|
||||||
raise AnsibleFileNotFound("input path is not a file: %s" % in_path)
|
raise AnsibleFileNotFound(f"input path is not a file: {in_path}")
|
||||||
|
|
||||||
local_cmd = [
|
if self.get_option("remote_user") != "root":
|
||||||
self._incus_cmd,
|
uid, gid = self._get_remote_uid_gid()
|
||||||
"--project", self.get_option("project"),
|
local_cmd = [
|
||||||
"file", "push", "--quiet",
|
self._incus_cmd,
|
||||||
in_path,
|
"--project",
|
||||||
"%s:%s/%s" % (self.get_option("remote"),
|
self.get_option("project"),
|
||||||
self._instance(),
|
"file",
|
||||||
out_path)]
|
"push",
|
||||||
|
"--uid",
|
||||||
|
str(uid),
|
||||||
|
"--gid",
|
||||||
|
str(gid),
|
||||||
|
"--quiet",
|
||||||
|
in_path,
|
||||||
|
f"{self.get_option('remote')}:{self._instance()}/{out_path}",
|
||||||
|
]
|
||||||
|
else:
|
||||||
|
local_cmd = [
|
||||||
|
self._incus_cmd,
|
||||||
|
"--project",
|
||||||
|
self.get_option("project"),
|
||||||
|
"file",
|
||||||
|
"push",
|
||||||
|
"--quiet",
|
||||||
|
in_path,
|
||||||
|
f"{self.get_option('remote')}:{self._instance()}/{out_path}",
|
||||||
|
]
|
||||||
|
|
||||||
|
self._display.vvvvv(f"PUT {local_cmd}", host=self._instance())
|
||||||
|
|
||||||
local_cmd = [to_bytes(i, errors='surrogate_or_strict') for i in local_cmd]
|
local_cmd = [to_bytes(i, errors='surrogate_or_strict') for i in local_cmd]
|
||||||
|
|
||||||
@@ -150,16 +254,14 @@ class Connection(ConnectionBase):
|
|||||||
""" fetch a file from Incus to local """
|
""" fetch a file from Incus to local """
|
||||||
super(Connection, self).fetch_file(in_path, out_path)
|
super(Connection, self).fetch_file(in_path, out_path)
|
||||||
|
|
||||||
self._display.vvv(u"FETCH {0} TO {1}".format(in_path, out_path),
|
self._display.vvv(f"FETCH {in_path} TO {out_path}",
|
||||||
host=self._instance())
|
host=self._instance())
|
||||||
|
|
||||||
local_cmd = [
|
local_cmd = [
|
||||||
self._incus_cmd,
|
self._incus_cmd,
|
||||||
"--project", self.get_option("project"),
|
"--project", self.get_option("project"),
|
||||||
"file", "pull", "--quiet",
|
"file", "pull", "--quiet",
|
||||||
"%s:%s/%s" % (self.get_option("remote"),
|
f"{self.get_option('remote')}:{self._instance()}/{in_path}",
|
||||||
self._instance(),
|
|
||||||
in_path),
|
|
||||||
out_path]
|
out_path]
|
||||||
|
|
||||||
local_cmd = [to_bytes(i, errors='surrogate_or_strict') for i in local_cmd]
|
local_cmd = [to_bytes(i, errors='surrogate_or_strict') for i in local_cmd]
|
||||||
|
|||||||
@@ -7,31 +7,30 @@
|
|||||||
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
|
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||||
# SPDX-License-Identifier: GPL-3.0-or-later
|
# SPDX-License-Identifier: GPL-3.0-or-later
|
||||||
|
|
||||||
from __future__ import (absolute_import, division, print_function)
|
from __future__ import annotations
|
||||||
__metaclass__ = type
|
|
||||||
|
|
||||||
DOCUMENTATION = '''
|
DOCUMENTATION = r"""
|
||||||
author: Stephan Lohse (!UNKNOWN) <dev-github@ploek.org>
|
author: Stephan Lohse (!UNKNOWN) <dev-github@ploek.org>
|
||||||
name: iocage
|
name: iocage
|
||||||
short_description: Run tasks in iocage jails
|
short_description: Run tasks in iocage jails
|
||||||
|
description:
|
||||||
|
- Run commands or put/fetch files to an existing iocage jail.
|
||||||
|
options:
|
||||||
|
remote_addr:
|
||||||
description:
|
description:
|
||||||
- Run commands or put/fetch files to an existing iocage jail
|
- Path to the jail.
|
||||||
options:
|
type: string
|
||||||
remote_addr:
|
vars:
|
||||||
description:
|
- name: ansible_host
|
||||||
- Path to the jail
|
- name: ansible_iocage_host
|
||||||
type: string
|
remote_user:
|
||||||
vars:
|
description:
|
||||||
- name: ansible_host
|
- User to execute as inside the jail.
|
||||||
- name: ansible_iocage_host
|
type: string
|
||||||
remote_user:
|
vars:
|
||||||
description:
|
- name: ansible_user
|
||||||
- User to execute as inside the jail
|
- name: ansible_iocage_user
|
||||||
type: string
|
"""
|
||||||
vars:
|
|
||||||
- name: ansible_user
|
|
||||||
- name: ansible_iocage_user
|
|
||||||
'''
|
|
||||||
|
|
||||||
import subprocess
|
import subprocess
|
||||||
|
|
||||||
@@ -55,11 +54,12 @@ class Connection(Jail):
|
|||||||
|
|
||||||
jail_uuid = self.get_jail_uuid()
|
jail_uuid = self.get_jail_uuid()
|
||||||
|
|
||||||
kwargs[Jail.modified_jailname_key] = 'ioc-{0}'.format(jail_uuid)
|
kwargs[Jail.modified_jailname_key] = f'ioc-{jail_uuid}'
|
||||||
|
|
||||||
display.vvv(u"Jail {iocjail} has been translated to {rawjail}".format(
|
display.vvv(
|
||||||
iocjail=self.ioc_jail, rawjail=kwargs[Jail.modified_jailname_key]),
|
f"Jail {self.ioc_jail} has been translated to {kwargs[Jail.modified_jailname_key]}",
|
||||||
host=kwargs[Jail.modified_jailname_key])
|
host=kwargs[Jail.modified_jailname_key]
|
||||||
|
)
|
||||||
|
|
||||||
super(Connection, self).__init__(play_context, new_stdin, *args, **kwargs)
|
super(Connection, self).__init__(play_context, new_stdin, *args, **kwargs)
|
||||||
|
|
||||||
@@ -81,6 +81,6 @@ class Connection(Jail):
|
|||||||
p.wait()
|
p.wait()
|
||||||
|
|
||||||
if p.returncode != 0:
|
if p.returncode != 0:
|
||||||
raise AnsibleError(u"iocage returned an error: {0}".format(stdout))
|
raise AnsibleError(f"iocage returned an error: {stdout}")
|
||||||
|
|
||||||
return stdout.strip('\n')
|
return stdout.strip('\n')
|
||||||
|
|||||||
@@ -7,41 +7,40 @@
|
|||||||
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
|
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||||
# SPDX-License-Identifier: GPL-3.0-or-later
|
# SPDX-License-Identifier: GPL-3.0-or-later
|
||||||
|
|
||||||
from __future__ import (absolute_import, division, print_function)
|
from __future__ import annotations
|
||||||
__metaclass__ = type
|
|
||||||
|
|
||||||
DOCUMENTATION = '''
|
DOCUMENTATION = r"""
|
||||||
author: Ansible Core Team
|
author: Ansible Core Team
|
||||||
name: jail
|
name: jail
|
||||||
short_description: Run tasks in jails
|
short_description: Run tasks in jails
|
||||||
|
description:
|
||||||
|
- Run commands or put/fetch files to an existing jail.
|
||||||
|
options:
|
||||||
|
remote_addr:
|
||||||
description:
|
description:
|
||||||
- Run commands or put/fetch files to an existing jail
|
- Path to the jail.
|
||||||
options:
|
type: string
|
||||||
remote_addr:
|
default: inventory_hostname
|
||||||
description:
|
vars:
|
||||||
- Path to the jail
|
- name: inventory_hostname
|
||||||
type: string
|
- name: ansible_host
|
||||||
default: inventory_hostname
|
- name: ansible_jail_host
|
||||||
vars:
|
remote_user:
|
||||||
- name: inventory_hostname
|
description:
|
||||||
- name: ansible_host
|
- User to execute as inside the jail.
|
||||||
- name: ansible_jail_host
|
type: string
|
||||||
remote_user:
|
vars:
|
||||||
description:
|
- name: ansible_user
|
||||||
- User to execute as inside the jail
|
- name: ansible_jail_user
|
||||||
type: string
|
"""
|
||||||
vars:
|
|
||||||
- name: ansible_user
|
|
||||||
- name: ansible_jail_user
|
|
||||||
'''
|
|
||||||
|
|
||||||
import os
|
import os
|
||||||
import os.path
|
import os.path
|
||||||
import subprocess
|
import subprocess
|
||||||
import traceback
|
import traceback
|
||||||
|
from shlex import quote as shlex_quote
|
||||||
|
|
||||||
from ansible.errors import AnsibleError
|
from ansible.errors import AnsibleError
|
||||||
from ansible.module_utils.six.moves import shlex_quote
|
|
||||||
from ansible.module_utils.common.process import get_bin_path
|
from ansible.module_utils.common.process import get_bin_path
|
||||||
from ansible.module_utils.common.text.converters import to_bytes, to_native, to_text
|
from ansible.module_utils.common.text.converters import to_bytes, to_native, to_text
|
||||||
from ansible.plugins.connection import ConnectionBase, BUFSIZE
|
from ansible.plugins.connection import ConnectionBase, BUFSIZE
|
||||||
@@ -75,14 +74,14 @@ class Connection(ConnectionBase):
|
|||||||
self.jexec_cmd = self._search_executable('jexec')
|
self.jexec_cmd = self._search_executable('jexec')
|
||||||
|
|
||||||
if self.jail not in self.list_jails():
|
if self.jail not in self.list_jails():
|
||||||
raise AnsibleError("incorrect jail name %s" % self.jail)
|
raise AnsibleError(f"incorrect jail name {self.jail}")
|
||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
def _search_executable(executable):
|
def _search_executable(executable):
|
||||||
try:
|
try:
|
||||||
return get_bin_path(executable)
|
return get_bin_path(executable)
|
||||||
except ValueError:
|
except ValueError:
|
||||||
raise AnsibleError("%s command not found in PATH" % executable)
|
raise AnsibleError(f"{executable} command not found in PATH")
|
||||||
|
|
||||||
def list_jails(self):
|
def list_jails(self):
|
||||||
p = subprocess.Popen([self.jls_cmd, '-q', 'name'],
|
p = subprocess.Popen([self.jls_cmd, '-q', 'name'],
|
||||||
@@ -97,7 +96,7 @@ class Connection(ConnectionBase):
|
|||||||
""" connect to the jail; nothing to do here """
|
""" connect to the jail; nothing to do here """
|
||||||
super(Connection, self)._connect()
|
super(Connection, self)._connect()
|
||||||
if not self._connected:
|
if not self._connected:
|
||||||
display.vvv(u"ESTABLISH JAIL CONNECTION FOR USER: {0}".format(self._play_context.remote_user), host=self.jail)
|
display.vvv(f"ESTABLISH JAIL CONNECTION FOR USER: {self._play_context.remote_user}", host=self.jail)
|
||||||
self._connected = True
|
self._connected = True
|
||||||
|
|
||||||
def _buffered_exec_command(self, cmd, stdin=subprocess.PIPE):
|
def _buffered_exec_command(self, cmd, stdin=subprocess.PIPE):
|
||||||
@@ -115,11 +114,11 @@ class Connection(ConnectionBase):
|
|||||||
if self._play_context.remote_user is not None:
|
if self._play_context.remote_user is not None:
|
||||||
local_cmd += ['-U', self._play_context.remote_user]
|
local_cmd += ['-U', self._play_context.remote_user]
|
||||||
# update HOME since -U does not update the jail environment
|
# update HOME since -U does not update the jail environment
|
||||||
set_env = 'HOME=~' + self._play_context.remote_user + ' '
|
set_env = f"HOME=~{self._play_context.remote_user} "
|
||||||
|
|
||||||
local_cmd += [self.jail, self._play_context.executable, '-c', set_env + cmd]
|
local_cmd += [self.jail, self._play_context.executable, '-c', set_env + cmd]
|
||||||
|
|
||||||
display.vvv("EXEC %s" % (local_cmd,), host=self.jail)
|
display.vvv(f"EXEC {local_cmd}", host=self.jail)
|
||||||
local_cmd = [to_bytes(i, errors='surrogate_or_strict') for i in local_cmd]
|
local_cmd = [to_bytes(i, errors='surrogate_or_strict') for i in local_cmd]
|
||||||
p = subprocess.Popen(local_cmd, shell=False, stdin=stdin,
|
p = subprocess.Popen(local_cmd, shell=False, stdin=stdin,
|
||||||
stdout=subprocess.PIPE, stderr=subprocess.PIPE)
|
stdout=subprocess.PIPE, stderr=subprocess.PIPE)
|
||||||
@@ -144,7 +143,7 @@ class Connection(ConnectionBase):
|
|||||||
exist in any given chroot. So for now we're choosing "/" instead.
|
exist in any given chroot. So for now we're choosing "/" instead.
|
||||||
This also happens to be the former default.
|
This also happens to be the former default.
|
||||||
|
|
||||||
Can revisit using $HOME instead if it's a problem
|
Can revisit using $HOME instead if it is a problem
|
||||||
"""
|
"""
|
||||||
if not remote_path.startswith(os.path.sep):
|
if not remote_path.startswith(os.path.sep):
|
||||||
remote_path = os.path.join(os.path.sep, remote_path)
|
remote_path = os.path.join(os.path.sep, remote_path)
|
||||||
@@ -153,7 +152,7 @@ class Connection(ConnectionBase):
|
|||||||
def put_file(self, in_path, out_path):
|
def put_file(self, in_path, out_path):
|
||||||
""" transfer a file from local to jail """
|
""" transfer a file from local to jail """
|
||||||
super(Connection, self).put_file(in_path, out_path)
|
super(Connection, self).put_file(in_path, out_path)
|
||||||
display.vvv("PUT %s TO %s" % (in_path, out_path), host=self.jail)
|
display.vvv(f"PUT {in_path} TO {out_path}", host=self.jail)
|
||||||
|
|
||||||
out_path = shlex_quote(self._prefix_login_path(out_path))
|
out_path = shlex_quote(self._prefix_login_path(out_path))
|
||||||
try:
|
try:
|
||||||
@@ -163,27 +162,27 @@ class Connection(ConnectionBase):
|
|||||||
else:
|
else:
|
||||||
count = ''
|
count = ''
|
||||||
try:
|
try:
|
||||||
p = self._buffered_exec_command('dd of=%s bs=%s%s' % (out_path, BUFSIZE, count), stdin=in_file)
|
p = self._buffered_exec_command(f'dd of={out_path} bs={BUFSIZE}{count}', stdin=in_file)
|
||||||
except OSError:
|
except OSError:
|
||||||
raise AnsibleError("jail connection requires dd command in the jail")
|
raise AnsibleError("jail connection requires dd command in the jail")
|
||||||
try:
|
try:
|
||||||
stdout, stderr = p.communicate()
|
stdout, stderr = p.communicate()
|
||||||
except Exception:
|
except Exception:
|
||||||
traceback.print_exc()
|
traceback.print_exc()
|
||||||
raise AnsibleError("failed to transfer file %s to %s" % (in_path, out_path))
|
raise AnsibleError(f"failed to transfer file {in_path} to {out_path}")
|
||||||
if p.returncode != 0:
|
if p.returncode != 0:
|
||||||
raise AnsibleError("failed to transfer file %s to %s:\n%s\n%s" % (in_path, out_path, to_native(stdout), to_native(stderr)))
|
raise AnsibleError(f"failed to transfer file {in_path} to {out_path}:\n{to_native(stdout)}\n{to_native(stderr)}")
|
||||||
except IOError:
|
except IOError:
|
||||||
raise AnsibleError("file or module does not exist at: %s" % in_path)
|
raise AnsibleError(f"file or module does not exist at: {in_path}")
|
||||||
|
|
||||||
def fetch_file(self, in_path, out_path):
|
def fetch_file(self, in_path, out_path):
|
||||||
""" fetch a file from jail to local """
|
""" fetch a file from jail to local """
|
||||||
super(Connection, self).fetch_file(in_path, out_path)
|
super(Connection, self).fetch_file(in_path, out_path)
|
||||||
display.vvv("FETCH %s TO %s" % (in_path, out_path), host=self.jail)
|
display.vvv(f"FETCH {in_path} TO {out_path}", host=self.jail)
|
||||||
|
|
||||||
in_path = shlex_quote(self._prefix_login_path(in_path))
|
in_path = shlex_quote(self._prefix_login_path(in_path))
|
||||||
try:
|
try:
|
||||||
p = self._buffered_exec_command('dd if=%s bs=%s' % (in_path, BUFSIZE))
|
p = self._buffered_exec_command(f'dd if={in_path} bs={BUFSIZE}')
|
||||||
except OSError:
|
except OSError:
|
||||||
raise AnsibleError("jail connection requires dd command in the jail")
|
raise AnsibleError("jail connection requires dd command in the jail")
|
||||||
|
|
||||||
@@ -195,10 +194,10 @@ class Connection(ConnectionBase):
|
|||||||
chunk = p.stdout.read(BUFSIZE)
|
chunk = p.stdout.read(BUFSIZE)
|
||||||
except Exception:
|
except Exception:
|
||||||
traceback.print_exc()
|
traceback.print_exc()
|
||||||
raise AnsibleError("failed to transfer file %s to %s" % (in_path, out_path))
|
raise AnsibleError(f"failed to transfer file {in_path} to {out_path}")
|
||||||
stdout, stderr = p.communicate()
|
stdout, stderr = p.communicate()
|
||||||
if p.returncode != 0:
|
if p.returncode != 0:
|
||||||
raise AnsibleError("failed to transfer file %s to %s:\n%s\n%s" % (in_path, out_path, to_native(stdout), to_native(stderr)))
|
raise AnsibleError(f"failed to transfer file {in_path} to {out_path}:\n{to_native(stdout)}\n{to_native(stderr)}")
|
||||||
|
|
||||||
def close(self):
|
def close(self):
|
||||||
""" terminate the connection; nothing to do here """
|
""" terminate the connection; nothing to do here """
|
||||||
|
|||||||
@@ -4,34 +4,33 @@
|
|||||||
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
|
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||||
# SPDX-License-Identifier: GPL-3.0-or-later
|
# SPDX-License-Identifier: GPL-3.0-or-later
|
||||||
|
|
||||||
from __future__ import (absolute_import, division, print_function)
|
from __future__ import annotations
|
||||||
__metaclass__ = type
|
|
||||||
|
|
||||||
DOCUMENTATION = '''
|
DOCUMENTATION = r"""
|
||||||
author: Joerg Thalheim (!UNKNOWN) <joerg@higgsboson.tk>
|
author: Joerg Thalheim (!UNKNOWN) <joerg@higgsboson.tk>
|
||||||
name: lxc
|
name: lxc
|
||||||
short_description: Run tasks in lxc containers via lxc python library
|
short_description: Run tasks in LXC containers using lxc python library
|
||||||
|
description:
|
||||||
|
- Run commands or put/fetch files to an existing LXC container using lxc python library.
|
||||||
|
options:
|
||||||
|
remote_addr:
|
||||||
description:
|
description:
|
||||||
- Run commands or put/fetch files to an existing lxc container using lxc python library
|
- Container identifier.
|
||||||
options:
|
type: string
|
||||||
remote_addr:
|
default: inventory_hostname
|
||||||
description:
|
vars:
|
||||||
- Container identifier
|
- name: inventory_hostname
|
||||||
type: string
|
- name: ansible_host
|
||||||
default: inventory_hostname
|
- name: ansible_lxc_host
|
||||||
vars:
|
executable:
|
||||||
- name: inventory_hostname
|
default: /bin/sh
|
||||||
- name: ansible_host
|
description:
|
||||||
- name: ansible_lxc_host
|
- Shell executable.
|
||||||
executable:
|
type: string
|
||||||
default: /bin/sh
|
vars:
|
||||||
description:
|
- name: ansible_executable
|
||||||
- Shell executable
|
- name: ansible_lxc_executable
|
||||||
type: string
|
"""
|
||||||
vars:
|
|
||||||
- name: ansible_executable
|
|
||||||
- name: ansible_lxc_executable
|
|
||||||
'''
|
|
||||||
|
|
||||||
import os
|
import os
|
||||||
import shutil
|
import shutil
|
||||||
@@ -82,7 +81,7 @@ class Connection(ConnectionBase):
|
|||||||
self._display.vvv("THIS IS A LOCAL LXC DIR", host=self.container_name)
|
self._display.vvv("THIS IS A LOCAL LXC DIR", host=self.container_name)
|
||||||
self.container = _lxc.Container(self.container_name)
|
self.container = _lxc.Container(self.container_name)
|
||||||
if self.container.state == "STOPPED":
|
if self.container.state == "STOPPED":
|
||||||
raise errors.AnsibleError("%s is not running" % self.container_name)
|
raise errors.AnsibleError(f"{self.container_name} is not running")
|
||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
def _communicate(pid, in_data, stdin, stdout, stderr):
|
def _communicate(pid, in_data, stdin, stdout, stderr):
|
||||||
@@ -144,10 +143,10 @@ class Connection(ConnectionBase):
|
|||||||
read_stdin, write_stdin = os.pipe()
|
read_stdin, write_stdin = os.pipe()
|
||||||
kwargs['stdin'] = self._set_nonblocking(read_stdin)
|
kwargs['stdin'] = self._set_nonblocking(read_stdin)
|
||||||
|
|
||||||
self._display.vvv("EXEC %s" % (local_cmd), host=self.container_name)
|
self._display.vvv(f"EXEC {local_cmd}", host=self.container_name)
|
||||||
pid = self.container.attach(_lxc.attach_run_command, local_cmd, **kwargs)
|
pid = self.container.attach(_lxc.attach_run_command, local_cmd, **kwargs)
|
||||||
if pid == -1:
|
if pid == -1:
|
||||||
msg = "failed to attach to container %s" % self.container_name
|
msg = f"failed to attach to container {self.container_name}"
|
||||||
raise errors.AnsibleError(msg)
|
raise errors.AnsibleError(msg)
|
||||||
|
|
||||||
write_stdout = os.close(write_stdout)
|
write_stdout = os.close(write_stdout)
|
||||||
@@ -174,18 +173,18 @@ class Connection(ConnectionBase):
|
|||||||
def put_file(self, in_path, out_path):
|
def put_file(self, in_path, out_path):
|
||||||
''' transfer a file from local to lxc '''
|
''' transfer a file from local to lxc '''
|
||||||
super(Connection, self).put_file(in_path, out_path)
|
super(Connection, self).put_file(in_path, out_path)
|
||||||
self._display.vvv("PUT %s TO %s" % (in_path, out_path), host=self.container_name)
|
self._display.vvv(f"PUT {in_path} TO {out_path}", host=self.container_name)
|
||||||
in_path = to_bytes(in_path, errors='surrogate_or_strict')
|
in_path = to_bytes(in_path, errors='surrogate_or_strict')
|
||||||
out_path = to_bytes(out_path, errors='surrogate_or_strict')
|
out_path = to_bytes(out_path, errors='surrogate_or_strict')
|
||||||
|
|
||||||
if not os.path.exists(in_path):
|
if not os.path.exists(in_path):
|
||||||
msg = "file or module does not exist: %s" % in_path
|
msg = f"file or module does not exist: {in_path}"
|
||||||
raise errors.AnsibleFileNotFound(msg)
|
raise errors.AnsibleFileNotFound(msg)
|
||||||
try:
|
try:
|
||||||
src_file = open(in_path, "rb")
|
src_file = open(in_path, "rb")
|
||||||
except IOError:
|
except IOError:
|
||||||
traceback.print_exc()
|
traceback.print_exc()
|
||||||
raise errors.AnsibleError("failed to open input file to %s" % in_path)
|
raise errors.AnsibleError(f"failed to open input file to {in_path}")
|
||||||
try:
|
try:
|
||||||
def write_file(args):
|
def write_file(args):
|
||||||
with open(out_path, 'wb+') as dst_file:
|
with open(out_path, 'wb+') as dst_file:
|
||||||
@@ -194,7 +193,7 @@ class Connection(ConnectionBase):
|
|||||||
self.container.attach_wait(write_file, None)
|
self.container.attach_wait(write_file, None)
|
||||||
except IOError:
|
except IOError:
|
||||||
traceback.print_exc()
|
traceback.print_exc()
|
||||||
msg = "failed to transfer file to %s" % out_path
|
msg = f"failed to transfer file to {out_path}"
|
||||||
raise errors.AnsibleError(msg)
|
raise errors.AnsibleError(msg)
|
||||||
finally:
|
finally:
|
||||||
src_file.close()
|
src_file.close()
|
||||||
@@ -202,7 +201,7 @@ class Connection(ConnectionBase):
|
|||||||
def fetch_file(self, in_path, out_path):
|
def fetch_file(self, in_path, out_path):
|
||||||
''' fetch a file from lxc to local '''
|
''' fetch a file from lxc to local '''
|
||||||
super(Connection, self).fetch_file(in_path, out_path)
|
super(Connection, self).fetch_file(in_path, out_path)
|
||||||
self._display.vvv("FETCH %s TO %s" % (in_path, out_path), host=self.container_name)
|
self._display.vvv(f"FETCH {in_path} TO {out_path}", host=self.container_name)
|
||||||
in_path = to_bytes(in_path, errors='surrogate_or_strict')
|
in_path = to_bytes(in_path, errors='surrogate_or_strict')
|
||||||
out_path = to_bytes(out_path, errors='surrogate_or_strict')
|
out_path = to_bytes(out_path, errors='surrogate_or_strict')
|
||||||
|
|
||||||
@@ -210,7 +209,7 @@ class Connection(ConnectionBase):
|
|||||||
dst_file = open(out_path, "wb")
|
dst_file = open(out_path, "wb")
|
||||||
except IOError:
|
except IOError:
|
||||||
traceback.print_exc()
|
traceback.print_exc()
|
||||||
msg = "failed to open output file %s" % out_path
|
msg = f"failed to open output file {out_path}"
|
||||||
raise errors.AnsibleError(msg)
|
raise errors.AnsibleError(msg)
|
||||||
try:
|
try:
|
||||||
def write_file(args):
|
def write_file(args):
|
||||||
@@ -225,7 +224,7 @@ class Connection(ConnectionBase):
|
|||||||
self.container.attach_wait(write_file, None)
|
self.container.attach_wait(write_file, None)
|
||||||
except IOError:
|
except IOError:
|
||||||
traceback.print_exc()
|
traceback.print_exc()
|
||||||
msg = "failed to transfer file from %s to %s" % (in_path, out_path)
|
msg = f"failed to transfer file from {in_path} to {out_path}"
|
||||||
raise errors.AnsibleError(msg)
|
raise errors.AnsibleError(msg)
|
||||||
finally:
|
finally:
|
||||||
dst_file.close()
|
dst_file.close()
|
||||||
|
|||||||
@@ -4,51 +4,75 @@
|
|||||||
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
|
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||||
# SPDX-License-Identifier: GPL-3.0-or-later
|
# SPDX-License-Identifier: GPL-3.0-or-later
|
||||||
|
|
||||||
from __future__ import (absolute_import, division, print_function)
|
from __future__ import annotations
|
||||||
__metaclass__ = type
|
|
||||||
|
|
||||||
DOCUMENTATION = '''
|
DOCUMENTATION = r"""
|
||||||
author: Matt Clay (@mattclay) <matt@mystile.com>
|
author: Matt Clay (@mattclay) <matt@mystile.com>
|
||||||
name: lxd
|
name: lxd
|
||||||
short_description: Run tasks in LXD instances via C(lxc) CLI
|
short_description: Run tasks in LXD instances using C(lxc) CLI
|
||||||
|
description:
|
||||||
|
- Run commands or put/fetch files to an existing instance using C(lxc) CLI.
|
||||||
|
options:
|
||||||
|
remote_addr:
|
||||||
description:
|
description:
|
||||||
- Run commands or put/fetch files to an existing instance using C(lxc) CLI.
|
- Instance (container/VM) identifier.
|
||||||
options:
|
- Since community.general 8.0.0, a FQDN can be provided; in that case, the first component (the part before C(.)) is
|
||||||
remote_addr:
|
used as the instance identifier.
|
||||||
description:
|
type: string
|
||||||
- Instance (container/VM) identifier.
|
default: inventory_hostname
|
||||||
- Since community.general 8.0.0, a FQDN can be provided; in that case, the first component (the part before C(.))
|
vars:
|
||||||
is used as the instance identifier.
|
- name: inventory_hostname
|
||||||
type: string
|
- name: ansible_host
|
||||||
default: inventory_hostname
|
- name: ansible_lxd_host
|
||||||
vars:
|
executable:
|
||||||
- name: inventory_hostname
|
description:
|
||||||
- name: ansible_host
|
- Shell to use for execution inside instance.
|
||||||
- name: ansible_lxd_host
|
type: string
|
||||||
executable:
|
default: /bin/sh
|
||||||
description:
|
vars:
|
||||||
- Shell to use for execution inside instance.
|
- name: ansible_executable
|
||||||
type: string
|
- name: ansible_lxd_executable
|
||||||
default: /bin/sh
|
lxd_become_method:
|
||||||
vars:
|
description:
|
||||||
- name: ansible_executable
|
- Become command used to switch to a non-root user.
|
||||||
- name: ansible_lxd_executable
|
- Is only used when O(remote_user) is not V(root).
|
||||||
remote:
|
type: str
|
||||||
description:
|
default: /bin/su
|
||||||
- Name of the LXD remote to use.
|
vars:
|
||||||
type: string
|
- name: lxd_become_method
|
||||||
default: local
|
version_added: 10.4.0
|
||||||
vars:
|
remote:
|
||||||
- name: ansible_lxd_remote
|
description:
|
||||||
version_added: 2.0.0
|
- Name of the LXD remote to use.
|
||||||
project:
|
type: string
|
||||||
description:
|
default: local
|
||||||
- Name of the LXD project to use.
|
vars:
|
||||||
type: string
|
- name: ansible_lxd_remote
|
||||||
vars:
|
version_added: 2.0.0
|
||||||
- name: ansible_lxd_project
|
remote_user:
|
||||||
version_added: 2.0.0
|
description:
|
||||||
'''
|
- User to login/authenticate as.
|
||||||
|
- Can be set from the CLI via the C(--user) or C(-u) options.
|
||||||
|
type: string
|
||||||
|
default: root
|
||||||
|
vars:
|
||||||
|
- name: ansible_user
|
||||||
|
env:
|
||||||
|
- name: ANSIBLE_REMOTE_USER
|
||||||
|
ini:
|
||||||
|
- section: defaults
|
||||||
|
key: remote_user
|
||||||
|
keyword:
|
||||||
|
- name: remote_user
|
||||||
|
version_added: 10.4.0
|
||||||
|
project:
|
||||||
|
description:
|
||||||
|
- Name of the LXD project to use.
|
||||||
|
type: string
|
||||||
|
vars:
|
||||||
|
- name: ansible_lxd_project
|
||||||
|
version_added: 2.0.0
|
||||||
|
"""
|
||||||
|
|
||||||
import os
|
import os
|
||||||
from subprocess import Popen, PIPE
|
from subprocess import Popen, PIPE
|
||||||
@@ -64,7 +88,6 @@ class Connection(ConnectionBase):
|
|||||||
|
|
||||||
transport = 'community.general.lxd'
|
transport = 'community.general.lxd'
|
||||||
has_pipelining = True
|
has_pipelining = True
|
||||||
default_user = 'root'
|
|
||||||
|
|
||||||
def __init__(self, play_context, new_stdin, *args, **kwargs):
|
def __init__(self, play_context, new_stdin, *args, **kwargs):
|
||||||
super(Connection, self).__init__(play_context, new_stdin, *args, **kwargs)
|
super(Connection, self).__init__(play_context, new_stdin, *args, **kwargs)
|
||||||
@@ -74,9 +97,6 @@ class Connection(ConnectionBase):
|
|||||||
except ValueError:
|
except ValueError:
|
||||||
raise AnsibleError("lxc command not found in PATH")
|
raise AnsibleError("lxc command not found in PATH")
|
||||||
|
|
||||||
if self._play_context.remote_user is not None and self._play_context.remote_user != 'root':
|
|
||||||
self._display.warning('lxd does not support remote_user, using default: root')
|
|
||||||
|
|
||||||
def _host(self):
|
def _host(self):
|
||||||
""" translate remote_addr to lxd (short) hostname """
|
""" translate remote_addr to lxd (short) hostname """
|
||||||
return self.get_option("remote_addr").split(".", 1)[0]
|
return self.get_option("remote_addr").split(".", 1)[0]
|
||||||
@@ -86,26 +106,41 @@ class Connection(ConnectionBase):
|
|||||||
super(Connection, self)._connect()
|
super(Connection, self)._connect()
|
||||||
|
|
||||||
if not self._connected:
|
if not self._connected:
|
||||||
self._display.vvv(u"ESTABLISH LXD CONNECTION FOR USER: root", host=self._host())
|
self._display.vvv(f"ESTABLISH LXD CONNECTION FOR USER: {self.get_option('remote_user')}", host=self._host())
|
||||||
self._connected = True
|
self._connected = True
|
||||||
|
|
||||||
|
def _build_command(self, cmd) -> str:
|
||||||
|
"""build the command to execute on the lxd host"""
|
||||||
|
|
||||||
|
exec_cmd = [self._lxc_cmd]
|
||||||
|
|
||||||
|
if self.get_option("project"):
|
||||||
|
exec_cmd.extend(["--project", self.get_option("project")])
|
||||||
|
|
||||||
|
exec_cmd.extend(["exec", f"{self.get_option('remote')}:{self._host()}", "--"])
|
||||||
|
|
||||||
|
if self.get_option("remote_user") != "root":
|
||||||
|
self._display.vvv(
|
||||||
|
f"INFO: Running as non-root user: {self.get_option('remote_user')}, \
|
||||||
|
trying to run 'lxc exec' with become method: {self.get_option('lxd_become_method')}",
|
||||||
|
host=self._host(),
|
||||||
|
)
|
||||||
|
exec_cmd.extend(
|
||||||
|
[self.get_option("lxd_become_method"), self.get_option("remote_user"), "-c"]
|
||||||
|
)
|
||||||
|
|
||||||
|
exec_cmd.extend([self.get_option("executable"), "-c", cmd])
|
||||||
|
|
||||||
|
return exec_cmd
|
||||||
|
|
||||||
def exec_command(self, cmd, in_data=None, sudoable=True):
|
def exec_command(self, cmd, in_data=None, sudoable=True):
|
||||||
""" execute a command on the lxd host """
|
""" execute a command on the lxd host """
|
||||||
super(Connection, self).exec_command(cmd, in_data=in_data, sudoable=sudoable)
|
super(Connection, self).exec_command(cmd, in_data=in_data, sudoable=sudoable)
|
||||||
|
|
||||||
self._display.vvv(u"EXEC {0}".format(cmd), host=self._host())
|
self._display.vvv(f"EXEC {cmd}", host=self._host())
|
||||||
|
|
||||||
local_cmd = [self._lxc_cmd]
|
local_cmd = self._build_command(cmd)
|
||||||
if self.get_option("project"):
|
self._display.vvvvv(f"EXEC {local_cmd}", host=self._host())
|
||||||
local_cmd.extend(["--project", self.get_option("project")])
|
|
||||||
local_cmd.extend([
|
|
||||||
"exec",
|
|
||||||
"%s:%s" % (self.get_option("remote"), self._host()),
|
|
||||||
"--",
|
|
||||||
self.get_option("executable"), "-c", cmd
|
|
||||||
])
|
|
||||||
|
|
||||||
self._display.vvvvv(u"EXEC {0}".format(local_cmd), host=self._host())
|
|
||||||
|
|
||||||
local_cmd = [to_bytes(i, errors='surrogate_or_strict') for i in local_cmd]
|
local_cmd = [to_bytes(i, errors='surrogate_or_strict') for i in local_cmd]
|
||||||
in_data = to_bytes(in_data, errors='surrogate_or_strict', nonstring='passthru')
|
in_data = to_bytes(in_data, errors='surrogate_or_strict', nonstring='passthru')
|
||||||
@@ -116,33 +151,73 @@ class Connection(ConnectionBase):
|
|||||||
stdout = to_text(stdout)
|
stdout = to_text(stdout)
|
||||||
stderr = to_text(stderr)
|
stderr = to_text(stderr)
|
||||||
|
|
||||||
self._display.vvvvv(u"EXEC lxc output: {0} {1}".format(stdout, stderr), host=self._host())
|
self._display.vvvvv(f"EXEC lxc output: {stdout} {stderr}", host=self._host())
|
||||||
|
|
||||||
if "is not running" in stderr:
|
if "is not running" in stderr:
|
||||||
raise AnsibleConnectionFailure("instance not running: %s" % self._host())
|
raise AnsibleConnectionFailure(f"instance not running: {self._host()}")
|
||||||
|
|
||||||
if stderr.strip() == "Error: Instance not found" or stderr.strip() == "error: not found":
|
if stderr.strip() == "Error: Instance not found" or stderr.strip() == "error: not found":
|
||||||
raise AnsibleConnectionFailure("instance not found: %s" % self._host())
|
raise AnsibleConnectionFailure(f"instance not found: {self._host()}")
|
||||||
|
|
||||||
return process.returncode, stdout, stderr
|
return process.returncode, stdout, stderr
|
||||||
|
|
||||||
|
def _get_remote_uid_gid(self) -> tuple[int, int]:
|
||||||
|
"""Get the user and group ID of 'remote_user' from the instance."""
|
||||||
|
|
||||||
|
rc, uid_out, err = self.exec_command("/bin/id -u")
|
||||||
|
if rc != 0:
|
||||||
|
raise AnsibleError(
|
||||||
|
f"Failed to get remote uid for user {self.get_option('remote_user')}: {err}"
|
||||||
|
)
|
||||||
|
uid = uid_out.strip()
|
||||||
|
|
||||||
|
rc, gid_out, err = self.exec_command("/bin/id -g")
|
||||||
|
if rc != 0:
|
||||||
|
raise AnsibleError(
|
||||||
|
f"Failed to get remote gid for user {self.get_option('remote_user')}: {err}"
|
||||||
|
)
|
||||||
|
gid = gid_out.strip()
|
||||||
|
|
||||||
|
return int(uid), int(gid)
|
||||||
|
|
||||||
def put_file(self, in_path, out_path):
|
def put_file(self, in_path, out_path):
|
||||||
""" put a file from local to lxd """
|
""" put a file from local to lxd """
|
||||||
super(Connection, self).put_file(in_path, out_path)
|
super(Connection, self).put_file(in_path, out_path)
|
||||||
|
|
||||||
self._display.vvv(u"PUT {0} TO {1}".format(in_path, out_path), host=self._host())
|
self._display.vvv(f"PUT {in_path} TO {out_path}", host=self._host())
|
||||||
|
|
||||||
if not os.path.isfile(to_bytes(in_path, errors='surrogate_or_strict')):
|
if not os.path.isfile(to_bytes(in_path, errors='surrogate_or_strict')):
|
||||||
raise AnsibleFileNotFound("input path is not a file: %s" % in_path)
|
raise AnsibleFileNotFound(f"input path is not a file: {in_path}")
|
||||||
|
|
||||||
local_cmd = [self._lxc_cmd]
|
local_cmd = [self._lxc_cmd]
|
||||||
if self.get_option("project"):
|
if self.get_option("project"):
|
||||||
local_cmd.extend(["--project", self.get_option("project")])
|
local_cmd.extend(["--project", self.get_option("project")])
|
||||||
local_cmd.extend([
|
|
||||||
"file", "push",
|
if self.get_option("remote_user") != "root":
|
||||||
in_path,
|
uid, gid = self._get_remote_uid_gid()
|
||||||
"%s:%s/%s" % (self.get_option("remote"), self._host(), out_path)
|
local_cmd.extend(
|
||||||
])
|
[
|
||||||
|
"file",
|
||||||
|
"push",
|
||||||
|
"--uid",
|
||||||
|
str(uid),
|
||||||
|
"--gid",
|
||||||
|
str(gid),
|
||||||
|
in_path,
|
||||||
|
f"{self.get_option('remote')}:{self._host()}/{out_path}",
|
||||||
|
]
|
||||||
|
)
|
||||||
|
else:
|
||||||
|
local_cmd.extend(
|
||||||
|
[
|
||||||
|
"file",
|
||||||
|
"push",
|
||||||
|
in_path,
|
||||||
|
f"{self.get_option('remote')}:{self._host()}/{out_path}",
|
||||||
|
]
|
||||||
|
)
|
||||||
|
|
||||||
|
self._display.vvvvv(f"PUT {local_cmd}", host=self._host())
|
||||||
|
|
||||||
local_cmd = [to_bytes(i, errors='surrogate_or_strict') for i in local_cmd]
|
local_cmd = [to_bytes(i, errors='surrogate_or_strict') for i in local_cmd]
|
||||||
|
|
||||||
@@ -153,14 +228,14 @@ class Connection(ConnectionBase):
|
|||||||
""" fetch a file from lxd to local """
|
""" fetch a file from lxd to local """
|
||||||
super(Connection, self).fetch_file(in_path, out_path)
|
super(Connection, self).fetch_file(in_path, out_path)
|
||||||
|
|
||||||
self._display.vvv(u"FETCH {0} TO {1}".format(in_path, out_path), host=self._host())
|
self._display.vvv(f"FETCH {in_path} TO {out_path}", host=self._host())
|
||||||
|
|
||||||
local_cmd = [self._lxc_cmd]
|
local_cmd = [self._lxc_cmd]
|
||||||
if self.get_option("project"):
|
if self.get_option("project"):
|
||||||
local_cmd.extend(["--project", self.get_option("project")])
|
local_cmd.extend(["--project", self.get_option("project")])
|
||||||
local_cmd.extend([
|
local_cmd.extend([
|
||||||
"file", "pull",
|
"file", "pull",
|
||||||
"%s:%s/%s" % (self.get_option("remote"), self._host(), in_path),
|
f"{self.get_option('remote')}:{self._host()}/{in_path}",
|
||||||
out_path
|
out_path
|
||||||
])
|
])
|
||||||
|
|
||||||
|
|||||||
864
plugins/connection/proxmox_pct_remote.py
Normal file
864
plugins/connection/proxmox_pct_remote.py
Normal file
@@ -0,0 +1,864 @@
|
|||||||
|
# -*- coding: utf-8 -*-
|
||||||
|
# Derived from ansible/plugins/connection/paramiko_ssh.py (c) 2012, Michael DeHaan <michael.dehaan@gmail.com>
|
||||||
|
# Copyright (c) 2024 Nils Stein (@mietzen) <github.nstein@mailbox.org>
|
||||||
|
# Copyright (c) 2024 Ansible Project
|
||||||
|
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||||
|
# SPDX-License-Identifier: GPL-3.0-or-later
|
||||||
|
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
DOCUMENTATION = r"""
|
||||||
|
author: Nils Stein (@mietzen) <github.nstein@mailbox.org>
|
||||||
|
name: proxmox_pct_remote
|
||||||
|
short_description: Run tasks in Proxmox LXC container instances using pct CLI via SSH
|
||||||
|
requirements:
|
||||||
|
- paramiko
|
||||||
|
description:
|
||||||
|
- Run commands or put/fetch files to an existing Proxmox LXC container using pct CLI via SSH.
|
||||||
|
- Uses the Python SSH implementation (Paramiko) to connect to the Proxmox host.
|
||||||
|
version_added: "10.3.0"
|
||||||
|
options:
|
||||||
|
remote_addr:
|
||||||
|
description:
|
||||||
|
- Address of the remote target.
|
||||||
|
default: inventory_hostname
|
||||||
|
type: string
|
||||||
|
vars:
|
||||||
|
- name: inventory_hostname
|
||||||
|
- name: ansible_host
|
||||||
|
- name: ansible_ssh_host
|
||||||
|
- name: ansible_paramiko_host
|
||||||
|
port:
|
||||||
|
description: Remote port to connect to.
|
||||||
|
type: int
|
||||||
|
default: 22
|
||||||
|
ini:
|
||||||
|
- section: defaults
|
||||||
|
key: remote_port
|
||||||
|
- section: paramiko_connection
|
||||||
|
key: remote_port
|
||||||
|
env:
|
||||||
|
- name: ANSIBLE_REMOTE_PORT
|
||||||
|
- name: ANSIBLE_REMOTE_PARAMIKO_PORT
|
||||||
|
vars:
|
||||||
|
- name: ansible_port
|
||||||
|
- name: ansible_ssh_port
|
||||||
|
- name: ansible_paramiko_port
|
||||||
|
keyword:
|
||||||
|
- name: port
|
||||||
|
remote_user:
|
||||||
|
description:
|
||||||
|
- User to login/authenticate as.
|
||||||
|
- Can be set from the CLI via the C(--user) or C(-u) options.
|
||||||
|
type: string
|
||||||
|
vars:
|
||||||
|
- name: ansible_user
|
||||||
|
- name: ansible_ssh_user
|
||||||
|
- name: ansible_paramiko_user
|
||||||
|
env:
|
||||||
|
- name: ANSIBLE_REMOTE_USER
|
||||||
|
- name: ANSIBLE_PARAMIKO_REMOTE_USER
|
||||||
|
ini:
|
||||||
|
- section: defaults
|
||||||
|
key: remote_user
|
||||||
|
- section: paramiko_connection
|
||||||
|
key: remote_user
|
||||||
|
keyword:
|
||||||
|
- name: remote_user
|
||||||
|
password:
|
||||||
|
description:
|
||||||
|
- Secret used to either login the SSH server or as a passphrase for SSH keys that require it.
|
||||||
|
- Can be set from the CLI via the C(--ask-pass) option.
|
||||||
|
type: string
|
||||||
|
vars:
|
||||||
|
- name: ansible_password
|
||||||
|
- name: ansible_ssh_pass
|
||||||
|
- name: ansible_ssh_password
|
||||||
|
- name: ansible_paramiko_pass
|
||||||
|
- name: ansible_paramiko_password
|
||||||
|
use_rsa_sha2_algorithms:
|
||||||
|
description:
|
||||||
|
- Whether or not to enable RSA SHA2 algorithms for pubkeys and hostkeys.
|
||||||
|
- On paramiko versions older than 2.9, this only affects hostkeys.
|
||||||
|
- For behavior matching paramiko<2.9 set this to V(false).
|
||||||
|
vars:
|
||||||
|
- name: ansible_paramiko_use_rsa_sha2_algorithms
|
||||||
|
ini:
|
||||||
|
- {key: use_rsa_sha2_algorithms, section: paramiko_connection}
|
||||||
|
env:
|
||||||
|
- {name: ANSIBLE_PARAMIKO_USE_RSA_SHA2_ALGORITHMS}
|
||||||
|
default: true
|
||||||
|
type: boolean
|
||||||
|
host_key_auto_add:
|
||||||
|
description: "Automatically add host keys to C(~/.ssh/known_hosts)."
|
||||||
|
env:
|
||||||
|
- name: ANSIBLE_PARAMIKO_HOST_KEY_AUTO_ADD
|
||||||
|
ini:
|
||||||
|
- key: host_key_auto_add
|
||||||
|
section: paramiko_connection
|
||||||
|
type: boolean
|
||||||
|
look_for_keys:
|
||||||
|
default: True
|
||||||
|
description: "Set to V(false) to disable searching for private key files in C(~/.ssh/)."
|
||||||
|
env:
|
||||||
|
- name: ANSIBLE_PARAMIKO_LOOK_FOR_KEYS
|
||||||
|
ini:
|
||||||
|
- {key: look_for_keys, section: paramiko_connection}
|
||||||
|
type: boolean
|
||||||
|
proxy_command:
|
||||||
|
default: ""
|
||||||
|
description:
|
||||||
|
- Proxy information for running the connection via a jumphost.
|
||||||
|
type: string
|
||||||
|
env:
|
||||||
|
- name: ANSIBLE_PARAMIKO_PROXY_COMMAND
|
||||||
|
ini:
|
||||||
|
- {key: proxy_command, section: paramiko_connection}
|
||||||
|
vars:
|
||||||
|
- name: ansible_paramiko_proxy_command
|
||||||
|
pty:
|
||||||
|
default: True
|
||||||
|
description: "C(sudo) usually requires a PTY, V(true) to give a PTY and V(false) to not give a PTY."
|
||||||
|
env:
|
||||||
|
- name: ANSIBLE_PARAMIKO_PTY
|
||||||
|
ini:
|
||||||
|
- section: paramiko_connection
|
||||||
|
key: pty
|
||||||
|
type: boolean
|
||||||
|
record_host_keys:
|
||||||
|
default: True
|
||||||
|
description: "Save the host keys to a file."
|
||||||
|
env:
|
||||||
|
- name: ANSIBLE_PARAMIKO_RECORD_HOST_KEYS
|
||||||
|
ini:
|
||||||
|
- section: paramiko_connection
|
||||||
|
key: record_host_keys
|
||||||
|
type: boolean
|
||||||
|
host_key_checking:
|
||||||
|
description: "Set this to V(false) if you want to avoid host key checking by the underlying tools Ansible uses to connect to the host."
|
||||||
|
type: boolean
|
||||||
|
default: true
|
||||||
|
env:
|
||||||
|
- name: ANSIBLE_HOST_KEY_CHECKING
|
||||||
|
- name: ANSIBLE_SSH_HOST_KEY_CHECKING
|
||||||
|
- name: ANSIBLE_PARAMIKO_HOST_KEY_CHECKING
|
||||||
|
ini:
|
||||||
|
- section: defaults
|
||||||
|
key: host_key_checking
|
||||||
|
- section: paramiko_connection
|
||||||
|
key: host_key_checking
|
||||||
|
vars:
|
||||||
|
- name: ansible_host_key_checking
|
||||||
|
- name: ansible_ssh_host_key_checking
|
||||||
|
- name: ansible_paramiko_host_key_checking
|
||||||
|
use_persistent_connections:
|
||||||
|
description: "Toggles the use of persistence for connections."
|
||||||
|
type: boolean
|
||||||
|
default: False
|
||||||
|
env:
|
||||||
|
- name: ANSIBLE_USE_PERSISTENT_CONNECTIONS
|
||||||
|
ini:
|
||||||
|
- section: defaults
|
||||||
|
key: use_persistent_connections
|
||||||
|
banner_timeout:
|
||||||
|
type: float
|
||||||
|
default: 30
|
||||||
|
description:
|
||||||
|
- Configures, in seconds, the amount of time to wait for the SSH
|
||||||
|
banner to be presented. This option is supported by paramiko
|
||||||
|
version 1.15.0 or newer.
|
||||||
|
ini:
|
||||||
|
- section: paramiko_connection
|
||||||
|
key: banner_timeout
|
||||||
|
env:
|
||||||
|
- name: ANSIBLE_PARAMIKO_BANNER_TIMEOUT
|
||||||
|
timeout:
|
||||||
|
type: int
|
||||||
|
default: 10
|
||||||
|
description: Number of seconds until the plugin gives up on failing to establish a TCP connection.
|
||||||
|
ini:
|
||||||
|
- section: defaults
|
||||||
|
key: timeout
|
||||||
|
- section: ssh_connection
|
||||||
|
key: timeout
|
||||||
|
- section: paramiko_connection
|
||||||
|
key: timeout
|
||||||
|
env:
|
||||||
|
- name: ANSIBLE_TIMEOUT
|
||||||
|
- name: ANSIBLE_SSH_TIMEOUT
|
||||||
|
- name: ANSIBLE_PARAMIKO_TIMEOUT
|
||||||
|
vars:
|
||||||
|
- name: ansible_ssh_timeout
|
||||||
|
- name: ansible_paramiko_timeout
|
||||||
|
cli:
|
||||||
|
- name: timeout
|
||||||
|
lock_file_timeout:
|
||||||
|
type: int
|
||||||
|
default: 60
|
||||||
|
description: Number of seconds until the plugin gives up on trying to write a lock file when writing SSH known host keys.
|
||||||
|
vars:
|
||||||
|
- name: ansible_lock_file_timeout
|
||||||
|
env:
|
||||||
|
- name: ANSIBLE_LOCK_FILE_TIMEOUT
|
||||||
|
private_key_file:
|
||||||
|
description:
|
||||||
|
- Path to private key file to use for authentication.
|
||||||
|
type: string
|
||||||
|
ini:
|
||||||
|
- section: defaults
|
||||||
|
key: private_key_file
|
||||||
|
- section: paramiko_connection
|
||||||
|
key: private_key_file
|
||||||
|
env:
|
||||||
|
- name: ANSIBLE_PRIVATE_KEY_FILE
|
||||||
|
- name: ANSIBLE_PARAMIKO_PRIVATE_KEY_FILE
|
||||||
|
vars:
|
||||||
|
- name: ansible_private_key_file
|
||||||
|
- name: ansible_ssh_private_key_file
|
||||||
|
- name: ansible_paramiko_private_key_file
|
||||||
|
cli:
|
||||||
|
- name: private_key_file
|
||||||
|
option: "--private-key"
|
||||||
|
vmid:
|
||||||
|
description:
|
||||||
|
- LXC Container ID
|
||||||
|
type: int
|
||||||
|
vars:
|
||||||
|
- name: proxmox_vmid
|
||||||
|
proxmox_become_method:
|
||||||
|
description:
|
||||||
|
- Become command used in proxmox
|
||||||
|
type: str
|
||||||
|
default: sudo
|
||||||
|
vars:
|
||||||
|
- name: proxmox_become_method
|
||||||
|
notes:
|
||||||
|
- >
|
||||||
|
When NOT using this plugin as root, you need to have a become mechanism,
|
||||||
|
e.g. C(sudo), installed on Proxmox and setup so we can run it without prompting for the password.
|
||||||
|
Inside the container, we need a shell, for example C(sh) and the C(cat) command to be available in the C(PATH) for this plugin to work.
|
||||||
|
"""
|
||||||
|
|
||||||
|
EXAMPLES = r"""
|
||||||
|
# --------------------------------------------------------------
|
||||||
|
# Setup sudo with password less access to pct for user 'ansible':
|
||||||
|
# --------------------------------------------------------------
|
||||||
|
#
|
||||||
|
# Open a Proxmox root shell and execute:
|
||||||
|
# $ useradd -d /opt/ansible-pct -r -m -s /bin/sh ansible
|
||||||
|
# $ mkdir -p /opt/ansible-pct/.ssh
|
||||||
|
# $ ssh-keygen -t ed25519 -C 'ansible' -N "" -f /opt/ansible-pct/.ssh/ansible <<< y > /dev/null
|
||||||
|
# $ cat /opt/ansible-pct/.ssh/ansible
|
||||||
|
# $ mv /opt/ansible-pct/.ssh/ansible.pub /opt/ansible-pct/.ssh/authorized-keys
|
||||||
|
# $ rm /opt/ansible-pct/.ssh/ansible*
|
||||||
|
# $ chown -R ansible:ansible /opt/ansible-pct/.ssh
|
||||||
|
# $ chmod 700 /opt/ansible-pct/.ssh
|
||||||
|
# $ chmod 600 /opt/ansible-pct/.ssh/authorized-keys
|
||||||
|
# $ echo 'ansible ALL = (root) NOPASSWD: /usr/sbin/pct' > /etc/sudoers.d/ansible_pct
|
||||||
|
#
|
||||||
|
# Save the displayed private key and add it to your ssh-agent
|
||||||
|
#
|
||||||
|
# Or use ansible:
|
||||||
|
# ---
|
||||||
|
# - name: Setup ansible-pct user and configure environment on Proxmox host
|
||||||
|
# hosts: proxmox
|
||||||
|
# become: true
|
||||||
|
# gather_facts: false
|
||||||
|
#
|
||||||
|
# tasks:
|
||||||
|
# - name: Create ansible user
|
||||||
|
# ansible.builtin.user:
|
||||||
|
# name: ansible
|
||||||
|
# comment: Ansible User
|
||||||
|
# home: /opt/ansible-pct
|
||||||
|
# shell: /bin/sh
|
||||||
|
# create_home: true
|
||||||
|
# system: true
|
||||||
|
#
|
||||||
|
# - name: Create .ssh directory
|
||||||
|
# ansible.builtin.file:
|
||||||
|
# path: /opt/ansible-pct/.ssh
|
||||||
|
# state: directory
|
||||||
|
# owner: ansible
|
||||||
|
# group: ansible
|
||||||
|
# mode: '0700'
|
||||||
|
#
|
||||||
|
# - name: Generate SSH key for ansible user
|
||||||
|
# community.crypto.openssh_keypair:
|
||||||
|
# path: /opt/ansible-pct/.ssh/ansible
|
||||||
|
# type: ed25519
|
||||||
|
# comment: 'ansible'
|
||||||
|
# force: true
|
||||||
|
# mode: '0600'
|
||||||
|
# owner: ansible
|
||||||
|
# group: ansible
|
||||||
|
#
|
||||||
|
# - name: Set public key as authorized key
|
||||||
|
# ansible.builtin.copy:
|
||||||
|
# src: /opt/ansible-pct/.ssh/ansible.pub
|
||||||
|
# dest: /opt/ansible-pct/.ssh/authorized-keys
|
||||||
|
# remote_src: yes
|
||||||
|
# owner: ansible
|
||||||
|
# group: ansible
|
||||||
|
# mode: '0600'
|
||||||
|
#
|
||||||
|
# - name: Add sudoers entry for ansible user
|
||||||
|
# ansible.builtin.copy:
|
||||||
|
# content: 'ansible ALL = (root) NOPASSWD: /usr/sbin/pct'
|
||||||
|
# dest: /etc/sudoers.d/ansible_pct
|
||||||
|
# owner: root
|
||||||
|
# group: root
|
||||||
|
# mode: '0440'
|
||||||
|
#
|
||||||
|
# - name: Fetch private SSH key to localhost
|
||||||
|
# ansible.builtin.fetch:
|
||||||
|
# src: /opt/ansible-pct/.ssh/ansible
|
||||||
|
# dest: ~/.ssh/proxmox_ansible_private_key
|
||||||
|
# flat: yes
|
||||||
|
# fail_on_missing: true
|
||||||
|
#
|
||||||
|
# - name: Clean up generated SSH keys
|
||||||
|
# ansible.builtin.file:
|
||||||
|
# path: /opt/ansible-pct/.ssh/ansible*
|
||||||
|
# state: absent
|
||||||
|
#
|
||||||
|
# - name: Configure private key permissions on localhost
|
||||||
|
# hosts: localhost
|
||||||
|
# tasks:
|
||||||
|
# - name: Set permissions for fetched private key
|
||||||
|
# ansible.builtin.file:
|
||||||
|
# path: ~/.ssh/proxmox_ansible_private_key
|
||||||
|
# mode: '0600'
|
||||||
|
#
|
||||||
|
# --------------------------------
|
||||||
|
# Static inventory file: hosts.yml
|
||||||
|
# --------------------------------
|
||||||
|
# all:
|
||||||
|
# children:
|
||||||
|
# lxc:
|
||||||
|
# hosts:
|
||||||
|
# container-1:
|
||||||
|
# ansible_host: 10.0.0.10
|
||||||
|
# proxmox_vmid: 100
|
||||||
|
# ansible_connection: community.general.proxmox_pct_remote
|
||||||
|
# ansible_user: ansible
|
||||||
|
# container-2:
|
||||||
|
# ansible_host: 10.0.0.10
|
||||||
|
# proxmox_vmid: 200
|
||||||
|
# ansible_connection: community.general.proxmox_pct_remote
|
||||||
|
# ansible_user: ansible
|
||||||
|
# proxmox:
|
||||||
|
# hosts:
|
||||||
|
# proxmox-1:
|
||||||
|
# ansible_host: 10.0.0.10
|
||||||
|
#
|
||||||
|
#
|
||||||
|
# ---------------------------------------------
|
||||||
|
# Dynamic inventory file: inventory.proxmox.yml
|
||||||
|
# ---------------------------------------------
|
||||||
|
# plugin: community.general.proxmox
|
||||||
|
# url: https://10.0.0.10:8006
|
||||||
|
# validate_certs: false
|
||||||
|
# user: ansible@pam
|
||||||
|
# token_id: ansible
|
||||||
|
# token_secret: !vault |
|
||||||
|
# $ANSIBLE_VAULT;1.1;AES256
|
||||||
|
# ...
|
||||||
|
|
||||||
|
# want_facts: true
|
||||||
|
# exclude_nodes: true
|
||||||
|
# filters:
|
||||||
|
# - proxmox_vmtype == "lxc"
|
||||||
|
# want_proxmox_nodes_ansible_host: false
|
||||||
|
# compose:
|
||||||
|
# ansible_host: "'10.0.0.10'"
|
||||||
|
# ansible_connection: "'community.general.proxmox_pct_remote'"
|
||||||
|
# ansible_user: "'ansible'"
|
||||||
|
#
|
||||||
|
#
|
||||||
|
# ----------------------
|
||||||
|
# Playbook: playbook.yml
|
||||||
|
# ----------------------
|
||||||
|
---
|
||||||
|
- hosts: lxc
|
||||||
|
# On nodes with many containers you might want to deactivate the devices facts
|
||||||
|
# or set `gather_facts: false` if you don't need them.
|
||||||
|
# More info on gathering fact subsets:
|
||||||
|
# https://docs.ansible.com/ansible/latest/collections/ansible/builtin/setup_module.html
|
||||||
|
#
|
||||||
|
# gather_facts: true
|
||||||
|
# gather_subset:
|
||||||
|
# - "!devices"
|
||||||
|
tasks:
|
||||||
|
- name: Ping LXC container
|
||||||
|
ansible.builtin.ping:
|
||||||
|
"""
|
||||||
|
|
||||||
|
import os
|
||||||
|
import pathlib
|
||||||
|
import socket
|
||||||
|
import tempfile
|
||||||
|
import traceback
|
||||||
|
import typing as t
|
||||||
|
|
||||||
|
from ansible.errors import (
|
||||||
|
AnsibleAuthenticationFailure,
|
||||||
|
AnsibleConnectionFailure,
|
||||||
|
AnsibleError,
|
||||||
|
)
|
||||||
|
from ansible_collections.community.general.plugins.module_utils._filelock import FileLock, LockTimeout
|
||||||
|
from ansible_collections.community.general.plugins.module_utils.version import LooseVersion
|
||||||
|
from ansible.module_utils.common.text.converters import to_bytes, to_native, to_text
|
||||||
|
from ansible.plugins.connection import ConnectionBase
|
||||||
|
from ansible.utils.display import Display
|
||||||
|
from ansible.utils.path import makedirs_safe
|
||||||
|
from binascii import hexlify
|
||||||
|
|
||||||
|
try:
|
||||||
|
import paramiko
|
||||||
|
PARAMIKO_IMPORT_ERR = None
|
||||||
|
except ImportError:
|
||||||
|
paramiko = None
|
||||||
|
PARAMIKO_IMPORT_ERR = traceback.format_exc()
|
||||||
|
|
||||||
|
|
||||||
|
display = Display()
|
||||||
|
|
||||||
|
|
||||||
|
def authenticity_msg(hostname: str, ktype: str, fingerprint: str) -> str:
    """Return the interactive host-key confirmation prompt for *hostname*.

    The message mirrors OpenSSH's "authenticity of host" prompt and is shown
    (or raised) when a server presents an unknown host key.
    """
    return f"""
paramiko: The authenticity of host '{hostname}' can't be established.
The {ktype} key fingerprint is {fingerprint}.
Are you sure you want to continue connecting (yes/no)?
"""
|
||||||
|
|
||||||
|
|
||||||
|
# Fall back to plain `object` as the base class so this module can still be
# imported when paramiko is absent; _connect() raises a helpful error later.
MissingHostKeyPolicy: type = object
if paramiko:
    MissingHostKeyPolicy = paramiko.MissingHostKeyPolicy
|
||||||
|
|
||||||
|
|
||||||
|
class MyAddPolicy(MissingHostKeyPolicy):
    """
    Based on AutoAddPolicy in paramiko so we can determine when keys are added

    and also prompt for input.

    Policy for automatically adding the hostname and new host key to the
    local L{HostKeys} object, and saving it. This is used by L{SSHClient}.
    """

    def __init__(self, connection: Connection) -> None:
        # Keep a reference to the owning connection so the policy can consult
        # its options (host_key_checking, host_key_auto_add, ...) at key time.
        self.connection = connection
        self._options = connection._options

    def missing_host_key(self, client, hostname, key) -> None:
        # Invoked by paramiko when the server's key is not in known_hosts.

        if all((self.connection.get_option('host_key_checking'), not self.connection.get_option('host_key_auto_add'))):

            # NOTE(review): hexlify() returns bytes, so the fingerprint renders
            # as b'...' inside the prompt text — confirm this is intended.
            fingerprint = hexlify(key.get_fingerprint())
            ktype = key.get_name()

            if self.connection.get_option('use_persistent_connections') or self.connection.force_persistence:
                # don't print the prompt string since the user cannot respond
                # to the question anyway
                # [1:92] drops the leading newline and trims the message to its
                # informative part (the yes/no question is omitted).
                raise AnsibleError(authenticity_msg(hostname, ktype, fingerprint)[1:92])

            inp = to_text(
                display.prompt_until(authenticity_msg(hostname, ktype, fingerprint), private=False),
                errors='surrogate_or_strict'
            )

            # An empty answer (plain Enter) counts as acceptance, like OpenSSH.
            if inp.lower() not in ['yes', 'y', '']:
                raise AnsibleError('host connection rejected by user')

        # Mark the key so Connection.close() knows it must be persisted.
        key._added_by_ansible_this_time = True

        # existing implementation below:
        client._host_keys.add(hostname, key.get_name(), key)

        # host keys are actually saved in close() function below
        # in order to control ordering.
|
||||||
|
|
||||||
|
|
||||||
|
class Connection(ConnectionBase):
|
||||||
|
""" SSH based connections (paramiko) to Proxmox pct """
|
||||||
|
|
||||||
|
transport = 'community.general.proxmox_pct_remote'
|
||||||
|
_log_channel: str | None = None
|
||||||
|
|
||||||
|
def __init__(self, play_context, new_stdin, *args, **kwargs):
    """Initialize the plugin; the SSH session itself is built in _connect()."""
    super().__init__(play_context, new_stdin, *args, **kwargs)
|
||||||
|
|
||||||
|
def _set_log_channel(self, name: str) -> None:
    """Remember a custom logger channel name (mimics paramiko.SSHClient.set_log_channel)."""
    self._log_channel = name
|
||||||
|
|
||||||
|
def _parse_proxy_command(self, port: int = 22) -> dict[str, t.Any]:
    """Build the kwargs needed to tunnel the session through a ProxyCommand.

    Returns either an empty dict or one containing a single ``sock`` entry
    suitable for passing to ``paramiko.SSHClient.connect``.
    """
    command_template = self.get_option('proxy_command') or None
    if not command_template:
        return {}

    # Expand the OpenSSH-style %h/%p/%r tokens before spawning the command.
    substitutions = (
        ('%h', self.get_option('remote_addr')),
        ('%p', port),
        ('%r', self.get_option('remote_user')),
    )
    for token, value in substitutions:
        command_template = command_template.replace(token, str(value))

    proxy_kwargs: dict[str, t.Any] = {}
    try:
        proxy_kwargs['sock'] = paramiko.ProxyCommand(command_template)
        display.vvv(f'CONFIGURE PROXY COMMAND FOR CONNECTION: {command_template}', host=self.get_option('remote_addr'))
    except AttributeError:
        # Very old paramiko releases lack ProxyCommand support entirely.
        display.warning('Paramiko ProxyCommand support unavailable. '
                        'Please upgrade to Paramiko 1.9.0 or newer. '
                        'Not using configured ProxyCommand')

    return proxy_kwargs
|
||||||
|
|
||||||
|
def _connect(self) -> Connection:
    """Activate the connection: open a paramiko SSH session to the Proxmox host.

    Raises AnsibleError when paramiko is not installed,
    AnsibleAuthenticationFailure on auth errors, and
    AnsibleConnectionFailure for host-key / transport problems.
    Returns self with ``self.ssh`` set and ``self._connected`` True.
    """

    if PARAMIKO_IMPORT_ERR is not None:
        raise AnsibleError(f'paramiko is not installed: {to_native(PARAMIKO_IMPORT_ERR)}')

    port = self.get_option('port')
    display.vvv(f'ESTABLISH PARAMIKO SSH CONNECTION FOR USER: {self.get_option("remote_user")} on PORT {to_text(port)} TO {self.get_option("remote_addr")}',
                host=self.get_option('remote_addr'))

    ssh = paramiko.SSHClient()

    # Set pubkey and hostkey algorithms to disable, the only manipulation allowed currently
    # is keeping or omitting rsa-sha2 algorithms
    paramiko_preferred_pubkeys = getattr(paramiko.Transport, '_preferred_pubkeys', ())
    paramiko_preferred_hostkeys = getattr(paramiko.Transport, '_preferred_keys', ())
    use_rsa_sha2_algorithms = self.get_option('use_rsa_sha2_algorithms')
    disabled_algorithms: t.Dict[str, t.Iterable[str]] = {}
    if not use_rsa_sha2_algorithms:
        if paramiko_preferred_pubkeys:
            disabled_algorithms['pubkeys'] = tuple(a for a in paramiko_preferred_pubkeys if 'rsa-sha2' in a)
        if paramiko_preferred_hostkeys:
            disabled_algorithms['keys'] = tuple(a for a in paramiko_preferred_hostkeys if 'rsa-sha2' in a)

    # override paramiko's default logger name
    if self._log_channel is not None:
        ssh.set_log_channel(self._log_channel)

    self.keyfile = os.path.expanduser('~/.ssh/known_hosts')

    if self.get_option('host_key_checking'):
        # Load the first system-wide known_hosts file that exists; neither
        # location is mandatory for the connection to work.
        for ssh_known_hosts in ('/etc/ssh/ssh_known_hosts', '/etc/openssh/ssh_known_hosts'):
            try:
                ssh.load_system_host_keys(ssh_known_hosts)
                break
            except IOError:
                pass  # file was not found, but not required to function
            except paramiko.hostkeys.InvalidHostKey as e:
                raise AnsibleConnectionFailure(f'Invalid host key: {to_text(e.line)}')
        try:
            ssh.load_system_host_keys()
        except paramiko.hostkeys.InvalidHostKey as e:
            raise AnsibleConnectionFailure(f'Invalid host key: {to_text(e.line)}')

    ssh_connect_kwargs = self._parse_proxy_command(port)
    ssh.set_missing_host_key_policy(MyAddPolicy(self))
    conn_password = self.get_option('password')
    allow_agent = True

    # An explicit password disables agent-based key auth.
    if conn_password is not None:
        allow_agent = False

    try:
        key_filename = None
        if self.get_option('private_key_file'):
            key_filename = os.path.expanduser(self.get_option('private_key_file'))

        # paramiko 2.2 introduced auth_timeout parameter
        if LooseVersion(paramiko.__version__) >= LooseVersion('2.2.0'):
            ssh_connect_kwargs['auth_timeout'] = self.get_option('timeout')

        # paramiko 1.15 introduced banner timeout parameter
        if LooseVersion(paramiko.__version__) >= LooseVersion('1.15.0'):
            ssh_connect_kwargs['banner_timeout'] = self.get_option('banner_timeout')

        ssh.connect(
            self.get_option('remote_addr').lower(),
            username=self.get_option('remote_user'),
            allow_agent=allow_agent,
            look_for_keys=self.get_option('look_for_keys'),
            key_filename=key_filename,
            password=conn_password,
            timeout=self.get_option('timeout'),
            port=port,
            disabled_algorithms=disabled_algorithms,
            **ssh_connect_kwargs,
        )
    except paramiko.ssh_exception.BadHostKeyException as e:
        raise AnsibleConnectionFailure(f'host key mismatch for {to_text(e.hostname)}')
    except paramiko.ssh_exception.AuthenticationException as e:
        msg = f'Failed to authenticate: {e}'
        raise AnsibleAuthenticationFailure(msg)
    except Exception as e:
        msg = to_text(e)
        if u'PID check failed' in msg:
            raise AnsibleError('paramiko version issue, please upgrade paramiko on the machine running ansible')
        elif u'Private key file is encrypted' in msg:
            # BUGFIX: was self.get_options("remote_addr") (no such method),
            # which raised AttributeError instead of this error message.
            msg = f'ssh {self.get_option("remote_user")}@{self.get_option("remote_addr")}:{port} : ' + \
                f'{msg}\nTo connect as a different user, use -u <username>.'
            raise AnsibleConnectionFailure(msg)
        else:
            raise AnsibleConnectionFailure(msg)
    self.ssh = ssh
    self._connected = True
    return self
|
||||||
|
|
||||||
|
def _any_keys_added(self) -> bool:
    """Return True when at least one host key was added during this run.

    Keys added by MyAddPolicy carry the ``_added_by_ansible_this_time``
    marker attribute; pre-existing keys do not.
    """
    return any(
        getattr(host_key, '_added_by_ansible_this_time', False)
        for host_entries in self.ssh._host_keys.values()
        for host_key in host_entries.values()
    )
|
||||||
|
|
||||||
|
def _save_ssh_host_keys(self, filename: str) -> None:
    """
    not using the paramiko save_ssh_host_keys function as we want to add new SSH keys at the bottom so folks
    don't complain about it :)
    """

    # Nothing to persist if this session added no keys.
    if not self._any_keys_added():
        return

    makedirs_safe(os.path.expanduser('~/.ssh'))

    with open(filename, 'w') as keys_file:
        # Two passes: first the pre-existing keys, then the ones added this
        # session, so new entries always land at the bottom of the file.
        for newly_added_pass in (False, True):
            for hostname, host_entries in self.ssh._host_keys.items():
                for keytype, host_key in host_entries.items():
                    added_now = bool(getattr(host_key, '_added_by_ansible_this_time', False))
                    if added_now is newly_added_pass:
                        keys_file.write(f'{hostname} {keytype} {host_key.get_base64()}\n')
|
||||||
|
|
||||||
|
def _build_pct_command(self, cmd: str) -> str:
    """Wrap *cmd* into a ``pct exec`` invocation for the configured VM ID.

    When the remote user is not root, the configured become method is
    prepended so pct can run with elevated privileges.
    """
    wrapped = ['/usr/sbin/pct', 'exec', str(self.get_option('vmid')), '--', cmd]
    if self.get_option('remote_user') != 'root':
        wrapped.insert(0, self.get_option('proxmox_become_method'))
        display.vvv(f'INFO Running as non root user: {self.get_option("remote_user")}, trying to run pct with become method: ' +
                    f'{self.get_option("proxmox_become_method")}',
                    host=self.get_option('remote_addr'))
    return ' '.join(wrapped)
|
||||||
|
|
||||||
|
def exec_command(self, cmd: str, in_data: bytes | None = None, sudoable: bool = True) -> tuple[int, bytes, bytes]:
    """Run *cmd* inside the LXC container via ``pct exec`` over SSH.

    Handles privilege-escalation (become) prompt negotiation, streams
    optional *in_data* to the remote stdin, and returns
    ``(returncode, stdout, stderr)`` as bytes.
    """

    cmd = self._build_pct_command(cmd)

    super(Connection, self).exec_command(cmd, in_data=in_data, sudoable=sudoable)

    bufsize = 4096

    try:
        # Keepalive avoids the transport silently dying on long tasks.
        self.ssh.get_transport().set_keepalive(5)
        chan = self.ssh.get_transport().open_session()
    except Exception as e:
        text_e = to_text(e)
        msg = 'Failed to open session'
        if text_e:
            msg += f': {text_e}'
        raise AnsibleConnectionFailure(to_native(msg))

    # sudo usually requires a PTY (cf. requiretty option), therefore
    # we give it one by default (pty=True in ansible.cfg), and we try
    # to initialise from the calling environment when sudoable is enabled
    if self.get_option('pty') and sudoable:
        chan.get_pty(term=os.getenv('TERM', 'vt100'), width=int(os.getenv('COLUMNS', 0)), height=int(os.getenv('LINES', 0)))

    display.vvv(f'EXEC {cmd}', host=self.get_option('remote_addr'))

    cmd = to_bytes(cmd, errors='surrogate_or_strict')

    no_prompt_out = b''
    no_prompt_err = b''
    become_output = b''

    try:
        chan.exec_command(cmd)
        if self.become and self.become.expect_prompt():
            password_prompt = False
            become_success = False
            while not (become_success or password_prompt):
                display.debug('Waiting for Privilege Escalation input')

                chunk = chan.recv(bufsize)
                display.debug(f'chunk is: {to_text(chunk)}')
                if not chunk:
                    if b'unknown user' in become_output:
                        n_become_user = to_native(self.become.get_option('become_user'))
                        raise AnsibleError(f'user {n_become_user} does not exist')
                    else:
                        break
                        # raise AnsibleError('ssh connection closed waiting for password prompt')
                become_output += chunk

                # need to check every line because we might get lectured
                # and we might get the middle of a line in a chunk
                for line in become_output.splitlines(True):
                    if self.become.check_success(line):
                        become_success = True
                        break
                    elif self.become.check_password_prompt(line):
                        password_prompt = True
                        break

            if password_prompt:
                if self.become:
                    become_pass = self.become.get_option('become_pass')
                    chan.sendall(to_bytes(become_pass, errors='surrogate_or_strict') + b'\n')
                else:
                    raise AnsibleError('A password is required but none was supplied')
            else:
                # No prompt appeared: whatever we read belongs to the
                # command's own output and must be re-prefixed below.
                no_prompt_out += become_output
                no_prompt_err += become_output

        if in_data:
            for i in range(0, len(in_data), bufsize):
                chan.send(in_data[i:i + bufsize])
            chan.shutdown_write()
        elif in_data == b'':
            chan.shutdown_write()

    except socket.timeout:
        raise AnsibleError('ssh timed out waiting for privilege escalation.\n' + to_text(become_output))

    stdout = b''.join(chan.makefile('rb', bufsize))
    stderr = b''.join(chan.makefile_stderr('rb', bufsize))
    returncode = chan.recv_exit_status()

    if 'pct: not found' in stderr.decode('utf-8'):
        raise AnsibleError(
            f'pct not found in path of host: {to_text(self.get_option("remote_addr"))}')

    # BUGFIX: stderr is now prefixed with no_prompt_err (previously
    # no_prompt_out was used for both streams, leaving no_prompt_err dead).
    return (returncode, no_prompt_out + stdout, no_prompt_err + stderr)
|
||||||
|
|
||||||
|
def put_file(self, in_path: str, out_path: str) -> None:
    """Copy the local file *in_path* into the container at *out_path*.

    The transfer is implemented by piping the file contents into a remote
    ``cat > out_path`` executed inside the container.
    """

    display.vvv(f'PUT {in_path} TO {out_path}', host=self.get_option('remote_addr'))
    try:
        with open(in_path, 'rb') as source:
            payload = source.read()
        remote_cmd = ' '.join([
            self._shell.executable, '-c',
            self._shell.quote(f'cat > {out_path}')])
        rc, out, err = self.exec_command(remote_cmd, in_data=payload, sudoable=False)
        if rc != 0:
            if 'cat: not found' in err.decode('utf-8'):
                raise AnsibleError(
                    f'cat not found in path of container: {to_text(self.get_option("vmid"))}')
            raise AnsibleError(
                f'{to_text(out)}\n{to_text(err)}')
    except Exception as e:
        raise AnsibleError(
            f'error occurred while putting file from {in_path} to {out_path}!\n{to_text(e)}')
|
||||||
|
|
||||||
|
def fetch_file(self, in_path: str, out_path: str) -> None:
    """Download the container file *in_path* to the local path *out_path*.

    The transfer is implemented by running ``cat in_path`` inside the
    container and writing its stdout to the local file.
    """

    display.vvv(f'FETCH {in_path} TO {out_path}', host=self.get_option('remote_addr'))
    try:
        remote_cmd = ' '.join([
            self._shell.executable, '-c',
            self._shell.quote(f'cat {in_path}')])
        rc, out, err = self.exec_command(remote_cmd, sudoable=False)
        if rc != 0:
            if 'cat: not found' in err.decode('utf-8'):
                raise AnsibleError(
                    f'cat not found in path of container: {to_text(self.get_option("vmid"))}')
            raise AnsibleError(
                f'{to_text(out)}\n{to_text(err)}')
        with open(out_path, 'wb') as destination:
            destination.write(out)
    except Exception as e:
        raise AnsibleError(
            f'error occurred while fetching file from {in_path} to {out_path}!\n{to_text(e)}')
|
||||||
|
|
||||||
|
def reset(self) -> None:
    """Tear down and re-establish the connection (no-op when not connected)."""
    if self._connected:
        self.close()
        self._connect()
|
||||||
|
|
||||||
|
def close(self) -> None:
    """ terminate the connection

    Before closing the SSH session, persist any host keys that were added
    during this run to ~/.ssh/known_hosts (guarded by a file lock and
    written atomically via a temp file + rename).
    """

    if self.get_option('host_key_checking') and self.get_option('record_host_keys') and self._any_keys_added():
        # add any new SSH host keys -- warning -- this could be slow
        # (This doesn't acquire the connection lock because it needs
        # to exclude only other known_hosts writers, not connections
        # that are starting up.)
        lockfile = os.path.basename(self.keyfile)
        dirname = os.path.dirname(self.keyfile)
        makedirs_safe(dirname)
        tmp_keyfile_name = None
        try:
            with FileLock().lock_file(lockfile, dirname, self.get_option('lock_file_timeout')):
                # just in case any were added recently
                self.ssh.load_system_host_keys()
                self.ssh._host_keys.update(self.ssh._system_host_keys)

                # gather information about the current key file, so
                # we can ensure the new file has the correct mode/owner
                key_dir = os.path.dirname(self.keyfile)
                if os.path.exists(self.keyfile):
                    key_stat = os.stat(self.keyfile)
                    mode = key_stat.st_mode & 0o777
                    uid = key_stat.st_uid
                    gid = key_stat.st_gid
                else:
                    # No existing known_hosts: fall back to conventional
                    # permissions owned by the current user.
                    mode = 0o644
                    uid = os.getuid()
                    gid = os.getgid()

                # Save the new keys to a temporary file and move it into place
                # rather than rewriting the file. We set delete=False because
                # the file will be moved into place rather than cleaned up.
                with tempfile.NamedTemporaryFile(dir=key_dir, delete=False) as tmp_keyfile:
                    tmp_keyfile_name = tmp_keyfile.name
                    os.chmod(tmp_keyfile_name, mode)
                    os.chown(tmp_keyfile_name, uid, gid)
                    self._save_ssh_host_keys(tmp_keyfile_name)

                # Atomic replacement of the known_hosts file.
                os.rename(tmp_keyfile_name, self.keyfile)
        except LockTimeout:
            raise AnsibleError(
                f'writing lock file for {self.keyfile} ran in to the timeout of {self.get_option("lock_file_timeout")}s')
        except paramiko.hostkeys.InvalidHostKey as e:
            raise AnsibleConnectionFailure(f'Invalid host key: {e.line}')
        except Exception as e:
            # unable to save keys, including scenario when key was invalid
            # and caught earlier
            raise AnsibleError(
                f'error occurred while writing SSH host keys!\n{to_text(e)}')
        finally:
            # After a successful rename the temp name no longer exists;
            # missing_ok makes the cleanup safe in both outcomes.
            if tmp_keyfile_name is not None:
                pathlib.Path(tmp_keyfile_name).unlink(missing_ok=True)

    self.ssh.close()
    self._connected = False
|
||||||
@@ -8,38 +8,36 @@
|
|||||||
#
|
#
|
||||||
# Written by: Kushal Das (https://github.com/kushaldas)
|
# Written by: Kushal Das (https://github.com/kushaldas)
|
||||||
|
|
||||||
from __future__ import (absolute_import, division, print_function)
|
from __future__ import annotations
|
||||||
__metaclass__ = type
|
|
||||||
|
|
||||||
|
|
||||||
DOCUMENTATION = '''
|
DOCUMENTATION = r"""
|
||||||
name: qubes
|
name: qubes
|
||||||
short_description: Interact with an existing QubesOS AppVM
|
short_description: Interact with an existing QubesOS AppVM
|
||||||
|
|
||||||
|
description:
|
||||||
|
- Run commands or put/fetch files to an existing Qubes AppVM using qubes tools.
|
||||||
|
author: Kushal Das (@kushaldas)
|
||||||
|
|
||||||
|
|
||||||
|
options:
|
||||||
|
remote_addr:
|
||||||
description:
|
description:
|
||||||
- Run commands or put/fetch files to an existing Qubes AppVM using qubes tools.
|
- VM name.
|
||||||
|
type: string
|
||||||
author: Kushal Das (@kushaldas)
|
default: inventory_hostname
|
||||||
|
vars:
|
||||||
|
- name: ansible_host
|
||||||
options:
|
remote_user:
|
||||||
remote_addr:
|
description:
|
||||||
description:
|
- The user to execute as inside the VM.
|
||||||
- VM name.
|
type: string
|
||||||
type: string
|
default: The I(user) account as default in Qubes OS.
|
||||||
default: inventory_hostname
|
vars:
|
||||||
vars:
|
- name: ansible_user
|
||||||
- name: ansible_host
|
|
||||||
remote_user:
|
|
||||||
description:
|
|
||||||
- The user to execute as inside the VM.
|
|
||||||
type: string
|
|
||||||
default: The I(user) account as default in Qubes OS.
|
|
||||||
vars:
|
|
||||||
- name: ansible_user
|
|
||||||
# keyword:
|
# keyword:
|
||||||
# - name: hosts
|
# - name: hosts
|
||||||
'''
|
"""
|
||||||
|
|
||||||
import subprocess
|
import subprocess
|
||||||
|
|
||||||
@@ -78,7 +76,7 @@ class Connection(ConnectionBase):
|
|||||||
"""
|
"""
|
||||||
display.vvvv("CMD: ", cmd)
|
display.vvvv("CMD: ", cmd)
|
||||||
if not cmd.endswith("\n"):
|
if not cmd.endswith("\n"):
|
||||||
cmd = cmd + "\n"
|
cmd = f"{cmd}\n"
|
||||||
local_cmd = []
|
local_cmd = []
|
||||||
|
|
||||||
# For dom0
|
# For dom0
|
||||||
@@ -95,7 +93,7 @@ class Connection(ConnectionBase):
|
|||||||
|
|
||||||
display.vvvv("Local cmd: ", local_cmd)
|
display.vvvv("Local cmd: ", local_cmd)
|
||||||
|
|
||||||
display.vvv("RUN %s" % (local_cmd,), host=self._remote_vmname)
|
display.vvv(f"RUN {local_cmd}", host=self._remote_vmname)
|
||||||
p = subprocess.Popen(local_cmd, shell=False, stdin=subprocess.PIPE,
|
p = subprocess.Popen(local_cmd, shell=False, stdin=subprocess.PIPE,
|
||||||
stdout=subprocess.PIPE, stderr=subprocess.PIPE)
|
stdout=subprocess.PIPE, stderr=subprocess.PIPE)
|
||||||
|
|
||||||
@@ -114,42 +112,42 @@ class Connection(ConnectionBase):
|
|||||||
"""Run specified command in a running QubesVM """
|
"""Run specified command in a running QubesVM """
|
||||||
super(Connection, self).exec_command(cmd, in_data=in_data, sudoable=sudoable)
|
super(Connection, self).exec_command(cmd, in_data=in_data, sudoable=sudoable)
|
||||||
|
|
||||||
display.vvvv("CMD IS: %s" % cmd)
|
display.vvvv(f"CMD IS: {cmd}")
|
||||||
|
|
||||||
rc, stdout, stderr = self._qubes(cmd)
|
rc, stdout, stderr = self._qubes(cmd)
|
||||||
|
|
||||||
display.vvvvv("STDOUT %r STDERR %r" % (stderr, stderr))
|
display.vvvvv(f"STDOUT {stdout!r} STDERR {stderr!r}")
|
||||||
return rc, stdout, stderr
|
return rc, stdout, stderr
|
||||||
|
|
||||||
def put_file(self, in_path, out_path):
|
def put_file(self, in_path, out_path):
|
||||||
""" Place a local file located in 'in_path' inside VM at 'out_path' """
|
""" Place a local file located in 'in_path' inside VM at 'out_path' """
|
||||||
super(Connection, self).put_file(in_path, out_path)
|
super(Connection, self).put_file(in_path, out_path)
|
||||||
display.vvv("PUT %s TO %s" % (in_path, out_path), host=self._remote_vmname)
|
display.vvv(f"PUT {in_path} TO {out_path}", host=self._remote_vmname)
|
||||||
|
|
||||||
with open(in_path, "rb") as fobj:
|
with open(in_path, "rb") as fobj:
|
||||||
source_data = fobj.read()
|
source_data = fobj.read()
|
||||||
|
|
||||||
retcode, dummy, dummy = self._qubes('cat > "{0}"\n'.format(out_path), source_data, "qubes.VMRootShell")
|
retcode, dummy, dummy = self._qubes(f'cat > "{out_path}\"\n', source_data, "qubes.VMRootShell")
|
||||||
# if qubes.VMRootShell service not supported, fallback to qubes.VMShell and
|
# if qubes.VMRootShell service not supported, fallback to qubes.VMShell and
|
||||||
# hope it will have appropriate permissions
|
# hope it will have appropriate permissions
|
||||||
if retcode == 127:
|
if retcode == 127:
|
||||||
retcode, dummy, dummy = self._qubes('cat > "{0}"\n'.format(out_path), source_data)
|
retcode, dummy, dummy = self._qubes(f'cat > "{out_path}\"\n', source_data)
|
||||||
|
|
||||||
if retcode != 0:
|
if retcode != 0:
|
||||||
raise AnsibleConnectionFailure('Failed to put_file to {0}'.format(out_path))
|
raise AnsibleConnectionFailure(f'Failed to put_file to {out_path}')
|
||||||
|
|
||||||
def fetch_file(self, in_path, out_path):
|
def fetch_file(self, in_path, out_path):
|
||||||
"""Obtain file specified via 'in_path' from the container and place it at 'out_path' """
|
"""Obtain file specified via 'in_path' from the container and place it at 'out_path' """
|
||||||
super(Connection, self).fetch_file(in_path, out_path)
|
super(Connection, self).fetch_file(in_path, out_path)
|
||||||
display.vvv("FETCH %s TO %s" % (in_path, out_path), host=self._remote_vmname)
|
display.vvv(f"FETCH {in_path} TO {out_path}", host=self._remote_vmname)
|
||||||
|
|
||||||
# We are running in dom0
|
# We are running in dom0
|
||||||
cmd_args_list = ["qvm-run", "--pass-io", self._remote_vmname, "cat {0}".format(in_path)]
|
cmd_args_list = ["qvm-run", "--pass-io", self._remote_vmname, f"cat {in_path}"]
|
||||||
with open(out_path, "wb") as fobj:
|
with open(out_path, "wb") as fobj:
|
||||||
p = subprocess.Popen(cmd_args_list, shell=False, stdout=fobj)
|
p = subprocess.Popen(cmd_args_list, shell=False, stdout=fobj)
|
||||||
p.communicate()
|
p.communicate()
|
||||||
if p.returncode != 0:
|
if p.returncode != 0:
|
||||||
raise AnsibleConnectionFailure('Failed to fetch file to {0}'.format(out_path))
|
raise AnsibleConnectionFailure(f'Failed to fetch file to {out_path}')
|
||||||
|
|
||||||
def close(self):
|
def close(self):
|
||||||
""" Closing the connection """
|
""" Closing the connection """
|
||||||
|
|||||||
@@ -7,16 +7,15 @@
|
|||||||
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
|
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||||
# SPDX-License-Identifier: GPL-3.0-or-later
|
# SPDX-License-Identifier: GPL-3.0-or-later
|
||||||
|
|
||||||
from __future__ import (absolute_import, division, print_function)
|
from __future__ import annotations
|
||||||
__metaclass__ = type
|
|
||||||
|
|
||||||
DOCUMENTATION = '''
|
DOCUMENTATION = r"""
|
||||||
author: Michael Scherer (@mscherer) <misc@zarb.org>
|
author: Michael Scherer (@mscherer) <misc@zarb.org>
|
||||||
name: saltstack
|
name: saltstack
|
||||||
short_description: Allow ansible to piggyback on salt minions
|
short_description: Allow ansible to piggyback on salt minions
|
||||||
description:
|
description:
|
||||||
- This allows you to use existing Saltstack infrastructure to connect to targets.
|
- This allows you to use existing Saltstack infrastructure to connect to targets.
|
||||||
'''
|
"""
|
||||||
|
|
||||||
import os
|
import os
|
||||||
import base64
|
import base64
|
||||||
@@ -59,11 +58,11 @@ class Connection(ConnectionBase):
|
|||||||
if in_data:
|
if in_data:
|
||||||
raise errors.AnsibleError("Internal Error: this module does not support optimized module pipelining")
|
raise errors.AnsibleError("Internal Error: this module does not support optimized module pipelining")
|
||||||
|
|
||||||
self._display.vvv("EXEC %s" % cmd, host=self.host)
|
self._display.vvv(f"EXEC {cmd}", host=self.host)
|
||||||
# need to add 'true;' to work around https://github.com/saltstack/salt/issues/28077
|
# need to add 'true;' to work around https://github.com/saltstack/salt/issues/28077
|
||||||
res = self.client.cmd(self.host, 'cmd.exec_code_all', ['bash', 'true;' + cmd])
|
res = self.client.cmd(self.host, 'cmd.exec_code_all', ['bash', f"true;{cmd}"])
|
||||||
if self.host not in res:
|
if self.host not in res:
|
||||||
raise errors.AnsibleError("Minion %s didn't answer, check if salt-minion is running and the name is correct" % self.host)
|
raise errors.AnsibleError(f"Minion {self.host} didn't answer, check if salt-minion is running and the name is correct")
|
||||||
|
|
||||||
p = res[self.host]
|
p = res[self.host]
|
||||||
return p['retcode'], p['stdout'], p['stderr']
|
return p['retcode'], p['stdout'], p['stderr']
|
||||||
@@ -81,7 +80,7 @@ class Connection(ConnectionBase):
|
|||||||
super(Connection, self).put_file(in_path, out_path)
|
super(Connection, self).put_file(in_path, out_path)
|
||||||
|
|
||||||
out_path = self._normalize_path(out_path, '/')
|
out_path = self._normalize_path(out_path, '/')
|
||||||
self._display.vvv("PUT %s TO %s" % (in_path, out_path), host=self.host)
|
self._display.vvv(f"PUT {in_path} TO {out_path}", host=self.host)
|
||||||
with open(in_path, 'rb') as in_fh:
|
with open(in_path, 'rb') as in_fh:
|
||||||
content = in_fh.read()
|
content = in_fh.read()
|
||||||
self.client.cmd(self.host, 'hashutil.base64_decodefile', [base64.b64encode(content), out_path])
|
self.client.cmd(self.host, 'hashutil.base64_decodefile', [base64.b64encode(content), out_path])
|
||||||
@@ -93,7 +92,7 @@ class Connection(ConnectionBase):
|
|||||||
super(Connection, self).fetch_file(in_path, out_path)
|
super(Connection, self).fetch_file(in_path, out_path)
|
||||||
|
|
||||||
in_path = self._normalize_path(in_path, '/')
|
in_path = self._normalize_path(in_path, '/')
|
||||||
self._display.vvv("FETCH %s TO %s" % (in_path, out_path), host=self.host)
|
self._display.vvv(f"FETCH {in_path} TO {out_path}", host=self.host)
|
||||||
content = self.client.cmd(self.host, 'cp.get_file_str', [in_path])[self.host]
|
content = self.client.cmd(self.host, 'cp.get_file_str', [in_path])[self.host]
|
||||||
open(out_path, 'wb').write(content)
|
open(out_path, 'wb').write(content)
|
||||||
|
|
||||||
|
|||||||
793
plugins/connection/wsl.py
Normal file
793
plugins/connection/wsl.py
Normal file
@@ -0,0 +1,793 @@
|
|||||||
|
# -*- coding: utf-8 -*-
|
||||||
|
# Derived from ansible/plugins/connection/proxmox_pct_remote.py (c) 2024 Nils Stein (@mietzen) <github.nstein@mailbox.org>
|
||||||
|
# Derived from ansible/plugins/connection/paramiko_ssh.py (c) 2012, Michael DeHaan <michael.dehaan@gmail.com>
|
||||||
|
# Copyright (c) 2025 Rui Lopes (@rgl) <ruilopes.com>
|
||||||
|
# Copyright (c) 2025 Ansible Project
|
||||||
|
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||||
|
# SPDX-License-Identifier: GPL-3.0-or-later
|
||||||
|
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
DOCUMENTATION = r"""
|
||||||
|
author: Rui Lopes (@rgl) <ruilopes.com>
|
||||||
|
name: wsl
|
||||||
|
short_description: Run tasks in WSL distribution using wsl.exe CLI via SSH
|
||||||
|
requirements:
|
||||||
|
- paramiko
|
||||||
|
description:
|
||||||
|
- Run commands or put/fetch files to an existing WSL distribution using wsl.exe CLI via SSH.
|
||||||
|
- Uses the Python SSH implementation (Paramiko) to connect to the WSL host.
|
||||||
|
version_added: "10.6.0"
|
||||||
|
options:
|
||||||
|
remote_addr:
|
||||||
|
description:
|
||||||
|
- Address of the remote target.
|
||||||
|
default: inventory_hostname
|
||||||
|
type: string
|
||||||
|
vars:
|
||||||
|
- name: inventory_hostname
|
||||||
|
- name: ansible_host
|
||||||
|
- name: ansible_ssh_host
|
||||||
|
- name: ansible_paramiko_host
|
||||||
|
port:
|
||||||
|
description: Remote port to connect to.
|
||||||
|
type: int
|
||||||
|
default: 22
|
||||||
|
ini:
|
||||||
|
- section: defaults
|
||||||
|
key: remote_port
|
||||||
|
- section: paramiko_connection
|
||||||
|
key: remote_port
|
||||||
|
env:
|
||||||
|
- name: ANSIBLE_REMOTE_PORT
|
||||||
|
- name: ANSIBLE_REMOTE_PARAMIKO_PORT
|
||||||
|
vars:
|
||||||
|
- name: ansible_port
|
||||||
|
- name: ansible_ssh_port
|
||||||
|
- name: ansible_paramiko_port
|
||||||
|
keyword:
|
||||||
|
- name: port
|
||||||
|
remote_user:
|
||||||
|
description:
|
||||||
|
- User to login/authenticate as.
|
||||||
|
- Can be set from the CLI via the C(--user) or C(-u) options.
|
||||||
|
type: string
|
||||||
|
vars:
|
||||||
|
- name: ansible_user
|
||||||
|
- name: ansible_ssh_user
|
||||||
|
- name: ansible_paramiko_user
|
||||||
|
env:
|
||||||
|
- name: ANSIBLE_REMOTE_USER
|
||||||
|
- name: ANSIBLE_PARAMIKO_REMOTE_USER
|
||||||
|
ini:
|
||||||
|
- section: defaults
|
||||||
|
key: remote_user
|
||||||
|
- section: paramiko_connection
|
||||||
|
key: remote_user
|
||||||
|
keyword:
|
||||||
|
- name: remote_user
|
||||||
|
password:
|
||||||
|
description:
|
||||||
|
- Secret used to either login the SSH server or as a passphrase for SSH keys that require it.
|
||||||
|
- Can be set from the CLI via the C(--ask-pass) option.
|
||||||
|
type: string
|
||||||
|
vars:
|
||||||
|
- name: ansible_password
|
||||||
|
- name: ansible_ssh_pass
|
||||||
|
- name: ansible_ssh_password
|
||||||
|
- name: ansible_paramiko_pass
|
||||||
|
- name: ansible_paramiko_password
|
||||||
|
use_rsa_sha2_algorithms:
|
||||||
|
description:
|
||||||
|
- Whether or not to enable RSA SHA2 algorithms for pubkeys and hostkeys.
|
||||||
|
- On paramiko versions older than 2.9, this only affects hostkeys.
|
||||||
|
- For behavior matching paramiko<2.9 set this to V(false).
|
||||||
|
vars:
|
||||||
|
- name: ansible_paramiko_use_rsa_sha2_algorithms
|
||||||
|
ini:
|
||||||
|
- {key: use_rsa_sha2_algorithms, section: paramiko_connection}
|
||||||
|
env:
|
||||||
|
- {name: ANSIBLE_PARAMIKO_USE_RSA_SHA2_ALGORITHMS}
|
||||||
|
default: true
|
||||||
|
type: boolean
|
||||||
|
host_key_auto_add:
|
||||||
|
description: "Automatically add host keys to C(~/.ssh/known_hosts)."
|
||||||
|
env:
|
||||||
|
- name: ANSIBLE_PARAMIKO_HOST_KEY_AUTO_ADD
|
||||||
|
ini:
|
||||||
|
- key: host_key_auto_add
|
||||||
|
section: paramiko_connection
|
||||||
|
type: boolean
|
||||||
|
look_for_keys:
|
||||||
|
default: true
|
||||||
|
description: "Set to V(false) to disable searching for private key files in C(~/.ssh/)."
|
||||||
|
env:
|
||||||
|
- name: ANSIBLE_PARAMIKO_LOOK_FOR_KEYS
|
||||||
|
ini:
|
||||||
|
- {key: look_for_keys, section: paramiko_connection}
|
||||||
|
type: boolean
|
||||||
|
proxy_command:
|
||||||
|
default: ""
|
||||||
|
description:
|
||||||
|
- Proxy information for running the connection via a jumphost.
|
||||||
|
- This option is supported by paramiko version 1.9.0 or newer.
|
||||||
|
type: string
|
||||||
|
env:
|
||||||
|
- name: ANSIBLE_PARAMIKO_PROXY_COMMAND
|
||||||
|
ini:
|
||||||
|
- {key: proxy_command, section: paramiko_connection}
|
||||||
|
vars:
|
||||||
|
- name: ansible_paramiko_proxy_command
|
||||||
|
record_host_keys:
|
||||||
|
default: true
|
||||||
|
description: "Save the host keys to a file."
|
||||||
|
env:
|
||||||
|
- name: ANSIBLE_PARAMIKO_RECORD_HOST_KEYS
|
||||||
|
ini:
|
||||||
|
- section: paramiko_connection
|
||||||
|
key: record_host_keys
|
||||||
|
type: boolean
|
||||||
|
host_key_checking:
|
||||||
|
description: "Set this to V(false) if you want to avoid host key checking by the underlying tools Ansible uses to connect to the host."
|
||||||
|
type: boolean
|
||||||
|
default: true
|
||||||
|
env:
|
||||||
|
- name: ANSIBLE_HOST_KEY_CHECKING
|
||||||
|
- name: ANSIBLE_SSH_HOST_KEY_CHECKING
|
||||||
|
- name: ANSIBLE_PARAMIKO_HOST_KEY_CHECKING
|
||||||
|
ini:
|
||||||
|
- section: defaults
|
||||||
|
key: host_key_checking
|
||||||
|
- section: paramiko_connection
|
||||||
|
key: host_key_checking
|
||||||
|
vars:
|
||||||
|
- name: ansible_host_key_checking
|
||||||
|
- name: ansible_ssh_host_key_checking
|
||||||
|
- name: ansible_paramiko_host_key_checking
|
||||||
|
use_persistent_connections:
|
||||||
|
description: "Toggles the use of persistence for connections."
|
||||||
|
type: boolean
|
||||||
|
default: false
|
||||||
|
env:
|
||||||
|
- name: ANSIBLE_USE_PERSISTENT_CONNECTIONS
|
||||||
|
ini:
|
||||||
|
- section: defaults
|
||||||
|
key: use_persistent_connections
|
||||||
|
banner_timeout:
|
||||||
|
type: float
|
||||||
|
default: 30
|
||||||
|
description:
|
||||||
|
- Configures, in seconds, the amount of time to wait for the SSH
|
||||||
|
banner to be presented.
|
||||||
|
- This option is supported by paramiko version 1.15.0 or newer.
|
||||||
|
ini:
|
||||||
|
- section: paramiko_connection
|
||||||
|
key: banner_timeout
|
||||||
|
env:
|
||||||
|
- name: ANSIBLE_PARAMIKO_BANNER_TIMEOUT
|
||||||
|
timeout:
|
||||||
|
type: int
|
||||||
|
default: 10
|
||||||
|
description:
|
||||||
|
- Number of seconds until the plugin gives up on failing to establish a TCP connection.
|
||||||
|
- This option is supported by paramiko version 2.2.0 or newer.
|
||||||
|
ini:
|
||||||
|
- section: defaults
|
||||||
|
key: timeout
|
||||||
|
- section: ssh_connection
|
||||||
|
key: timeout
|
||||||
|
- section: paramiko_connection
|
||||||
|
key: timeout
|
||||||
|
env:
|
||||||
|
- name: ANSIBLE_TIMEOUT
|
||||||
|
- name: ANSIBLE_SSH_TIMEOUT
|
||||||
|
- name: ANSIBLE_PARAMIKO_TIMEOUT
|
||||||
|
vars:
|
||||||
|
- name: ansible_ssh_timeout
|
||||||
|
- name: ansible_paramiko_timeout
|
||||||
|
cli:
|
||||||
|
- name: timeout
|
||||||
|
lock_file_timeout:
|
||||||
|
type: int
|
||||||
|
default: 60
|
||||||
|
description: Number of seconds until the plugin gives up on trying to write a lock file when writing SSH known host keys.
|
||||||
|
vars:
|
||||||
|
- name: ansible_lock_file_timeout
|
||||||
|
env:
|
||||||
|
- name: ANSIBLE_LOCK_FILE_TIMEOUT
|
||||||
|
private_key_file:
|
||||||
|
description:
|
||||||
|
- Path to private key file to use for authentication.
|
||||||
|
type: path
|
||||||
|
ini:
|
||||||
|
- section: defaults
|
||||||
|
key: private_key_file
|
||||||
|
- section: paramiko_connection
|
||||||
|
key: private_key_file
|
||||||
|
env:
|
||||||
|
- name: ANSIBLE_PRIVATE_KEY_FILE
|
||||||
|
- name: ANSIBLE_PARAMIKO_PRIVATE_KEY_FILE
|
||||||
|
vars:
|
||||||
|
- name: ansible_private_key_file
|
||||||
|
- name: ansible_ssh_private_key_file
|
||||||
|
- name: ansible_paramiko_private_key_file
|
||||||
|
cli:
|
||||||
|
- name: private_key_file
|
||||||
|
option: "--private-key"
|
||||||
|
user_known_hosts_file:
|
||||||
|
description:
|
||||||
|
- Path to the user known hosts file.
|
||||||
|
- Used to verify the ssh hosts keys.
|
||||||
|
type: path
|
||||||
|
default: ~/.ssh/known_hosts
|
||||||
|
ini:
|
||||||
|
- section: paramiko_connection
|
||||||
|
key: user_known_hosts_file
|
||||||
|
vars:
|
||||||
|
- name: ansible_paramiko_user_known_hosts_file
|
||||||
|
wsl_distribution:
|
||||||
|
description:
|
||||||
|
- WSL distribution name
|
||||||
|
type: string
|
||||||
|
required: true
|
||||||
|
vars:
|
||||||
|
- name: wsl_distribution
|
||||||
|
wsl_user:
|
||||||
|
description:
|
||||||
|
- WSL distribution user
|
||||||
|
type: string
|
||||||
|
vars:
|
||||||
|
- name: wsl_user
|
||||||
|
become_user:
|
||||||
|
description:
|
||||||
|
- WSL distribution user
|
||||||
|
type: string
|
||||||
|
default: root
|
||||||
|
vars:
|
||||||
|
- name: become_user
|
||||||
|
- name: ansible_become_user
|
||||||
|
become:
|
||||||
|
description:
|
||||||
|
- whether to use the user defined by ansible_become_user.
|
||||||
|
type: bool
|
||||||
|
default: false
|
||||||
|
vars:
|
||||||
|
- name: become
|
||||||
|
- name: ansible_become
|
||||||
|
"""
|
||||||
|
|
||||||
|
EXAMPLES = r"""
|
||||||
|
# ------------------------
|
||||||
|
# Inventory: inventory.yml
|
||||||
|
# ------------------------
|
||||||
|
---
|
||||||
|
all:
|
||||||
|
children:
|
||||||
|
wsl:
|
||||||
|
hosts:
|
||||||
|
example-wsl-ubuntu:
|
||||||
|
ansible_host: 10.0.0.10
|
||||||
|
wsl_distribution: ubuntu
|
||||||
|
wsl_user: ubuntu
|
||||||
|
vars:
|
||||||
|
ansible_connection: community.general.wsl
|
||||||
|
ansible_user: vagrant
|
||||||
|
# ----------------------
|
||||||
|
# Playbook: playbook.yml
|
||||||
|
# ----------------------
|
||||||
|
---
|
||||||
|
- name: WSL Example
|
||||||
|
hosts: wsl
|
||||||
|
gather_facts: true
|
||||||
|
become: true
|
||||||
|
tasks:
|
||||||
|
- name: Ping
|
||||||
|
ansible.builtin.ping:
|
||||||
|
- name: Id (with become false)
|
||||||
|
become: false
|
||||||
|
changed_when: false
|
||||||
|
args:
|
||||||
|
executable: /bin/bash
|
||||||
|
ansible.builtin.shell: |
|
||||||
|
exec 2>&1
|
||||||
|
set -x
|
||||||
|
echo "$0"
|
||||||
|
pwd
|
||||||
|
id
|
||||||
|
- name: Id (with become true)
|
||||||
|
changed_when: false
|
||||||
|
args:
|
||||||
|
executable: /bin/bash
|
||||||
|
ansible.builtin.shell: |
|
||||||
|
exec 2>&1
|
||||||
|
set -x
|
||||||
|
echo "$0"
|
||||||
|
pwd
|
||||||
|
id
|
||||||
|
- name: Reboot
|
||||||
|
ansible.builtin.reboot:
|
||||||
|
boot_time_command: systemctl show -p ActiveEnterTimestamp init.scope
|
||||||
|
"""
|
||||||
|
|
||||||
|
import io
|
||||||
|
import os
|
||||||
|
import pathlib
|
||||||
|
import shlex
|
||||||
|
import socket
|
||||||
|
import tempfile
|
||||||
|
import traceback
|
||||||
|
import typing as t
|
||||||
|
|
||||||
|
from ansible.errors import (
|
||||||
|
AnsibleAuthenticationFailure,
|
||||||
|
AnsibleConnectionFailure,
|
||||||
|
AnsibleError,
|
||||||
|
)
|
||||||
|
from ansible_collections.community.general.plugins.module_utils._filelock import FileLock, LockTimeout
|
||||||
|
from ansible_collections.community.general.plugins.module_utils.version import LooseVersion
|
||||||
|
from ansible.module_utils.common.text.converters import to_bytes, to_native, to_text
|
||||||
|
from ansible.playbook.play_context import PlayContext
|
||||||
|
from ansible.plugins.connection import ConnectionBase
|
||||||
|
from ansible.utils.display import Display
|
||||||
|
from ansible.utils.path import makedirs_safe
|
||||||
|
from binascii import hexlify
|
||||||
|
from subprocess import list2cmdline
|
||||||
|
|
||||||
|
try:
|
||||||
|
import paramiko
|
||||||
|
PARAMIKO_IMPORT_ERR = None
|
||||||
|
except ImportError:
|
||||||
|
paramiko = None
|
||||||
|
PARAMIKO_IMPORT_ERR = traceback.format_exc()
|
||||||
|
|
||||||
|
|
||||||
|
if t.TYPE_CHECKING and PARAMIKO_IMPORT_ERR is None:
|
||||||
|
from paramiko import MissingHostKeyPolicy
|
||||||
|
from paramiko.client import SSHClient
|
||||||
|
from paramiko.pkey import PKey
|
||||||
|
else:
|
||||||
|
MissingHostKeyPolicy: type = object
|
||||||
|
SSHClient: type = object
|
||||||
|
PKey: type = object
|
||||||
|
|
||||||
|
|
||||||
|
display = Display()
|
||||||
|
|
||||||
|
|
||||||
|
def authenticity_msg(hostname: str, ktype: str, fingerprint: str) -> str:
|
||||||
|
msg = f"""
|
||||||
|
paramiko: The authenticity of host '{hostname}' can't be established.
|
||||||
|
The {ktype} key fingerprint is {fingerprint}.
|
||||||
|
Are you sure you want to continue connecting (yes/no)?
|
||||||
|
"""
|
||||||
|
return msg
|
||||||
|
|
||||||
|
|
||||||
|
class MyAddPolicy(MissingHostKeyPolicy):
|
||||||
|
"""
|
||||||
|
Based on AutoAddPolicy in paramiko so we can determine when keys are added
|
||||||
|
|
||||||
|
and also prompt for input.
|
||||||
|
|
||||||
|
Policy for automatically adding the hostname and new host key to the
|
||||||
|
local L{HostKeys} object, and saving it. This is used by L{SSHClient}.
|
||||||
|
"""
|
||||||
|
|
||||||
|
def __init__(self, connection: Connection) -> None:
|
||||||
|
self.connection = connection
|
||||||
|
self._options = connection._options
|
||||||
|
|
||||||
|
def missing_host_key(self, client: SSHClient, hostname: str, key: PKey) -> None:
|
||||||
|
|
||||||
|
if all((self.connection.get_option('host_key_checking'), not self.connection.get_option('host_key_auto_add'))):
|
||||||
|
|
||||||
|
fingerprint = hexlify(key.get_fingerprint())
|
||||||
|
ktype = key.get_name()
|
||||||
|
|
||||||
|
if self.connection.get_option('use_persistent_connections') or self.connection.force_persistence:
|
||||||
|
# don't print the prompt string since the user cannot respond
|
||||||
|
# to the question anyway
|
||||||
|
raise AnsibleError(authenticity_msg(hostname, ktype, fingerprint)[1:92])
|
||||||
|
|
||||||
|
inp = to_text(
|
||||||
|
display.prompt_until(authenticity_msg(hostname, ktype, fingerprint), private=False),
|
||||||
|
errors='surrogate_or_strict'
|
||||||
|
)
|
||||||
|
|
||||||
|
if inp.lower() not in ['yes', 'y', '']:
|
||||||
|
raise AnsibleError('host connection rejected by user')
|
||||||
|
|
||||||
|
key._added_by_ansible_this_time = True
|
||||||
|
|
||||||
|
# existing implementation below:
|
||||||
|
client._host_keys.add(hostname, key.get_name(), key)
|
||||||
|
|
||||||
|
# host keys are actually saved in close() function below
|
||||||
|
# in order to control ordering.
|
||||||
|
|
||||||
|
|
||||||
|
class Connection(ConnectionBase):
|
||||||
|
""" SSH based connections (paramiko) to WSL """
|
||||||
|
|
||||||
|
transport = 'community.general.wsl'
|
||||||
|
_log_channel: str | None = None
|
||||||
|
|
||||||
|
def __init__(self, play_context: PlayContext, new_stdin: io.TextIOWrapper | None = None, *args: t.Any, **kwargs: t.Any):
|
||||||
|
super(Connection, self).__init__(play_context, new_stdin, *args, **kwargs)
|
||||||
|
|
||||||
|
def _set_log_channel(self, name: str) -> None:
|
||||||
|
""" Mimic paramiko.SSHClient.set_log_channel """
|
||||||
|
self._log_channel = name
|
||||||
|
|
||||||
|
def _parse_proxy_command(self, port: int = 22) -> dict[str, t.Any]:
|
||||||
|
proxy_command = self.get_option('proxy_command') or None
|
||||||
|
|
||||||
|
sock_kwarg = {}
|
||||||
|
if proxy_command:
|
||||||
|
replacers: t.Dict[str, str] = {
|
||||||
|
'%h': self.get_option('remote_addr'),
|
||||||
|
'%p': str(port),
|
||||||
|
'%r': self.get_option('remote_user')
|
||||||
|
}
|
||||||
|
for find, replace in replacers.items():
|
||||||
|
proxy_command = proxy_command.replace(find, replace)
|
||||||
|
try:
|
||||||
|
sock_kwarg = {'sock': paramiko.ProxyCommand(proxy_command)}
|
||||||
|
display.vvv(f'CONFIGURE PROXY COMMAND FOR CONNECTION: {proxy_command}', host=self.get_option('remote_addr'))
|
||||||
|
except AttributeError:
|
||||||
|
display.warning('Paramiko ProxyCommand support unavailable. '
|
||||||
|
'Please upgrade to Paramiko 1.9.0 or newer. '
|
||||||
|
'Not using configured ProxyCommand')
|
||||||
|
|
||||||
|
return sock_kwarg
|
||||||
|
|
||||||
|
def _connect(self) -> Connection:
|
||||||
|
""" activates the connection object """
|
||||||
|
|
||||||
|
if PARAMIKO_IMPORT_ERR is not None:
|
||||||
|
raise AnsibleError(f'paramiko is not installed: {to_native(PARAMIKO_IMPORT_ERR)}')
|
||||||
|
|
||||||
|
port = self.get_option('port')
|
||||||
|
display.vvv(f'ESTABLISH PARAMIKO SSH CONNECTION FOR USER: {self.get_option("remote_user")} on PORT {to_text(port)} TO {self.get_option("remote_addr")}',
|
||||||
|
host=self.get_option('remote_addr'))
|
||||||
|
|
||||||
|
ssh = paramiko.SSHClient()
|
||||||
|
|
||||||
|
# Set pubkey and hostkey algorithms to disable, the only manipulation allowed currently
|
||||||
|
# is keeping or omitting rsa-sha2 algorithms
|
||||||
|
# default_keys: t.Tuple[str] = ()
|
||||||
|
paramiko_preferred_pubkeys = getattr(paramiko.Transport, '_preferred_pubkeys', ())
|
||||||
|
paramiko_preferred_hostkeys = getattr(paramiko.Transport, '_preferred_keys', ())
|
||||||
|
use_rsa_sha2_algorithms = self.get_option('use_rsa_sha2_algorithms')
|
||||||
|
disabled_algorithms: t.Dict[str, t.Iterable[str]] = {}
|
||||||
|
if not use_rsa_sha2_algorithms:
|
||||||
|
if paramiko_preferred_pubkeys:
|
||||||
|
disabled_algorithms['pubkeys'] = tuple(a for a in paramiko_preferred_pubkeys if 'rsa-sha2' in a)
|
||||||
|
if paramiko_preferred_hostkeys:
|
||||||
|
disabled_algorithms['keys'] = tuple(a for a in paramiko_preferred_hostkeys if 'rsa-sha2' in a)
|
||||||
|
|
||||||
|
# override paramiko's default logger name
|
||||||
|
if self._log_channel is not None:
|
||||||
|
ssh.set_log_channel(self._log_channel)
|
||||||
|
|
||||||
|
self.keyfile = os.path.expanduser(self.get_option('user_known_hosts_file'))
|
||||||
|
|
||||||
|
if self.get_option('host_key_checking'):
|
||||||
|
for ssh_known_hosts in ('/etc/ssh/ssh_known_hosts', '/etc/openssh/ssh_known_hosts', self.keyfile):
|
||||||
|
try:
|
||||||
|
ssh.load_system_host_keys(ssh_known_hosts)
|
||||||
|
break
|
||||||
|
except IOError:
|
||||||
|
pass # file was not found, but not required to function
|
||||||
|
except paramiko.hostkeys.InvalidHostKey as e:
|
||||||
|
raise AnsibleConnectionFailure(f'Invalid host key: {to_text(e.line)}')
|
||||||
|
try:
|
||||||
|
ssh.load_system_host_keys()
|
||||||
|
except paramiko.hostkeys.InvalidHostKey as e:
|
||||||
|
raise AnsibleConnectionFailure(f'Invalid host key: {to_text(e.line)}')
|
||||||
|
|
||||||
|
ssh_connect_kwargs = self._parse_proxy_command(port)
|
||||||
|
ssh.set_missing_host_key_policy(MyAddPolicy(self))
|
||||||
|
conn_password = self.get_option('password')
|
||||||
|
allow_agent = True
|
||||||
|
|
||||||
|
if conn_password is not None:
|
||||||
|
allow_agent = False
|
||||||
|
|
||||||
|
try:
|
||||||
|
key_filename = None
|
||||||
|
if self.get_option('private_key_file'):
|
||||||
|
key_filename = os.path.expanduser(self.get_option('private_key_file'))
|
||||||
|
|
||||||
|
# paramiko 2.2 introduced auth_timeout parameter
|
||||||
|
if LooseVersion(paramiko.__version__) >= LooseVersion('2.2.0'):
|
||||||
|
ssh_connect_kwargs['auth_timeout'] = self.get_option('timeout')
|
||||||
|
|
||||||
|
# paramiko 1.15 introduced banner timeout parameter
|
||||||
|
if LooseVersion(paramiko.__version__) >= LooseVersion('1.15.0'):
|
||||||
|
ssh_connect_kwargs['banner_timeout'] = self.get_option('banner_timeout')
|
||||||
|
|
||||||
|
ssh.connect(
|
||||||
|
self.get_option('remote_addr').lower(),
|
||||||
|
username=self.get_option('remote_user'),
|
||||||
|
allow_agent=allow_agent,
|
||||||
|
look_for_keys=self.get_option('look_for_keys'),
|
||||||
|
key_filename=key_filename,
|
||||||
|
password=conn_password,
|
||||||
|
timeout=self.get_option('timeout'),
|
||||||
|
port=port,
|
||||||
|
disabled_algorithms=disabled_algorithms,
|
||||||
|
**ssh_connect_kwargs,
|
||||||
|
)
|
||||||
|
except paramiko.ssh_exception.BadHostKeyException as e:
|
||||||
|
raise AnsibleConnectionFailure(f'host key mismatch for {to_text(e.hostname)}')
|
||||||
|
except paramiko.ssh_exception.AuthenticationException as e:
|
||||||
|
msg = f'Failed to authenticate: {e}'
|
||||||
|
raise AnsibleAuthenticationFailure(msg)
|
||||||
|
except Exception as e:
|
||||||
|
msg = to_text(e)
|
||||||
|
if u'PID check failed' in msg:
|
||||||
|
raise AnsibleError('paramiko version issue, please upgrade paramiko on the machine running ansible')
|
||||||
|
elif u'Private key file is encrypted' in msg:
|
||||||
|
msg = f'ssh {self.get_option("remote_user")}@{self.get_options("remote_addr")}:{port} : ' + \
|
||||||
|
f'{msg}\nTo connect as a different user, use -u <username>.'
|
||||||
|
raise AnsibleConnectionFailure(msg)
|
||||||
|
else:
|
||||||
|
raise AnsibleConnectionFailure(msg)
|
||||||
|
self.ssh = ssh
|
||||||
|
self._connected = True
|
||||||
|
return self
|
||||||
|
|
||||||
|
def _any_keys_added(self) -> bool:
|
||||||
|
for hostname, keys in self.ssh._host_keys.items():
|
||||||
|
for keytype, key in keys.items():
|
||||||
|
added_this_time = getattr(key, '_added_by_ansible_this_time', False)
|
||||||
|
if added_this_time:
|
||||||
|
return True
|
||||||
|
return False
|
||||||
|
|
||||||
|
def _save_ssh_host_keys(self, filename: str) -> None:
|
||||||
|
"""
|
||||||
|
not using the paramiko save_ssh_host_keys function as we want to add new SSH keys at the bottom so folks
|
||||||
|
don't complain about it :)
|
||||||
|
"""
|
||||||
|
|
||||||
|
if not self._any_keys_added():
|
||||||
|
return
|
||||||
|
|
||||||
|
path = os.path.expanduser('~/.ssh')
|
||||||
|
makedirs_safe(path)
|
||||||
|
|
||||||
|
with open(filename, 'w') as f:
|
||||||
|
for hostname, keys in self.ssh._host_keys.items():
|
||||||
|
for keytype, key in keys.items():
|
||||||
|
# was f.write
|
||||||
|
added_this_time = getattr(key, '_added_by_ansible_this_time', False)
|
||||||
|
if not added_this_time:
|
||||||
|
f.write(f'{hostname} {keytype} {key.get_base64()}\n')
|
||||||
|
|
||||||
|
for hostname, keys in self.ssh._host_keys.items():
|
||||||
|
for keytype, key in keys.items():
|
||||||
|
added_this_time = getattr(key, '_added_by_ansible_this_time', False)
|
||||||
|
if added_this_time:
|
||||||
|
f.write(f'{hostname} {keytype} {key.get_base64()}\n')
|
||||||
|
|
||||||
|
def _build_wsl_command(self, cmd: str) -> str:
|
||||||
|
wsl_distribution = self.get_option('wsl_distribution')
|
||||||
|
become = self.get_option('become')
|
||||||
|
become_user = self.get_option('become_user')
|
||||||
|
if become and become_user:
|
||||||
|
wsl_user = become_user
|
||||||
|
else:
|
||||||
|
wsl_user = self.get_option('wsl_user')
|
||||||
|
args = ['wsl.exe', '--distribution', wsl_distribution]
|
||||||
|
if wsl_user:
|
||||||
|
args.extend(['--user', wsl_user])
|
||||||
|
args.extend(['--'])
|
||||||
|
args.extend(shlex.split(cmd))
|
||||||
|
if os.getenv('_ANSIBLE_TEST_WSL_CONNECTION_PLUGIN_Waeri5tepheeSha2fae8'):
|
||||||
|
return shlex.join(args)
|
||||||
|
return list2cmdline(args) # see https://github.com/python/cpython/blob/3.11/Lib/subprocess.py#L576
|
||||||
|
|
||||||
|
def exec_command(self, cmd: str, in_data: bytes | None = None, sudoable: bool = True) -> tuple[int, bytes, bytes]:
|
||||||
|
""" run a command on inside a WSL distribution """
|
||||||
|
|
||||||
|
cmd = self._build_wsl_command(cmd)
|
||||||
|
|
||||||
|
super(Connection, self).exec_command(cmd, in_data=in_data, sudoable=sudoable)
|
||||||
|
|
||||||
|
bufsize = 4096
|
||||||
|
|
||||||
|
try:
|
||||||
|
self.ssh.get_transport().set_keepalive(5)
|
||||||
|
chan = self.ssh.get_transport().open_session()
|
||||||
|
except Exception as e:
|
||||||
|
text_e = to_text(e)
|
||||||
|
msg = 'Failed to open session'
|
||||||
|
if text_e:
|
||||||
|
msg += f': {text_e}'
|
||||||
|
raise AnsibleConnectionFailure(to_native(msg))
|
||||||
|
|
||||||
|
display.vvv(f'EXEC {cmd}', host=self.get_option('remote_addr'))
|
||||||
|
|
||||||
|
cmd = to_bytes(cmd, errors='surrogate_or_strict')
|
||||||
|
|
||||||
|
no_prompt_out = b''
|
||||||
|
no_prompt_err = b''
|
||||||
|
become_output = b''
|
||||||
|
|
||||||
|
try:
|
||||||
|
chan.exec_command(cmd)
|
||||||
|
if self.become and self.become.expect_prompt():
|
||||||
|
password_prompt = False
|
||||||
|
become_success = False
|
||||||
|
while not (become_success or password_prompt):
|
||||||
|
display.debug('Waiting for Privilege Escalation input')
|
||||||
|
|
||||||
|
chunk = chan.recv(bufsize)
|
||||||
|
display.debug(f'chunk is: {to_text(chunk)}')
|
||||||
|
if not chunk:
|
||||||
|
if b'unknown user' in become_output:
|
||||||
|
n_become_user = to_native(self.become.get_option('become_user'))
|
||||||
|
raise AnsibleError(f'user {n_become_user} does not exist')
|
||||||
|
else:
|
||||||
|
break
|
||||||
|
# raise AnsibleError('ssh connection closed waiting for password prompt')
|
||||||
|
become_output += chunk
|
||||||
|
|
||||||
|
# need to check every line because we might get lectured
|
||||||
|
# and we might get the middle of a line in a chunk
|
||||||
|
for line in become_output.splitlines(True):
|
||||||
|
if self.become.check_success(line):
|
||||||
|
become_success = True
|
||||||
|
break
|
||||||
|
elif self.become.check_password_prompt(line):
|
||||||
|
password_prompt = True
|
||||||
|
break
|
||||||
|
|
||||||
|
if password_prompt:
|
||||||
|
if self.become:
|
||||||
|
become_pass = self.become.get_option('become_pass')
|
||||||
|
chan.sendall(to_bytes(become_pass + '\n', errors='surrogate_or_strict'))
|
||||||
|
else:
|
||||||
|
raise AnsibleError('A password is required but none was supplied')
|
||||||
|
else:
|
||||||
|
no_prompt_out += become_output
|
||||||
|
no_prompt_err += become_output
|
||||||
|
|
||||||
|
if in_data:
|
||||||
|
for i in range(0, len(in_data), bufsize):
|
||||||
|
chan.send(in_data[i:i + bufsize])
|
||||||
|
chan.shutdown_write()
|
||||||
|
elif in_data == b'':
|
||||||
|
chan.shutdown_write()
|
||||||
|
|
||||||
|
except socket.timeout:
|
||||||
|
raise AnsibleError('ssh timed out waiting for privilege escalation.\n' + to_text(become_output))
|
||||||
|
|
||||||
|
stdout = b''.join(chan.makefile('rb', bufsize))
|
||||||
|
stderr = b''.join(chan.makefile_stderr('rb', bufsize))
|
||||||
|
returncode = chan.recv_exit_status()
|
||||||
|
|
||||||
|
# NB the full english error message is:
|
||||||
|
# 'wsl.exe' is not recognized as an internal or external command,
|
||||||
|
# operable program or batch file.
|
||||||
|
if "'wsl.exe' is not recognized" in stderr.decode('utf-8'):
|
||||||
|
raise AnsibleError(
|
||||||
|
f'wsl.exe not found in path of host: {to_text(self.get_option("remote_addr"))}')
|
||||||
|
|
||||||
|
return (returncode, no_prompt_out + stdout, no_prompt_out + stderr)
|
||||||
|
|
||||||
|
def put_file(self, in_path: str, out_path: str) -> None:
|
||||||
|
""" transfer a file from local to remote """
|
||||||
|
|
||||||
|
display.vvv(f'PUT {in_path} TO {out_path}', host=self.get_option('remote_addr'))
|
||||||
|
try:
|
||||||
|
with open(in_path, 'rb') as f:
|
||||||
|
data = f.read()
|
||||||
|
returncode, stdout, stderr = self.exec_command(
|
||||||
|
' '.join([
|
||||||
|
self._shell.executable, '-c',
|
||||||
|
self._shell.quote(f'cat > {out_path}')]),
|
||||||
|
in_data=data,
|
||||||
|
sudoable=False)
|
||||||
|
if returncode != 0:
|
||||||
|
if 'cat: not found' in stderr.decode('utf-8'):
|
||||||
|
raise AnsibleError(
|
||||||
|
f'cat not found in path of WSL distribution: {to_text(self.get_option("wsl_distribution"))}')
|
||||||
|
raise AnsibleError(
|
||||||
|
f'{to_text(stdout)}\n{to_text(stderr)}')
|
||||||
|
except Exception as e:
|
||||||
|
raise AnsibleError(
|
||||||
|
f'error occurred while putting file from {in_path} to {out_path}!\n{to_text(e)}')
|
||||||
|
|
||||||
|
def fetch_file(self, in_path: str, out_path: str) -> None:
|
||||||
|
""" save a remote file to the specified path """
|
||||||
|
|
||||||
|
display.vvv(f'FETCH {in_path} TO {out_path}', host=self.get_option('remote_addr'))
|
||||||
|
try:
|
||||||
|
returncode, stdout, stderr = self.exec_command(
|
||||||
|
' '.join([
|
||||||
|
self._shell.executable, '-c',
|
||||||
|
self._shell.quote(f'cat {in_path}')]),
|
||||||
|
sudoable=False)
|
||||||
|
if returncode != 0:
|
||||||
|
if 'cat: not found' in stderr.decode('utf-8'):
|
||||||
|
raise AnsibleError(
|
||||||
|
f'cat not found in path of WSL distribution: {to_text(self.get_option("wsl_distribution"))}')
|
||||||
|
raise AnsibleError(
|
||||||
|
f'{to_text(stdout)}\n{to_text(stderr)}')
|
||||||
|
with open(out_path, 'wb') as f:
|
||||||
|
f.write(stdout)
|
||||||
|
except Exception as e:
|
||||||
|
raise AnsibleError(
|
||||||
|
f'error occurred while fetching file from {in_path} to {out_path}!\n{to_text(e)}')
|
||||||
|
|
||||||
|
def reset(self) -> None:
|
||||||
|
""" reset the connection """
|
||||||
|
|
||||||
|
if not self._connected:
|
||||||
|
return
|
||||||
|
self.close()
|
||||||
|
self._connect()
|
||||||
|
|
||||||
|
def close(self) -> None:
|
||||||
|
""" terminate the connection """
|
||||||
|
|
||||||
|
if self.get_option('host_key_checking') and self.get_option('record_host_keys') and self._any_keys_added():
|
||||||
|
# add any new SSH host keys -- warning -- this could be slow
|
||||||
|
# (This doesn't acquire the connection lock because it needs
|
||||||
|
# to exclude only other known_hosts writers, not connections
|
||||||
|
# that are starting up.)
|
||||||
|
lockfile = os.path.basename(self.keyfile)
|
||||||
|
dirname = os.path.dirname(self.keyfile)
|
||||||
|
makedirs_safe(dirname)
|
||||||
|
tmp_keyfile_name = None
|
||||||
|
try:
|
||||||
|
with FileLock().lock_file(lockfile, dirname, self.get_option('lock_file_timeout')):
|
||||||
|
# just in case any were added recently
|
||||||
|
|
||||||
|
self.ssh.load_system_host_keys()
|
||||||
|
self.ssh._host_keys.update(self.ssh._system_host_keys)
|
||||||
|
|
||||||
|
# gather information about the current key file, so
|
||||||
|
# we can ensure the new file has the correct mode/owner
|
||||||
|
|
||||||
|
key_dir = os.path.dirname(self.keyfile)
|
||||||
|
if os.path.exists(self.keyfile):
|
||||||
|
key_stat = os.stat(self.keyfile)
|
||||||
|
mode = key_stat.st_mode & 0o777
|
||||||
|
uid = key_stat.st_uid
|
||||||
|
gid = key_stat.st_gid
|
||||||
|
else:
|
||||||
|
mode = 0o644
|
||||||
|
uid = os.getuid()
|
||||||
|
gid = os.getgid()
|
||||||
|
|
||||||
|
# Save the new keys to a temporary file and move it into place
|
||||||
|
# rather than rewriting the file. We set delete=False because
|
||||||
|
# the file will be moved into place rather than cleaned up.
|
||||||
|
|
||||||
|
with tempfile.NamedTemporaryFile(dir=key_dir, delete=False) as tmp_keyfile:
|
||||||
|
tmp_keyfile_name = tmp_keyfile.name
|
||||||
|
os.chmod(tmp_keyfile_name, mode)
|
||||||
|
os.chown(tmp_keyfile_name, uid, gid)
|
||||||
|
self._save_ssh_host_keys(tmp_keyfile_name)
|
||||||
|
|
||||||
|
os.rename(tmp_keyfile_name, self.keyfile)
|
||||||
|
except LockTimeout:
|
||||||
|
raise AnsibleError(
|
||||||
|
f'writing lock file for {self.keyfile} ran in to the timeout of {self.get_option("lock_file_timeout")}s')
|
||||||
|
except paramiko.hostkeys.InvalidHostKey as e:
|
||||||
|
raise AnsibleConnectionFailure(f'Invalid host key: {e.line}')
|
||||||
|
except Exception as e:
|
||||||
|
# unable to save keys, including scenario when key was invalid
|
||||||
|
# and caught earlier
|
||||||
|
raise AnsibleError(
|
||||||
|
f'error occurred while writing SSH host keys!\n{to_text(e)}')
|
||||||
|
finally:
|
||||||
|
if tmp_keyfile_name is not None:
|
||||||
|
pathlib.Path(tmp_keyfile_name).unlink(missing_ok=True)
|
||||||
|
|
||||||
|
self.ssh.close()
|
||||||
|
self._connected = False
|
||||||
@@ -8,33 +8,32 @@
|
|||||||
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
|
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||||
# SPDX-License-Identifier: GPL-3.0-or-later
|
# SPDX-License-Identifier: GPL-3.0-or-later
|
||||||
|
|
||||||
from __future__ import (absolute_import, division, print_function)
|
from __future__ import annotations
|
||||||
__metaclass__ = type
|
|
||||||
|
|
||||||
DOCUMENTATION = '''
|
DOCUMENTATION = r"""
|
||||||
author: Ansible Core Team
|
author: Ansible Core Team
|
||||||
name: zone
|
name: zone
|
||||||
short_description: Run tasks in a zone instance
|
short_description: Run tasks in a zone instance
|
||||||
|
description:
|
||||||
|
- Run commands or put/fetch files to an existing zone.
|
||||||
|
options:
|
||||||
|
remote_addr:
|
||||||
description:
|
description:
|
||||||
- Run commands or put/fetch files to an existing zone.
|
- Zone identifier.
|
||||||
options:
|
type: string
|
||||||
remote_addr:
|
default: inventory_hostname
|
||||||
description:
|
vars:
|
||||||
- Zone identifier
|
- name: ansible_host
|
||||||
type: string
|
- name: ansible_zone_host
|
||||||
default: inventory_hostname
|
"""
|
||||||
vars:
|
|
||||||
- name: ansible_host
|
|
||||||
- name: ansible_zone_host
|
|
||||||
'''
|
|
||||||
|
|
||||||
import os
|
import os
|
||||||
import os.path
|
import os.path
|
||||||
import subprocess
|
import subprocess
|
||||||
import traceback
|
import traceback
|
||||||
|
from shlex import quote as shlex_quote
|
||||||
|
|
||||||
from ansible.errors import AnsibleError
|
from ansible.errors import AnsibleError
|
||||||
from ansible.module_utils.six.moves import shlex_quote
|
|
||||||
from ansible.module_utils.common.process import get_bin_path
|
from ansible.module_utils.common.process import get_bin_path
|
||||||
from ansible.module_utils.common.text.converters import to_bytes
|
from ansible.module_utils.common.text.converters import to_bytes
|
||||||
from ansible.plugins.connection import ConnectionBase, BUFSIZE
|
from ansible.plugins.connection import ConnectionBase, BUFSIZE
|
||||||
@@ -62,14 +61,14 @@ class Connection(ConnectionBase):
|
|||||||
self.zlogin_cmd = to_bytes(self._search_executable('zlogin'))
|
self.zlogin_cmd = to_bytes(self._search_executable('zlogin'))
|
||||||
|
|
||||||
if self.zone not in self.list_zones():
|
if self.zone not in self.list_zones():
|
||||||
raise AnsibleError("incorrect zone name %s" % self.zone)
|
raise AnsibleError(f"incorrect zone name {self.zone}")
|
||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
def _search_executable(executable):
|
def _search_executable(executable):
|
||||||
try:
|
try:
|
||||||
return get_bin_path(executable)
|
return get_bin_path(executable)
|
||||||
except ValueError:
|
except ValueError:
|
||||||
raise AnsibleError("%s command not found in PATH" % executable)
|
raise AnsibleError(f"{executable} command not found in PATH")
|
||||||
|
|
||||||
def list_zones(self):
|
def list_zones(self):
|
||||||
process = subprocess.Popen([self.zoneadm_cmd, 'list', '-ip'],
|
process = subprocess.Popen([self.zoneadm_cmd, 'list', '-ip'],
|
||||||
@@ -94,7 +93,7 @@ class Connection(ConnectionBase):
|
|||||||
|
|
||||||
# stdout, stderr = p.communicate()
|
# stdout, stderr = p.communicate()
|
||||||
path = process.stdout.readlines()[0].split(':')[3]
|
path = process.stdout.readlines()[0].split(':')[3]
|
||||||
return path + '/root'
|
return f"{path}/root"
|
||||||
|
|
||||||
def _connect(self):
|
def _connect(self):
|
||||||
""" connect to the zone; nothing to do here """
|
""" connect to the zone; nothing to do here """
|
||||||
@@ -117,7 +116,7 @@ class Connection(ConnectionBase):
|
|||||||
local_cmd = [self.zlogin_cmd, self.zone, cmd]
|
local_cmd = [self.zlogin_cmd, self.zone, cmd]
|
||||||
local_cmd = map(to_bytes, local_cmd)
|
local_cmd = map(to_bytes, local_cmd)
|
||||||
|
|
||||||
display.vvv("EXEC %s" % (local_cmd), host=self.zone)
|
display.vvv(f"EXEC {local_cmd}", host=self.zone)
|
||||||
p = subprocess.Popen(local_cmd, shell=False, stdin=stdin,
|
p = subprocess.Popen(local_cmd, shell=False, stdin=stdin,
|
||||||
stdout=subprocess.PIPE, stderr=subprocess.PIPE)
|
stdout=subprocess.PIPE, stderr=subprocess.PIPE)
|
||||||
|
|
||||||
@@ -140,7 +139,7 @@ class Connection(ConnectionBase):
|
|||||||
exist in any given chroot. So for now we're choosing "/" instead.
|
exist in any given chroot. So for now we're choosing "/" instead.
|
||||||
This also happens to be the former default.
|
This also happens to be the former default.
|
||||||
|
|
||||||
Can revisit using $HOME instead if it's a problem
|
Can revisit using $HOME instead if it is a problem
|
||||||
"""
|
"""
|
||||||
if not remote_path.startswith(os.path.sep):
|
if not remote_path.startswith(os.path.sep):
|
||||||
remote_path = os.path.join(os.path.sep, remote_path)
|
remote_path = os.path.join(os.path.sep, remote_path)
|
||||||
@@ -149,7 +148,7 @@ class Connection(ConnectionBase):
|
|||||||
def put_file(self, in_path, out_path):
|
def put_file(self, in_path, out_path):
|
||||||
""" transfer a file from local to zone """
|
""" transfer a file from local to zone """
|
||||||
super(Connection, self).put_file(in_path, out_path)
|
super(Connection, self).put_file(in_path, out_path)
|
||||||
display.vvv("PUT %s TO %s" % (in_path, out_path), host=self.zone)
|
display.vvv(f"PUT {in_path} TO {out_path}", host=self.zone)
|
||||||
|
|
||||||
out_path = shlex_quote(self._prefix_login_path(out_path))
|
out_path = shlex_quote(self._prefix_login_path(out_path))
|
||||||
try:
|
try:
|
||||||
@@ -159,27 +158,27 @@ class Connection(ConnectionBase):
|
|||||||
else:
|
else:
|
||||||
count = ''
|
count = ''
|
||||||
try:
|
try:
|
||||||
p = self._buffered_exec_command('dd of=%s bs=%s%s' % (out_path, BUFSIZE, count), stdin=in_file)
|
p = self._buffered_exec_command(f'dd of={out_path} bs={BUFSIZE}{count}', stdin=in_file)
|
||||||
except OSError:
|
except OSError:
|
||||||
raise AnsibleError("jail connection requires dd command in the jail")
|
raise AnsibleError("jail connection requires dd command in the jail")
|
||||||
try:
|
try:
|
||||||
stdout, stderr = p.communicate()
|
stdout, stderr = p.communicate()
|
||||||
except Exception:
|
except Exception:
|
||||||
traceback.print_exc()
|
traceback.print_exc()
|
||||||
raise AnsibleError("failed to transfer file %s to %s" % (in_path, out_path))
|
raise AnsibleError(f"failed to transfer file {in_path} to {out_path}")
|
||||||
if p.returncode != 0:
|
if p.returncode != 0:
|
||||||
raise AnsibleError("failed to transfer file %s to %s:\n%s\n%s" % (in_path, out_path, stdout, stderr))
|
raise AnsibleError(f"failed to transfer file {in_path} to {out_path}:\n{stdout}\n{stderr}")
|
||||||
except IOError:
|
except IOError:
|
||||||
raise AnsibleError("file or module does not exist at: %s" % in_path)
|
raise AnsibleError(f"file or module does not exist at: {in_path}")
|
||||||
|
|
||||||
def fetch_file(self, in_path, out_path):
|
def fetch_file(self, in_path, out_path):
|
||||||
""" fetch a file from zone to local """
|
""" fetch a file from zone to local """
|
||||||
super(Connection, self).fetch_file(in_path, out_path)
|
super(Connection, self).fetch_file(in_path, out_path)
|
||||||
display.vvv("FETCH %s TO %s" % (in_path, out_path), host=self.zone)
|
display.vvv(f"FETCH {in_path} TO {out_path}", host=self.zone)
|
||||||
|
|
||||||
in_path = shlex_quote(self._prefix_login_path(in_path))
|
in_path = shlex_quote(self._prefix_login_path(in_path))
|
||||||
try:
|
try:
|
||||||
p = self._buffered_exec_command('dd if=%s bs=%s' % (in_path, BUFSIZE))
|
p = self._buffered_exec_command(f'dd if={in_path} bs={BUFSIZE}')
|
||||||
except OSError:
|
except OSError:
|
||||||
raise AnsibleError("zone connection requires dd command in the zone")
|
raise AnsibleError("zone connection requires dd command in the zone")
|
||||||
|
|
||||||
@@ -191,10 +190,10 @@ class Connection(ConnectionBase):
|
|||||||
chunk = p.stdout.read(BUFSIZE)
|
chunk = p.stdout.read(BUFSIZE)
|
||||||
except Exception:
|
except Exception:
|
||||||
traceback.print_exc()
|
traceback.print_exc()
|
||||||
raise AnsibleError("failed to transfer file %s to %s" % (in_path, out_path))
|
raise AnsibleError(f"failed to transfer file {in_path} to {out_path}")
|
||||||
stdout, stderr = p.communicate()
|
stdout, stderr = p.communicate()
|
||||||
if p.returncode != 0:
|
if p.returncode != 0:
|
||||||
raise AnsibleError("failed to transfer file %s to %s:\n%s\n%s" % (in_path, out_path, stdout, stderr))
|
raise AnsibleError(f"failed to transfer file {in_path} to {out_path}:\n{stdout}\n{stderr}")
|
||||||
|
|
||||||
def close(self):
|
def close(self):
|
||||||
""" terminate the connection; nothing to do here """
|
""" terminate the connection; nothing to do here """
|
||||||
|
|||||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user