Mirror of https://github.com/ansible-collections/community.general.git, synced 2026-04-29 09:56:53 +00:00.
Compare commits (333 commits)

| SHA1 |
|---|
| 069b785cb2 |
| 090d3f3709 |
| 68a9b66966 |
| f96c6476fe |
| fc0f677535 |
| 9a986473bd |
| e9f0e49283 |
| 5eff31e760 |
| 39c58d5469 |
| 20d7be4f38 |
| 2d26fba0b9 |
| d6168a196b |
| 02de81c39e |
| 4096b9fa5a |
| fe3a3a7638 |
| 7cac741e77 |
| f84ebed63f |
| f905a1bc94 |
| b0470f2e59 |
| 42175e38b2 |
| 8e79844b75 |
| 1338db358a |
| 06c4439a1c |
| 16d5d5fc57 |
| 71af3226f3 |
| b3037a46be |
| f7df19adbd |
| 3bca21aa1b |
| 1bb3d41e15 |
| f214f206c3 |
| 9b8011d692 |
| f227038f38 |
| 86a2996814 |
| eb154003cf |
| 212871fcaf |
| 1795a67b8e |
| a71c0af9cc |
| 569cde6c3e |
| f0db1d1f6b |
| 5a36e84b86 |
| a74c6db77f |
| 9a14980ca7 |
| 8c9effce1f |
| 51ec3594dd |
| 802f8ea224 |
| 23af148021 |
| 1a2c2d0a64 |
| 80243f8180 |
| 13b5c4092a |
| 9b0c983860 |
| 704a5acc63 |
| 861f55eb04 |
| bebe162a22 |
| f82e7a7b83 |
| 03240ad7dc |
| d87b9fe0dc |
| 5f481939d4 |
| 70c78c1d71 |
| 0350a631de |
| da8b133a73 |
| a409f8fc2f |
| bb73f28bf5 |
| cd01a928ab |
| 1ac94b5f44 |
| 6889e0478d |
| fabf6263f1 |
| 7dd7cbdba8 |
| 7f4f066e86 |
| 4f4075a542 |
| 7aa118b957 |
| b774435d8d |
| a71e19130d |
| d347bf5fa0 |
| 3b7f13c58e |
| 136419c5c0 |
| bc7ad0f0ea |
| cb985b31f9 |
| feb443d260 |
| bc609d74a0 |
| 4bd68ac153 |
| d75dee3230 |
| 3eeafecd1f |
| ea719649bb |
| 70adba8991 |
| b48293ca31 |
| 85f9d89510 |
| 7051fe3449 |
| fc2024d837 |
| 45c2e0f8d0 |
| 62138b288a |
| be3b66c8b5 |
| 17e11d7d7e |
| 211688ef1b |
| af1c5dd785 |
| a5697da29c |
| 0735656319 |
| 8f98ba9119 |
| a05a5982a6 |
| be11d0d409 |
| 9d66a1dc1e |
| f55342d8af |
| 486c26b224 |
| be4d5b7dc4 |
| 865de5baa0 |
| 7fd37ea247 |
| 524d5883b8 |
| 1b8e6bc95b |
| 1bbef58844 |
| a5b2b5ce8c |
| 12b76ead29 |
| da29ea151d |
| bafad8ecd4 |
| 6c8f949ba9 |
| 9307b76e74 |
| 8491bf7b49 |
| 39ef949f27 |
| b674f94f64 |
| bc2ff24f74 |
| 610ecf9bf5 |
| 13d0310e91 |
| e4e091acca |
| 48b5a7a80a |
| b444e8739c |
| b463571902 |
| b2b8fc30bf |
| 4f758bfb84 |
| 90c9f20ef8 |
| 609f28f791 |
| d62fe154d2 |
| b389f8637f |
| 795a855d0e |
| a4b32d7b9c |
| f5fa16c881 |
| 9f5193e40b |
| 23396e62dc |
| 4363f8764b |
| 4947786d36 |
| fb67df3051 |
| da048aa12e |
| 47b4cf766e |
| 69ab5eb110 |
| 6298ad4faa |
| 73b6b98ed9 |
| 1c4197aa23 |
| 23fbc5e241 |
| 09cded05e7 |
| 67736d796a |
| 226207522e |
| 17e275bc0b |
| 6fab46710a |
| 79d87552ef |
| c13bede0c5 |
| 0ded1109fe |
| fa30b02294 |
| 98df344017 |
| a50329d0d5 |
| 74c15c1241 |
| 248e2ff321 |
| 05bf5ee1df |
| adb367a6af |
| 2140485148 |
| 1b0d55fe31 |
| 787fa46217 |
| f6d0b35bb7 |
| 6cafd3bed7 |
| e0dbe9c98d |
| 638a7fc199 |
| b5c3361be4 |
| dd7c3ad10d |
| 102a0857db |
| 9510988abc |
| beacd54b7b |
| dd25ddfbe8 |
| 49bd9cbd3c |
| 2a8da76907 |
| ffa3d15881 |
| 551b0b9eea |
| 1dd697bdc2 |
| 001292c780 |
| 8ea58618db |
| 6088e2dc0f |
| 0a35eb2dda |
| 980fa36fac |
| bc383b8f7b |
| eded6ebf64 |
| 5af921e8d9 |
| c7a2e28daa |
| 549a73bd78 |
| fa1f2af460 |
| ab6a61237a |
| 82e74e35d9 |
| a5cd4ebea2 |
| 0dc891bf37 |
| 997e6345b5 |
| 2580da9796 |
| f8465c692b |
| 84147081d4 |
| afd1988810 |
| be3bfd6fa5 |
| 29f9865497 |
| 5c72ab34bf |
| 4298f2dd92 |
| 2d3f99ec3a |
| 13e3161f2a |
| 5a51929aa3 |
| 44028060c3 |
| 44679e71a2 |
| cd77d67efb |
| 92f8bf7b6f |
| 069b485b7e |
| 002208f425 |
| 31de16cee3 |
| 32ec751996 |
| c0dea8b164 |
| 431a37fa5b |
| 76fde43fca |
| 8891f559ef |
| 878664778e |
| 9946f758af |
| ee8b15708f |
| f0dd018d47 |
| 0bfebde5c9 |
| acddb190ba |
| 08ece2e0fa |
| 6afe35d263 |
| 4f92f39720 |
| 3318034403 |
| 8d307cb190 |
| acc3173030 |
| 1a3c93f80c |
| e99b5086a8 |
| 98181fb8cb |
| f7bc6964be |
| dfb9b1b9fb |
| 56a18a029a |
| e9f7f7e2de |
| fd0d05d6f2 |
| ec12422fae |
| f79940c415 |
| 6d74e0c640 |
| ec6dfe2fcd |
| 702dd9bbda |
| 671b7ab149 |
| 4a1006ac34 |
| 825bec7053 |
| 1fdbb50abb |
| 1389bba459 |
| 916f6f7c87 |
| 0b0a302855 |
| 98b2d04348 |
| 30c155e250 |
| 097f08608f |
| 6c1eb77f18 |
| 5e5e1963c3 |
| 838e4e3f02 |
| 0c7b9e50b5 |
| aea238e5d1 |
| 2b64ef2a62 |
| e2f6d7b523 |
| 68051774d8 |
| a599afa384 |
| 3d0da92784 |
| 88d2a3a1fb |
| e724bc5f51 |
| 32558558c0 |
| 07bac1777f |
| b4a2e9da50 |
| ecea4a2f38 |
| 30edafabe7 |
| f4a87fdbcb |
| 58cce27d45 |
| 241cc02fa8 |
| 096d36adc5 |
| 0589c84176 |
| e3a3950e3d |
| cf7a58f627 |
| af01b462d5 |
| 1b9d437be8 |
| 512b2c7389 |
| d716bd4648 |
| 42e55e4f86 |
| dbba0d1956 |
| 3b779ecade |
| d9f3e7a2ec |
| e0346d400f |
| 5adb7ab948 |
| f496256d18 |
| d05932fb2c |
| 938aec492e |
| 12395732e8 |
| af5da7d412 |
| b2dea631d1 |
| 19984ce4df |
| fce91ebbd4 |
| 58705d5ac3 |
| f87777b9f5 |
| 09b9ea466f |
| d530470d30 |
| 0c4d2a6e5e |
| a88f6f56c7 |
| 2a5e7c33df |
| 5147c49498 |
| 9b16392648 |
| 404782c9d7 |
| 21cd65fccf |
| 3c12c6f482 |
| b8ecb1671b |
| b3c661a9f6 |
| 89f12c87eb |
| f8652571f7 |
| 32fa588f47 |
| 8d886b42ec |
| d0870a022e |
| df66885fa4 |
| f8d8f691bc |
| e1503fc306 |
| 4a74f46e56 |
| 6b00b76f32 |
| 65f58afbd9 |
| c2f08c57e0 |
| 2583c60487 |
| c2e578cb14 |
| 6a514b6843 |
| 48e860be20 |
| 0304989392 |
| ab0b85d7d2 |
| 07a47c047b |
| 567c7d1839 |
| 74e941e432 |
| 57e36d7dc2 |
| a366318ac6 |
| 6d0bcec1cb |
| e9a3b69fd9 |
.azure-pipelines/README.md (new file, 9 lines)

@@ -0,0 +1,9 @@
<!--
Copyright (c) Ansible Project
GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
SPDX-License-Identifier: GPL-3.0-or-later
-->

## Azure Pipelines Configuration

Please see the [Documentation](https://github.com/ansible/community/wiki/Testing:-Azure-Pipelines) for more information.
.azure-pipelines/azure-pipelines.yml (new file, 420 lines)

@@ -0,0 +1,420 @@
---
# Copyright (c) Ansible Project
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
# SPDX-License-Identifier: GPL-3.0-or-later

trigger:
  batch: true
  branches:
    include:
      - main
      - stable-*

pr:
  autoCancel: true
  branches:
    include:
      - main
      - stable-*

schedules:
  - cron: 0 8 * * *
    displayName: Nightly (main)
    always: true
    branches:
      include:
        - main
  - cron: 0 10 * * *
    displayName: Nightly (active stable branches)
    always: true
    branches:
      include:
        - stable-9
        - stable-8
  - cron: 0 11 * * 0
    displayName: Weekly (old stable branches)
    always: true
    branches:
      include:
        - stable-7

variables:
  - name: checkoutPath
    value: ansible_collections/community/general
  - name: coverageBranches
    value: main
  - name: pipelinesCoverage
    value: coverage
  - name: entryPoint
    value: tests/utils/shippable/shippable.sh
  - name: fetchDepth
    value: 0

resources:
  containers:
    - container: default
      image: quay.io/ansible/azure-pipelines-test-container:6.0.0

pool: Standard

stages:
### Sanity
  - stage: Sanity_devel
    displayName: Sanity devel
    dependsOn: []
    jobs:
      - template: templates/matrix.yml
        parameters:
          nameFormat: Test {0}
          testFormat: devel/sanity/{0}
          targets:
            - test: 1
            - test: 2
            - test: 3
            - test: 4
            - test: extra
  - stage: Sanity_2_17
    displayName: Sanity 2.17
    dependsOn: []
    jobs:
      - template: templates/matrix.yml
        parameters:
          nameFormat: Test {0}
          testFormat: 2.17/sanity/{0}
          targets:
            - test: 1
            - test: 2
            - test: 3
            - test: 4
  - stage: Sanity_2_16
    displayName: Sanity 2.16
    dependsOn: []
    jobs:
      - template: templates/matrix.yml
        parameters:
          nameFormat: Test {0}
          testFormat: 2.16/sanity/{0}
          targets:
            - test: 1
            - test: 2
            - test: 3
            - test: 4
  - stage: Sanity_2_15
    displayName: Sanity 2.15
    dependsOn: []
    jobs:
      - template: templates/matrix.yml
        parameters:
          nameFormat: Test {0}
          testFormat: 2.15/sanity/{0}
          targets:
            - test: 1
            - test: 2
            - test: 3
            - test: 4
### Units
  - stage: Units_devel
    displayName: Units devel
    dependsOn: []
    jobs:
      - template: templates/matrix.yml
        parameters:
          nameFormat: Python {0}
          testFormat: devel/units/{0}/1
          targets:
            - test: 3.8
            - test: 3.9
            - test: '3.10'
            - test: '3.11'
            - test: '3.12'
            - test: '3.13'
  - stage: Units_2_17
    displayName: Units 2.17
    dependsOn: []
    jobs:
      - template: templates/matrix.yml
        parameters:
          nameFormat: Python {0}
          testFormat: 2.17/units/{0}/1
          targets:
            - test: 3.7
            - test: "3.12"
  - stage: Units_2_16
    displayName: Units 2.16
    dependsOn: []
    jobs:
      - template: templates/matrix.yml
        parameters:
          nameFormat: Python {0}
          testFormat: 2.16/units/{0}/1
          targets:
            - test: 2.7
            - test: 3.6
            - test: "3.11"
  - stage: Units_2_15
    displayName: Units 2.15
    dependsOn: []
    jobs:
      - template: templates/matrix.yml
        parameters:
          nameFormat: Python {0}
          testFormat: 2.15/units/{0}/1
          targets:
            - test: 3.5
            - test: "3.10"

## Remote
  - stage: Remote_devel_extra_vms
    displayName: Remote devel extra VMs
    dependsOn: []
    jobs:
      - template: templates/matrix.yml
        parameters:
          testFormat: devel/{0}
          targets:
            - name: Alpine 3.19
              test: alpine/3.19
            # - name: Fedora 39
            #   test: fedora/39
            - name: Ubuntu 22.04
              test: ubuntu/22.04
          groups:
            - vm
  - stage: Remote_devel
    displayName: Remote devel
    dependsOn: []
    jobs:
      - template: templates/matrix.yml
        parameters:
          testFormat: devel/{0}
          targets:
            - name: macOS 14.3
              test: macos/14.3
            - name: RHEL 9.3
              test: rhel/9.3
            - name: FreeBSD 14.0
              test: freebsd/14.0
          groups:
            - 1
            - 2
            - 3
  - stage: Remote_2_17
    displayName: Remote 2.17
    dependsOn: []
    jobs:
      - template: templates/matrix.yml
        parameters:
          testFormat: 2.17/{0}
          targets:
            - name: FreeBSD 13.3
              test: freebsd/13.3
          groups:
            - 1
            - 2
            - 3
  - stage: Remote_2_16
    displayName: Remote 2.16
    dependsOn: []
    jobs:
      - template: templates/matrix.yml
        parameters:
          testFormat: 2.16/{0}
          targets:
            - name: macOS 13.2
              test: macos/13.2
            - name: RHEL 9.2
              test: rhel/9.2
            - name: RHEL 8.8
              test: rhel/8.8
            - name: FreeBSD 13.2
              test: freebsd/13.2
          groups:
            - 1
            - 2
            - 3
  - stage: Remote_2_15
    displayName: Remote 2.15
    dependsOn: []
    jobs:
      - template: templates/matrix.yml
        parameters:
          testFormat: 2.15/{0}
          targets:
            - name: RHEL 9.1
              test: rhel/9.1
            - name: RHEL 8.7
              test: rhel/8.7
            - name: RHEL 7.9
              test: rhel/7.9
            # - name: FreeBSD 13.1
            #   test: freebsd/13.1
            # - name: FreeBSD 12.4
            #   test: freebsd/12.4
          groups:
            - 1
            - 2
            - 3

### Docker
  - stage: Docker_devel
    displayName: Docker devel
    dependsOn: []
    jobs:
      - template: templates/matrix.yml
        parameters:
          testFormat: devel/linux/{0}
          targets:
            - name: Fedora 39
              test: fedora39
            - name: Ubuntu 20.04
              test: ubuntu2004
            - name: Ubuntu 22.04
              test: ubuntu2204
          groups:
            - 1
            - 2
            - 3
  - stage: Docker_2_17
    displayName: Docker 2.17
    dependsOn: []
    jobs:
      - template: templates/matrix.yml
        parameters:
          testFormat: 2.17/linux/{0}
          targets:
            - name: Alpine 3.19
              test: alpine319
          groups:
            - 1
            - 2
            - 3
  - stage: Docker_2_16
    displayName: Docker 2.16
    dependsOn: []
    jobs:
      - template: templates/matrix.yml
        parameters:
          testFormat: 2.16/linux/{0}
          targets:
            - name: Fedora 38
              test: fedora38
            - name: openSUSE 15
              test: opensuse15
            - name: Alpine 3
              test: alpine3
          groups:
            - 1
            - 2
            - 3
  - stage: Docker_2_15
    displayName: Docker 2.15
    dependsOn: []
    jobs:
      - template: templates/matrix.yml
        parameters:
          testFormat: 2.15/linux/{0}
          targets:
            - name: Fedora 37
              test: fedora37
            - name: CentOS 7
              test: centos7
          groups:
            - 1
            - 2
            - 3

### Community Docker
  - stage: Docker_community_devel
    displayName: Docker (community images) devel
    dependsOn: []
    jobs:
      - template: templates/matrix.yml
        parameters:
          testFormat: devel/linux-community/{0}
          targets:
            - name: Debian Bullseye
              test: debian-bullseye/3.9
            - name: Debian Bookworm
              test: debian-bookworm/3.11
            - name: ArchLinux
              test: archlinux/3.12
          groups:
            - 1
            - 2
            - 3

### Generic
  - stage: Generic_devel
    displayName: Generic devel
    dependsOn: []
    jobs:
      - template: templates/matrix.yml
        parameters:
          nameFormat: Python {0}
          testFormat: devel/generic/{0}/1
          targets:
            - test: '3.8'
            - test: '3.11'
            - test: '3.13'
  - stage: Generic_2_17
    displayName: Generic 2.17
    dependsOn: []
    jobs:
      - template: templates/matrix.yml
        parameters:
          nameFormat: Python {0}
          testFormat: 2.17/generic/{0}/1
          targets:
            - test: '3.7'
            - test: '3.12'
  - stage: Generic_2_16
    displayName: Generic 2.16
    dependsOn: []
    jobs:
      - template: templates/matrix.yml
        parameters:
          nameFormat: Python {0}
          testFormat: 2.16/generic/{0}/1
          targets:
            - test: '2.7'
            - test: '3.6'
            - test: '3.11'
  - stage: Generic_2_15
    displayName: Generic 2.15
    dependsOn: []
    jobs:
      - template: templates/matrix.yml
        parameters:
          nameFormat: Python {0}
          testFormat: 2.15/generic/{0}/1
          targets:
            - test: '3.9'

  - stage: Summary
    condition: succeededOrFailed()
    dependsOn:
      - Sanity_devel
      - Sanity_2_17
      - Sanity_2_16
      - Sanity_2_15
      - Units_devel
      - Units_2_17
      - Units_2_16
      - Units_2_15
      - Remote_devel_extra_vms
      - Remote_devel
      - Remote_2_17
      - Remote_2_16
      - Remote_2_15
      - Docker_devel
      - Docker_2_17
      - Docker_2_16
      - Docker_2_15
      - Docker_community_devel
      # Right now all generic tests are disabled. Uncomment when at least one of them is re-enabled.
      # - Generic_devel
      # - Generic_2_17
      # - Generic_2_16
      # - Generic_2_15
    jobs:
      - template: templates/coverage.yml
.azure-pipelines/scripts/aggregate-coverage.sh (new executable file, 24 lines)

@@ -0,0 +1,24 @@
#!/usr/bin/env bash
# Copyright (c) Ansible Project
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
# SPDX-License-Identifier: GPL-3.0-or-later

# Aggregate code coverage results for later processing.

set -o pipefail -eu

agent_temp_directory="$1"

PATH="${PWD}/bin:${PATH}"

mkdir "${agent_temp_directory}/coverage/"

options=(--venv --venv-system-site-packages --color -v)

ansible-test coverage combine --group-by command --export "${agent_temp_directory}/coverage/" "${options[@]}"

if ansible-test coverage analyze targets generate --help >/dev/null 2>&1; then
    # Only analyze coverage if the installed version of ansible-test supports it.
    # Doing so allows this script to work unmodified for multiple Ansible versions.
    ansible-test coverage analyze targets generate "${agent_temp_directory}/coverage/coverage-analyze-targets.json" "${options[@]}"
fi
.azure-pipelines/scripts/combine-coverage.py (new executable file, 64 lines)

@@ -0,0 +1,64 @@
#!/usr/bin/env python
# Copyright (c) Ansible Project
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
# SPDX-License-Identifier: GPL-3.0-or-later

"""
Combine coverage data from multiple jobs, keeping the data only from the most recent attempt from each job.
Coverage artifacts must be named using the format: "Coverage $(System.JobAttempt) {StableUniqueNameForEachJob}"
The recommended coverage artifact name format is: Coverage $(System.JobAttempt) $(System.StageDisplayName) $(System.JobDisplayName)
Keep in mind that Azure Pipelines does not enforce unique job display names (only names).
It is up to pipeline authors to avoid name collisions when deviating from the recommended format.
"""

from __future__ import (absolute_import, division, print_function)
__metaclass__ = type

import os
import re
import shutil
import sys


def main():
    """Main program entry point."""
    source_directory = sys.argv[1]

    if '/ansible_collections/' in os.getcwd():
        output_path = "tests/output"
    else:
        output_path = "test/results"

    destination_directory = os.path.join(output_path, 'coverage')

    if not os.path.exists(destination_directory):
        os.makedirs(destination_directory)

    jobs = {}
    count = 0

    for name in os.listdir(source_directory):
        match = re.search('^Coverage (?P<attempt>[0-9]+) (?P<label>.+)$', name)
        label = match.group('label')
        attempt = int(match.group('attempt'))
        jobs[label] = max(attempt, jobs.get(label, 0))

    for label, attempt in jobs.items():
        name = 'Coverage {attempt} {label}'.format(label=label, attempt=attempt)
        source = os.path.join(source_directory, name)
        source_files = os.listdir(source)

        for source_file in source_files:
            source_path = os.path.join(source, source_file)
            destination_path = os.path.join(destination_directory, source_file + '.' + label)
            print('"%s" -> "%s"' % (source_path, destination_path))
            shutil.copyfile(source_path, destination_path)
            count += 1

    print('Coverage file count: %d' % count)
    print('##vso[task.setVariable variable=coverageFileCount]%d' % count)
    print('##vso[task.setVariable variable=outputPath]%s' % output_path)


if __name__ == '__main__':
    main()
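A minimal sketch of the attempt-selection logic above, run against hypothetical artifact names (the real names come from the `Coverage $(System.JobAttempt) ...` artifacts published by the test jobs): only the highest attempt per job label survives.

```python
import re

# Hypothetical artifact directory names; "Sanity Test 1" was re-run once.
names = [
    'Coverage 1 Sanity Test 1',
    'Coverage 2 Sanity Test 1',
    'Coverage 1 Units 3.8',
]

# Same regex and max() bookkeeping as combine-coverage.py above.
jobs = {}
for name in names:
    match = re.search('^Coverage (?P<attempt>[0-9]+) (?P<label>.+)$', name)
    label = match.group('label')
    jobs[label] = max(int(match.group('attempt')), jobs.get(label, 0))

print(jobs)  # {'Sanity Test 1': 2, 'Units 3.8': 1}
```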
.azure-pipelines/scripts/process-results.sh (new executable file, 28 lines)

@@ -0,0 +1,28 @@
#!/usr/bin/env bash
# Copyright (c) Ansible Project
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
# SPDX-License-Identifier: GPL-3.0-or-later

# Check the test results and set variables for use in later steps.

set -o pipefail -eu

if [[ "$PWD" =~ /ansible_collections/ ]]; then
    output_path="tests/output"
else
    output_path="test/results"
fi

echo "##vso[task.setVariable variable=outputPath]${output_path}"

if compgen -G "${output_path}"'/junit/*.xml' > /dev/null; then
    echo "##vso[task.setVariable variable=haveTestResults]true"
fi

if compgen -G "${output_path}"'/bot/ansible-test-*' > /dev/null; then
    echo "##vso[task.setVariable variable=haveBotResults]true"
fi

if compgen -G "${output_path}"'/coverage/*' > /dev/null; then
    echo "##vso[task.setVariable variable=haveCoverageData]true"
fi
.azure-pipelines/scripts/publish-codecov.py (new executable file, 105 lines)

@@ -0,0 +1,105 @@
#!/usr/bin/env python
# Copyright (c) Ansible Project
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
# SPDX-License-Identifier: GPL-3.0-or-later

"""
Upload code coverage reports to codecov.io.
Multiple coverage files from multiple languages are accepted and aggregated after upload.
Python coverage, as well as PowerShell and Python stubs, can all be uploaded.
"""

import argparse
import dataclasses
import pathlib
import shutil
import subprocess
import tempfile
import typing as t
import urllib.request


@dataclasses.dataclass(frozen=True)
class CoverageFile:
    name: str
    path: pathlib.Path
    flags: t.List[str]


@dataclasses.dataclass(frozen=True)
class Args:
    dry_run: bool
    path: pathlib.Path


def parse_args() -> Args:
    parser = argparse.ArgumentParser()
    parser.add_argument('-n', '--dry-run', action='store_true')
    parser.add_argument('path', type=pathlib.Path)

    args = parser.parse_args()

    # Store arguments in a typed dataclass
    fields = dataclasses.fields(Args)
    kwargs = {field.name: getattr(args, field.name) for field in fields}

    return Args(**kwargs)


def process_files(directory: pathlib.Path) -> t.Tuple[CoverageFile, ...]:
    processed = []
    for file in directory.joinpath('reports').glob('coverage*.xml'):
        name = file.stem.replace('coverage=', '')

        # Get flags from name
        flags = name.replace('-powershell', '').split('=')  # Drop '-powershell' suffix
        flags = [flag if not flag.startswith('stub') else flag.split('-')[0] for flag in flags]  # Remove "-01" from stub files

        processed.append(CoverageFile(name, file, flags))

    return tuple(processed)


def upload_files(codecov_bin: pathlib.Path, files: t.Tuple[CoverageFile, ...], dry_run: bool = False) -> None:
    for file in files:
        cmd = [
            str(codecov_bin),
            '--name', file.name,
            '--file', str(file.path),
        ]
        for flag in file.flags:
            cmd.extend(['--flags', flag])

        if dry_run:
            print(f'DRY-RUN: Would run command: {cmd}')
            continue

        subprocess.run(cmd, check=True)


def download_file(url: str, dest: pathlib.Path, flags: int, dry_run: bool = False) -> None:
    if dry_run:
        print(f'DRY-RUN: Would download {url} to {dest} and set mode to {flags:o}')
        return

    with urllib.request.urlopen(url) as resp:
        with dest.open('w+b') as f:
            # Read data in chunks rather than all at once
            shutil.copyfileobj(resp, f, 64 * 1024)

    dest.chmod(flags)


def main():
    args = parse_args()
    url = 'https://ansible-ci-files.s3.amazonaws.com/codecov/linux/codecov'
    with tempfile.TemporaryDirectory(prefix='codecov-') as tmpdir:
        codecov_bin = pathlib.Path(tmpdir) / 'codecov'
        download_file(url, codecov_bin, 0o755, args.dry_run)

        files = process_files(args.path)
        upload_files(codecov_bin, files, args.dry_run)


if __name__ == '__main__':
    main()
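The stem-to-flags logic in `process_files()` is easiest to see with concrete values. A sketch of the same transformation applied to hypothetical report stems (real stems come from the `coverage*.xml` files in the reports directory):

```python
# Same steps as process_files() above, extracted into a helper for illustration.
def name_and_flags(stem):
    name = stem.replace('coverage=', '')
    flags = name.replace('-powershell', '').split('=')
    flags = [flag if not flag.startswith('stub') else flag.split('-')[0] for flag in flags]
    return name, flags

print(name_and_flags('coverage=sanity=1'))              # ('sanity=1', ['sanity', '1'])
print(name_and_flags('coverage=units=stub-01'))         # ('units=stub-01', ['units', 'stub'])
print(name_and_flags('coverage=units=3.8-powershell'))  # ('units=3.8-powershell', ['units', '3.8'])
```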
.azure-pipelines/scripts/report-coverage.sh (new executable file, 19 lines)

@@ -0,0 +1,19 @@
#!/usr/bin/env bash
# Copyright (c) Ansible Project
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
# SPDX-License-Identifier: GPL-3.0-or-later

# Generate code coverage reports for uploading to Azure Pipelines and codecov.io.

set -o pipefail -eu

PATH="${PWD}/bin:${PATH}"

if ! ansible-test --help >/dev/null 2>&1; then
    # Install the devel version of ansible-test for generating code coverage reports.
    # This is only used by Ansible Collections, which are typically tested against multiple Ansible versions (in separate jobs).
    # Since a version of ansible-test is required that can work with the output from multiple older releases, the devel version is used.
    pip install https://github.com/ansible/ansible/archive/devel.tar.gz --disable-pip-version-check
fi

ansible-test coverage xml --group-by command --stub --venv --venv-system-site-packages --color -v
.azure-pipelines/scripts/run-tests.sh (new executable file, 38 lines)

@@ -0,0 +1,38 @@
#!/usr/bin/env bash
# Copyright (c) Ansible Project
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
# SPDX-License-Identifier: GPL-3.0-or-later

# Configure the test environment and run the tests.

set -o pipefail -eu

entry_point="$1"
test="$2"
read -r -a coverage_branches <<< "$3"  # space separated list of branches to run code coverage on for scheduled builds

export COMMIT_MESSAGE
export COMPLETE
export COVERAGE
export IS_PULL_REQUEST

if [ "${SYSTEM_PULLREQUEST_TARGETBRANCH:-}" ]; then
    IS_PULL_REQUEST=true
    COMMIT_MESSAGE=$(git log --format=%B -n 1 HEAD^2)
else
    IS_PULL_REQUEST=
    COMMIT_MESSAGE=$(git log --format=%B -n 1 HEAD)
fi

COMPLETE=
COVERAGE=

if [ "${BUILD_REASON}" = "Schedule" ]; then
    COMPLETE=yes

    if printf '%s\n' "${coverage_branches[@]}" | grep -q "^${BUILD_SOURCEBRANCHNAME}$"; then
        COVERAGE=yes
    fi
fi

"${entry_point}" "${test}" 2>&1 | "$(dirname "$0")/time-command.py"
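The coverage gate above only fires on scheduled builds whose source branch appears in the space-separated `coverageBranches` list. A minimal sketch of the same decision in Python, with illustrative values modeled on a scheduled build of `main`:

```python
# Illustrative stand-ins for the pipeline variables used by run-tests.sh.
coverage_branches = 'main'.split()    # "$3", e.g. the coverageBranches variable
build_reason = 'Schedule'             # $BUILD_REASON
build_source_branch_name = 'main'     # $BUILD_SOURCEBRANCHNAME

complete = build_reason == 'Schedule'
coverage = complete and build_source_branch_name in coverage_branches
print(complete, coverage)  # True True
```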
.azure-pipelines/scripts/time-command.py (new executable file, 29 lines)

@@ -0,0 +1,29 @@
#!/usr/bin/env python
# Copyright (c) Ansible Project
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
# SPDX-License-Identifier: GPL-3.0-or-later

"""Prepends a relative timestamp to each input line from stdin and writes it to stdout."""

from __future__ import (absolute_import, division, print_function)
__metaclass__ = type

import sys
import time


def main():
    """Main program entry point."""
    start = time.time()

    sys.stdin.reconfigure(errors='surrogateescape')
    sys.stdout.reconfigure(errors='surrogateescape')

    for line in sys.stdin:
        seconds = time.time() - start
        sys.stdout.write('%02d:%02d %s' % (seconds // 60, seconds % 60, line))
        sys.stdout.flush()


if __name__ == '__main__':
    main()
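A quick sketch of the resulting output format: an input line arriving 125 seconds (an illustrative value) after the script starts gets an `MM:SS` prefix.

```python
# 125 seconds after start -> "02:05 " prefix (125 // 60 == 2, 125 % 60 == 5).
seconds = 125
line = 'Running sanity tests\n'
print('%02d:%02d %s' % (seconds // 60, seconds % 60, line), end='')
# -> 02:05 Running sanity tests
```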
.azure-pipelines/templates/coverage.yml (new file, 44 lines)

@@ -0,0 +1,44 @@
---
# Copyright (c) Ansible Project
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
# SPDX-License-Identifier: GPL-3.0-or-later

# This template adds a job for processing code coverage data.
# It will upload results to Azure Pipelines and codecov.io.
# Use it from a job stage that completes after all other jobs have completed.
# This can be done by placing it in a separate summary stage that runs after the test stage(s) have completed.

jobs:
  - job: Coverage
    displayName: Code Coverage
    container: default
    workspace:
      clean: all
    steps:
      - checkout: self
        fetchDepth: $(fetchDepth)
        path: $(checkoutPath)
      - task: DownloadPipelineArtifact@2
        displayName: Download Coverage Data
        inputs:
          path: coverage/
          patterns: "Coverage */*=coverage.combined"
      - bash: .azure-pipelines/scripts/combine-coverage.py coverage/
        displayName: Combine Coverage Data
      - bash: .azure-pipelines/scripts/report-coverage.sh
        displayName: Generate Coverage Report
        condition: gt(variables.coverageFileCount, 0)
      - task: PublishCodeCoverageResults@1
        inputs:
          codeCoverageTool: Cobertura
          # Azure Pipelines only accepts a single coverage data file.
          # That means only Python or PowerShell coverage can be uploaded, but not both.
          # Set the "pipelinesCoverage" variable to determine which type is uploaded.
          # Use "coverage" for Python and "coverage-powershell" for PowerShell.
          summaryFileLocation: "$(outputPath)/reports/$(pipelinesCoverage).xml"
        displayName: Publish to Azure Pipelines
        condition: gt(variables.coverageFileCount, 0)
      - bash: .azure-pipelines/scripts/publish-codecov.py "$(outputPath)"
        displayName: Publish to codecov.io
        condition: gt(variables.coverageFileCount, 0)
        continueOnError: true
.azure-pipelines/templates/matrix.yml (new file, 60 lines)

@@ -0,0 +1,60 @@
---
# Copyright (c) Ansible Project
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
# SPDX-License-Identifier: GPL-3.0-or-later

# This template uses the provided targets and optional groups to generate a matrix which is then passed to the test template.
# If this matrix template does not provide the required functionality, consider using the test template directly instead.

parameters:
  # A required list of dictionaries, one per test target.
  # Each item in the list must contain a "test" or "name" key.
  # Both may be provided. If one is omitted, the other will be used.
  - name: targets
    type: object

  # An optional list of values which will be used to multiply the targets list into a matrix.
  # Values can be strings or numbers.
  - name: groups
    type: object
    default: []

  # An optional format string used to generate the job name.
  # - {0} is the name of an item in the targets list.
  - name: nameFormat
    type: string
    default: "{0}"

  # An optional format string used to generate the test name.
  # - {0} is the name of an item in the targets list.
  - name: testFormat
    type: string
    default: "{0}"

  # An optional format string used to add the group to the job name.
  # {0} is the formatted name of an item in the targets list.
  # {{1}} is the group -- be sure to include the double "{{" and "}}".
  - name: nameGroupFormat
    type: string
    default: "{0} - {{1}}"

  # An optional format string used to add the group to the test name.
  # {0} is the formatted test of an item in the targets list.
  # {{1}} is the group -- be sure to include the double "{{" and "}}".
  - name: testGroupFormat
    type: string
    default: "{0}/{{1}}"

jobs:
  - template: test.yml
    parameters:
      jobs:
        - ${{ if eq(length(parameters.groups), 0) }}:
          - ${{ each target in parameters.targets }}:
            - name: ${{ format(parameters.nameFormat, coalesce(target.name, target.test)) }}
              test: ${{ format(parameters.testFormat, coalesce(target.test, target.name)) }}
        - ${{ if not(eq(length(parameters.groups), 0)) }}:
          - ${{ each group in parameters.groups }}:
            - ${{ each target in parameters.targets }}:
              - name: ${{ format(format(parameters.nameGroupFormat, parameters.nameFormat), coalesce(target.name, target.test), group) }}
                test: ${{ format(format(parameters.testGroupFormat, parameters.testFormat), coalesce(target.test, target.name), group) }}
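The nested `format(format(...))` expressions are easier to follow with concrete values. A minimal Python sketch of one target/group expansion, using the Docker devel stage's parameters from the pipeline above (Python's `str.format` shares the `{0}` placeholder and `{{...}}` escaping rules with the Azure Pipelines `format()` expression):

```python
test_group_format = '{0}/{{1}}'    # parameters.testGroupFormat (default)
test_format = 'devel/linux/{0}'    # parameters.testFormat from the Docker devel stage

# The inner format() folds the test format into the group format,
# turning the escaped {{1}} into a live {1} placeholder...
combined = test_group_format.format(test_format)
print(combined)                        # devel/linux/{0}/{1}

# ...and the outer format() fills in the target and the group.
print(combined.format('fedora39', 1))  # devel/linux/fedora39/1
```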
.azure-pipelines/templates/test.yml (new file, 50 lines)

@@ -0,0 +1,50 @@
---
# Copyright (c) Ansible Project
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
# SPDX-License-Identifier: GPL-3.0-or-later

# This template uses the provided list of jobs to create one or more test jobs.
# It can be used directly if needed, or through the matrix template.

parameters:
  # A required list of dictionaries, one per test job.
  # Each item in the list must contain a "job" and "name" key.
  - name: jobs
    type: object

jobs:
  - ${{ each job in parameters.jobs }}:
    - job: test_${{ replace(replace(replace(job.test, '/', '_'), '.', '_'), '-', '_') }}
      displayName: ${{ job.name }}
      container: default
      workspace:
        clean: all
      steps:
        - checkout: self
          fetchDepth: $(fetchDepth)
          path: $(checkoutPath)
        - bash: .azure-pipelines/scripts/run-tests.sh "$(entryPoint)" "${{ job.test }}" "$(coverageBranches)"
          displayName: Run Tests
        - bash: .azure-pipelines/scripts/process-results.sh
          condition: succeededOrFailed()
          displayName: Process Results
        - bash: .azure-pipelines/scripts/aggregate-coverage.sh "$(Agent.TempDirectory)"
          condition: eq(variables.haveCoverageData, 'true')
          displayName: Aggregate Coverage Data
        - task: PublishTestResults@2
          condition: eq(variables.haveTestResults, 'true')
          inputs:
            testResultsFiles: "$(outputPath)/junit/*.xml"
          displayName: Publish Test Results
        - task: PublishPipelineArtifact@1
          condition: eq(variables.haveBotResults, 'true')
          displayName: Publish Bot Results
          inputs:
            targetPath: "$(outputPath)/bot/"
            artifactName: "Bot $(System.JobAttempt) $(System.StageDisplayName) $(System.JobDisplayName)"
        - task: PublishPipelineArtifact@1
          condition: eq(variables.haveCoverageData, 'true')
          displayName: Publish Coverage Data
          inputs:
            targetPath: "$(Agent.TempDirectory)/coverage/"
            artifactName: "Coverage $(System.JobAttempt) $(System.StageDisplayName) $(System.JobDisplayName)"
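A short sketch of the job-identifier rewrite in the `job:` key above: Azure Pipelines job identifiers are restricted to alphanumerics and underscores, so the template maps `/`, `.`, and `-` to `_`. The test name below is the one produced by the matrix expansion example earlier.

```python
# Same chained replace() as the template's job: key.
test = 'devel/linux/fedora39/1'
job_id = 'test_' + test.replace('/', '_').replace('.', '_').replace('-', '_')
print(job_id)  # test_devel_linux_fedora39_1
```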
.github/BOTMETA.yml (vendored, 91 changed lines)

@@ -33,6 +33,8 @@ files:
    maintainers: $team_ansible_core
  $becomes/pmrun.py:
    maintainers: $team_ansible_core
  $becomes/run0.py:
    maintainers: konstruktoid
  $becomes/sesu.py:
    maintainers: nekonyuu
  $becomes/sudosu.py:
@@ -89,6 +91,8 @@ files:
    maintainers: ryancurrah
  $callbacks/syslog_json.py:
    maintainers: imjoseangel
  $callbacks/timestamp.py:
    maintainers: kurokobo
  $callbacks/unixy.py:
    labels: unixy
    maintainers: akatch
@@ -117,6 +121,8 @@ files:
    maintainers: $team_ansible_core
  $doc_fragments/:
    labels: docs_fragments
  $doc_fragments/django.py:
    maintainers: russoz
  $doc_fragments/hpe3par.py:
    labels: hpe3par
    maintainers: farhan7500 gautamphegde
@@ -151,6 +157,8 @@ files:
  $filters/jc.py:
    maintainers: kellyjonbrazil
  $filters/json_query.py: {}
  $filters/keep_keys.py:
    maintainers: vbotka
  $filters/lists.py:
    maintainers: cfiehe
  $filters/lists_difference.yml:
@@ -164,6 +172,10 @@ files:
  $filters/lists_union.yml:
    maintainers: cfiehe
  $filters/random_mac.py: {}
  $filters/remove_keys.py:
    maintainers: vbotka
  $filters/replace_keys.py:
    maintainers: vbotka
  $filters/time.py:
    maintainers: resmo
  $filters/to_days.yml:
@@ -294,8 +306,12 @@ files:
    labels: module_utils
  $module_utils/btrfs.py:
    maintainers: gnfzdz
  $module_utils/cmd_runner.py:
    maintainers: russoz
  $module_utils/deps.py:
    maintainers: russoz
  $module_utils/django.py:
    maintainers: russoz
  $module_utils/gconftool2.py:
    labels: gconftool2
    maintainers: russoz
@@ -339,6 +355,8 @@ files:
  $module_utils/pipx.py:
    labels: pipx
    maintainers: russoz
  $module_utils/python_runner.py:
    maintainers: russoz
  $module_utils/puppet.py:
    labels: puppet
    maintainers: russoz
@@ -425,7 +443,7 @@ files:
  $modules/bearychat.py:
    maintainers: tonyseek
  $modules/bigpanda.py:
    ignore: hkariti
    maintainers: hkariti
  $modules/bitbucket_:
    maintainers: catcombo
  $modules/bower.py:
@@ -490,6 +508,12 @@ files:
    maintainers: tintoy
  $modules/discord.py:
    maintainers: cwollinger
  $modules/django_check.py:
    maintainers: russoz
  $modules/django_command.py:
    maintainers: russoz
  $modules/django_createcachetable.py:
    maintainers: russoz
  $modules/django_manage.py:
    ignore: scottanderson42 tastychutney
    labels: django_manage
@@ -532,8 +556,6 @@ files:
    maintainers: $team_flatpak
  $modules/flatpak_remote.py:
    maintainers: $team_flatpak
  $modules/flowdock.py:
    ignore: mcodd
  $modules/gandi_livedns.py:
    maintainers: gthiemonge
  $modules/gconftool2.py:
@@ -1096,46 +1118,6 @@ files:
  $modules/python_requirements_info.py:
    ignore: ryansb
    maintainers: willthames
  $modules/rax:
    ignore: ryansb sivel
  $modules/rax.py:
    maintainers: omgjlk sivel
  $modules/rax_cbs.py:
    maintainers: claco
  $modules/rax_cbs_attachments.py:
    maintainers: claco
  $modules/rax_cdb.py:
    maintainers: jails
  $modules/rax_cdb_database.py:
    maintainers: jails
  $modules/rax_cdb_user.py:
    maintainers: jails
  $modules/rax_clb.py:
    maintainers: claco
  $modules/rax_clb_nodes.py:
    maintainers: neuroid
  $modules/rax_clb_ssl.py:
    maintainers: smashwilson
  $modules/rax_files.py:
    maintainers: angstwad
  $modules/rax_files_objects.py:
    maintainers: angstwad
  $modules/rax_identity.py:
    maintainers: claco
  $modules/rax_mon_alarm.py:
    maintainers: smashwilson
  $modules/rax_mon_check.py:
    maintainers: smashwilson
  $modules/rax_mon_entity.py:
    maintainers: smashwilson
  $modules/rax_mon_notification.py:
    maintainers: smashwilson
  $modules/rax_mon_notification_plan.py:
    maintainers: smashwilson
  $modules/rax_network.py:
    maintainers: claco omgjlk
  $modules/rax_queue.py:
    maintainers: claco
  $modules/read_csv.py:
    maintainers: dagwieers
  $modules/redfish_:
@@ -1300,8 +1282,6 @@ files:
    maintainers: farhan7500 gautamphegde
  $modules/ssh_config.py:
    maintainers: gaqzi Akasurde
  $modules/stackdriver.py:
    maintainers: bwhaley
  $modules/stacki_host.py:
    labels: stacki_host
    maintainers: bsanders bbyhuy
@@ -1362,19 +1342,16 @@ files:
    keywords: sophos utm
    maintainers: $team_e_spirit
  $modules/utm_ca_host_key_cert.py:
    ignore: stearz
    maintainers: $team_e_spirit
    maintainers: stearz
  $modules/utm_ca_host_key_cert_info.py:
    ignore: stearz
    maintainers: $team_e_spirit
    maintainers: stearz
  $modules/utm_network_interface_address.py:
    maintainers: steamx
  $modules/utm_network_interface_address_info.py:
    maintainers: steamx
  $modules/utm_proxy_auth_profile.py:
    keywords: sophos utm
    ignore: stearz
    maintainers: $team_e_spirit
    maintainers: $team_e_spirit stearz
  $modules/utm_proxy_exception.py:
    keywords: sophos utm
    maintainers: $team_e_spirit RickS-C137
@@ -1397,8 +1374,6 @@ files:
    maintainers: $team_wdc
  $modules/wdc_redfish_info.py:
    maintainers: $team_wdc
  $modules/webfaction_:
    maintainers: quentinsf
  $modules/xattr.py:
    labels: xattr
    maintainers: bcoca
@@ -1450,6 +1425,8 @@ files:
    ignore: matze
    labels: zypper
    maintainers: $team_suse
  $plugin_utils/keys_filter.py:
    maintainers: vbotka
  $plugin_utils/unsafe.py:
    maintainers: felixfontein
  $tests/a_module.py:
@@ -1489,6 +1466,10 @@ files:
    maintainers: baldwinSPC nurfet-becirevic t0mk teebes
  docs/docsite/rst/guide_scaleway.rst:
    maintainers: $team_scaleway
  docs/docsite/rst/guide_deps.rst:
    maintainers: russoz
  docs/docsite/rst/guide_vardict.rst:
    maintainers: russoz
  docs/docsite/rst/test_guide.rst:
    maintainers: felixfontein
#########################
@@ -1520,7 +1501,7 @@ macros:
  team_ansible_core:
  team_aix: MorrisA bcoca d-little flynn1973 gforster kairoaraujo marvin-sinister mator molekuul ramooncamacho wtcross
  team_bsd: JoergFiedler MacLemon bcoca dch jasperla mekanix opoplawski overhacked tuxillo
  team_consul: sgargan apollo13
  team_consul: sgargan apollo13 Ilgmi
  team_cyberark_conjur: jvanderhoof ryanprior
  team_e_spirit: MatrixCrawler getjack
  team_flatpak: JayKayy oolongbrothers
@@ -1542,6 +1523,6 @@ macros:
  team_rhsm: cnsnyder ptoscano
  team_scaleway: remyleone abarbare
  team_solaris: bcoca fishman jasperla jpdasma mator scathatheworm troy2914 xen0l
  team_suse: commel evrardjp lrupp AnderEnder alxgu andytom sealor
  team_suse: commel evrardjp lrupp toabctl AnderEnder alxgu andytom sealor
  team_virt: joshainglis karmab Thulium-Drake Ajpantuso
  team_wdc: mikemoerk
.github/workflows/ansible-test.yml (vendored, new file, 205 lines)

@@ -0,0 +1,205 @@
---
# Copyright (c) Ansible Project
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
# SPDX-License-Identifier: GPL-3.0-or-later

# For the comprehensive list of the inputs supported by the ansible-community/ansible-test-gh-action GitHub Action, see
# https://github.com/marketplace/actions/ansible-test

name: EOL CI
on:
  # Run EOL CI against all pushes (direct commits, also merged PRs) and Pull Requests
  push:
    branches:
      - main
      - stable-*
  pull_request:
  # Run EOL CI once per day (at 08:00 UTC)
  schedule:
    - cron: '0 8 * * *'

concurrency:
  # Make sure there is at most one active run per PR, but do not cancel any non-PR runs
  group: ${{ github.workflow }}-${{ (github.head_ref && github.event.number) || github.run_id }}
  cancel-in-progress: true

jobs:
  sanity:
    name: EOL Sanity (Ⓐ${{ matrix.ansible }})
    strategy:
      matrix:
        ansible:
          - '2.13'
          - '2.14'
    # Ansible-test on various stable branches does not yet work well with cgroups v2.
    # Since ubuntu-latest now uses Ubuntu 22.04, we need to fall back to the ubuntu-20.04
    # image for these stable branches. The list of branches where this is necessary will
    # shrink over time, check out https://github.com/ansible-collections/news-for-maintainers/issues/28
    # for the latest list.
    runs-on: ubuntu-latest
    steps:
      - name: Perform sanity testing
        uses: felixfontein/ansible-test-gh-action@main
        with:
          ansible-core-version: stable-${{ matrix.ansible }}
          codecov-token: ${{ secrets.CODECOV_TOKEN }}
          coverage: ${{ github.event_name == 'schedule' && 'always' || 'never' }}
          pull-request-change-detection: 'true'
          testing-type: sanity

  units:
    # Ansible-test on various stable branches does not yet work well with cgroups v2.
    # Since ubuntu-latest now uses Ubuntu 22.04, we need to fall back to the ubuntu-20.04
    # image for these stable branches. The list of branches where this is necessary will
    # shrink over time, check out https://github.com/ansible-collections/news-for-maintainers/issues/28
    # for the latest list.
    runs-on: ubuntu-latest
    name: EOL Units (Ⓐ${{ matrix.ansible }}+py${{ matrix.python }})
    strategy:
      # As soon as the first unit test fails, cancel the others to free up the CI queue
      fail-fast: true
      matrix:
        ansible:
          - ''
        python:
          - ''
        exclude:
          - ansible: ''
        include:
          - ansible: '2.13'
            python: '2.7'
          - ansible: '2.13'
            python: '3.8'
          - ansible: '2.14'
            python: '3.9'

    steps:
      - name: >-
          Perform unit testing against
          Ansible version ${{ matrix.ansible }}
        uses: felixfontein/ansible-test-gh-action@main
        with:
          ansible-core-version: stable-${{ matrix.ansible }}
          codecov-token: ${{ secrets.CODECOV_TOKEN }}
          coverage: ${{ github.event_name == 'schedule' && 'always' || 'never' }}
          pre-test-cmd: >-
            mkdir -p ../../ansible
            ;
            git clone --depth=1 --single-branch https://github.com/ansible-collections/community.internal_test_tools.git ../../community/internal_test_tools
          pull-request-change-detection: 'true'
          target-python-version: ${{ matrix.python }}
          testing-type: units

  integration:
    # Ansible-test on various stable branches does not yet work well with cgroups v2.
    # Since ubuntu-latest now uses Ubuntu 22.04, we need to fall back to the ubuntu-20.04
    # image for these stable branches. The list of branches where this is necessary will
    # shrink over time, check out https://github.com/ansible-collections/news-for-maintainers/issues/28
    # for the latest list.
    runs-on: ubuntu-latest
    name: EOL I (Ⓐ${{ matrix.ansible }}+${{ matrix.docker }}+py${{ matrix.python }}:${{ matrix.target }})
    strategy:
      fail-fast: false
      matrix:
        ansible:
          - ''
        docker:
          - ''
        python:
          - ''
        target:
          - ''
        exclude:
          - ansible: ''
        include:
          # 2.13
          - ansible: '2.13'
            docker: fedora35
            python: ''
            target: azp/posix/1/
          - ansible: '2.13'
            docker: fedora35
            python: ''
            target: azp/posix/2/
          - ansible: '2.13'
            docker: fedora35
            python: ''
            target: azp/posix/3/
          - ansible: '2.13'
            docker: opensuse15py2
            python: ''
            target: azp/posix/1/
          - ansible: '2.13'
            docker: opensuse15py2
            python: ''
            target: azp/posix/2/
          - ansible: '2.13'
            docker: opensuse15py2
            python: ''
            target: azp/posix/3/
          - ansible: '2.13'
            docker: alpine3
            python: ''
            target: azp/posix/1/
          - ansible: '2.13'
            docker: alpine3
            python: ''
            target: azp/posix/2/
          - ansible: '2.13'
            docker: alpine3
            python: ''
            target: azp/posix/3/
          # 2.14
          - ansible: '2.14'
            docker: alpine3
            python: ''
            target: azp/posix/1/
          - ansible: '2.14'
            docker: alpine3
            python: ''
            target: azp/posix/2/
          - ansible: '2.14'
            docker: alpine3
            python: ''
            target: azp/posix/3/
          # Right now all generic tests are disabled. Uncomment when at least one of them is re-enabled.
          # - ansible: '2.13'
          #   docker: default
          #   python: '3.9'
          #   target: azp/generic/1/
          # Right now all generic tests are disabled. Uncomment when at least one of them is re-enabled.
          # - ansible: '2.14'
          #   docker: default
          #   python: '3.10'
          #   target: azp/generic/1/

    steps:
      - name: >-
          Perform integration testing against
          Ansible version ${{ matrix.ansible }}
          under Python ${{ matrix.python }}
        uses: felixfontein/ansible-test-gh-action@main
        with:
          ansible-core-version: stable-${{ matrix.ansible }}
          codecov-token: ${{ secrets.CODECOV_TOKEN }}
          coverage: ${{ github.event_name == 'schedule' && 'always' || 'never' }}
          docker-image: ${{ matrix.docker }}
          integration-continue-on-error: 'false'
          integration-diff: 'false'
          integration-retry-on-error: 'true'
          pre-test-cmd: >-
            mkdir -p ../../ansible
            ;
            git clone --depth=1 --single-branch https://github.com/ansible-collections/ansible.posix.git ../../ansible/posix
            ;
            git clone --depth=1 --single-branch https://github.com/ansible-collections/community.crypto.git ../../community/crypto
            ;
            git clone --depth=1 --single-branch https://github.com/ansible-collections/community.internal_test_tools.git ../../community/internal_test_tools
          pull-request-change-detection: 'true'
          target: ${{ matrix.target }}
          target-python-version: ${{ matrix.python }}
          testing-type: integration
.github/workflows/codeql-analysis.yml (vendored, new file, 36 lines)

@@ -0,0 +1,36 @@
---
# Copyright (c) Ansible Project
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
# SPDX-License-Identifier: GPL-3.0-or-later

name: "Code scanning - action"

on:
  schedule:
    - cron: '26 19 * * 1'
  workflow_dispatch:

permissions:
  contents: read

jobs:
  CodeQL-Build:

    permissions:
      actions: read  # for github/codeql-action/init to get workflow details
      contents: read  # for actions/checkout to fetch code
      security-events: write  # for github/codeql-action/autobuild to send a status report
    runs-on: ubuntu-latest

    steps:
      - name: Checkout repository
        uses: actions/checkout@v4

      # Initializes the CodeQL tools for scanning.
      - name: Initialize CodeQL
        uses: github/codeql-action/init@v3
        with:
          languages: python

      - name: Perform CodeQL Analysis
        uses: github/codeql-action/analyze@v3
.github/workflows/import-galaxy.yml (vendored, new file, 20 lines)

@@ -0,0 +1,20 @@
---
# Copyright (c) Ansible Project
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
# SPDX-License-Identifier: GPL-3.0-or-later

name: import-galaxy
'on':
  # Run CI against all pushes (direct commits, also merged PRs) to main, and all Pull Requests
  push:
    branches:
      - main
      - stable-*
  pull_request:

jobs:
  import-galaxy:
    permissions:
      contents: read
    name: Test to import built collection artifact with Galaxy importer
    uses: ansible-community/github-action-test-galaxy-import/.github/workflows/test-galaxy-import.yml@main
.github/workflows/reuse.yml (vendored, new file, 30 lines)

@@ -0,0 +1,30 @@
---
# Copyright (c) Ansible Project
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
# SPDX-License-Identifier: GPL-3.0-or-later

name: Verify REUSE

on:
  push:
    branches: [main]
  pull_request_target:
    types: [opened, synchronize, reopened]
    branches: [main]
  # Run CI once per day (at 07:30 UTC)
  schedule:
    - cron: '30 7 * * *'

jobs:
  check:
    permissions:
      contents: read
    runs-on: ubuntu-latest

    steps:
      - uses: actions/checkout@v4
        with:
          ref: ${{ github.event.pull_request.head.sha || '' }}

      - name: REUSE Compliance Check
        uses: fsfe/reuse-action@v3
.gitignore (vendored, 4 changed lines)

@@ -512,3 +512,7 @@ $RECYCLE.BIN/

# Integration tests cloud configs
tests/integration/cloud-config-*.ini


# VSCode specific extensions
.vscode/settings.json
.reuse/dep5 (new file, 5 lines)

@@ -0,0 +1,5 @@
Format: https://www.debian.org/doc/packaging-manuals/copyright-format/1.0/

Files: changelogs/fragments/*
Copyright: Ansible Project
License: GPL-3.0-or-later
CHANGELOG.md (1246 changed lines): file diff suppressed because it is too large
CHANGELOG.rst (1126 changed lines): file diff suppressed because it is too large
@@ -56,8 +56,6 @@ cd ~/dev/ansible_collections/community/general

Then you can run `ansible-test` (which is a part of [ansible-core](https://pypi.org/project/ansible-core/)) inside the checkout. The following example commands expect that you have installed Docker or Podman. Note that Podman has only been supported by more recent ansible-core releases. If you are using Docker, the following will work with Ansible 2.9+.

### Sanity tests

The following commands show how to run sanity tests:

```.bash
@@ -68,8 +66,6 @@ ansible-test sanity --docker -v
ansible-test sanity --docker -v plugins/modules/system/pids.py tests/integration/targets/pids/
```

### Unit tests

The following commands show how to run unit tests:

```.bash
@@ -83,32 +79,13 @@ ansible-test units --docker -v --python 3.8
ansible-test units --docker -v --python 3.8 tests/unit/plugins/modules/net_tools/test_nmcli.py
```

### Integration tests

The following commands show how to run integration tests:

#### In Docker

Integration tests on Docker have the following parameters:
- `image_name` (required): The name of the Docker image. To get the list of supported Docker images, run
  `ansible-test integration --help` and look for _target docker images_.
- `test_name` (optional): The name of the integration test.
  For modules, this equals the short name of the module; for example, `pacman` in case of `community.general.pacman`.
  For plugins, the plugin type is added before the plugin's short name, for example `callback_yaml` for the `community.general.yaml` callback.

```.bash
# Test all plugins/modules on fedora40
ansible-test integration -v --docker fedora40

# Run integration tests for the interfaces_files module in a Docker container using the
# fedora35 operating system image (the supported images depend on your ansible-core version):
ansible-test integration --docker fedora35 -v interfaces_file

# Template
ansible-test integration -v --docker image_name test_name

# Example community.general.ini_file module on fedora40 Docker image:
ansible-test integration -v --docker fedora40 ini_file
```

#### Without isolation

```.bash
# Run integration tests for the flattened lookup **without any isolation**:
ansible-test integration -v lookup_flattened
```
39
README.md
@@ -6,9 +6,8 @@ SPDX-License-Identifier: GPL-3.0-or-later

# Community General Collection

[](https://docs.ansible.com/ansible/devel/collections/community/general/)
[](https://dev.azure.com/ansible/community.general/_build?definitionId=31)
[](https://github.com/ansible-collections/community.general/actions)
[](https://dev.azure.com/ansible/community.general/_build?definitionId=31)
[](https://github.com/ansible-collections/community.general/actions)
[](https://codecov.io/gh/ansible-collections/community.general)
[](https://api.reuse.software/info/github.com/ansible-collections/community.general)
@@ -24,21 +23,9 @@ We follow [Ansible Code of Conduct](https://docs.ansible.com/ansible/latest/comm

If you encounter abusive behavior violating the [Ansible Code of Conduct](https://docs.ansible.com/ansible/latest/community/code_of_conduct.html), please refer to the [policy violations](https://docs.ansible.com/ansible/latest/community/code_of_conduct.html#policy-violations) section of the Code of Conduct for information on how to raise a complaint.

## Communication

* Join the Ansible forum:
  * [Get Help](https://forum.ansible.com/c/help/6): get help or help others. This is for questions about modules or plugins in the collection. Please add appropriate tags if you start new discussions.
  * [Tag `community-general`](https://forum.ansible.com/tag/community-general): discuss the *collection itself*, instead of specific modules or plugins.
  * [Social Spaces](https://forum.ansible.com/c/chat/4): gather and interact with fellow enthusiasts.
  * [News & Announcements](https://forum.ansible.com/c/news/5): track project-wide announcements including social events.

* The Ansible [Bullhorn newsletter](https://docs.ansible.com/ansible/devel/community/communication.html#the-bullhorn): used to announce releases and important changes.

For more information about communication, see the [Ansible communication guide](https://docs.ansible.com/ansible/devel/community/communication.html).

## Tested with Ansible

Tested with the current ansible-core 2.13, ansible-core 2.14, ansible-core 2.15, ansible-core 2.16, ansible-core 2.17, and ansible-core 2.18 releases of ansible-core. Ansible-core versions before 2.13.0 are not supported. This includes all ansible-base 2.10 and Ansible 2.9 releases.
Tested with the current ansible-core 2.13, ansible-core 2.14, ansible-core 2.15, ansible-core 2.16, ansible-core 2.17 releases and the current development version of ansible-core. Ansible-core versions before 2.13.0 are not supported. This includes all ansible-base 2.10 and Ansible 2.9 releases.

## External requirements
@@ -111,13 +98,25 @@ It is necessary for maintainers of this collection to be subscribed to:

They also should be subscribed to Ansible's [The Bullhorn newsletter](https://docs.ansible.com/ansible/devel/community/communication.html#the-bullhorn).

## Communication

We announce important development changes and releases through Ansible's [The Bullhorn newsletter](https://eepurl.com/gZmiEP). If you are a collection developer, be sure you are subscribed.

Join us in the `#ansible` (general use questions and support), `#ansible-community` (community and collection development questions), and other [IRC channels](https://docs.ansible.com/ansible/devel/community/communication.html#irc-channels) on [Libera.chat](https://libera.chat).

We take part in the global quarterly [Ansible Contributor Summit](https://github.com/ansible/community/wiki/Contributor-Summit) virtually or in-person. Track [The Bullhorn newsletter](https://eepurl.com/gZmiEP) and join us.

For more information about communities, meetings and agendas see [Community Wiki](https://github.com/ansible/community/wiki/Community).

For more information about communication, refer to Ansible's [Communication guide](https://docs.ansible.com/ansible/devel/community/communication.html).

## Publishing New Version

See the [Releasing guidelines](https://github.com/ansible/community-docs/blob/main/releasing_collections.rst) to learn how to release this collection.

## Release notes

See the [changelog](https://github.com/ansible-collections/community.general/blob/stable-8/CHANGELOG.md).
See the [changelog](https://github.com/ansible-collections/community.general/blob/stable-9/CHANGELOG.md).

## Roadmap

@@ -136,8 +135,8 @@ See [this issue](https://github.com/ansible-collections/community.general/issues

This collection is primarily licensed and distributed as a whole under the GNU General Public License v3.0 or later.

See [LICENSES/GPL-3.0-or-later.txt](https://github.com/ansible-collections/community.general/blob/main/COPYING) for the full text.
See [LICENSES/GPL-3.0-or-later.txt](https://github.com/ansible-collections/community.general/blob/stable-9/COPYING) for the full text.

Parts of the collection are licensed under the [BSD 2-Clause license](https://github.com/ansible-collections/community.general/blob/main/LICENSES/BSD-2-Clause.txt), the [MIT license](https://github.com/ansible-collections/community.general/blob/main/LICENSES/MIT.txt), and the [PSF 2.0 license](https://github.com/ansible-collections/community.general/blob/main/LICENSES/PSF-2.0.txt).
Parts of the collection are licensed under the [BSD 2-Clause license](https://github.com/ansible-collections/community.general/blob/stable-9/LICENSES/BSD-2-Clause.txt), the [MIT license](https://github.com/ansible-collections/community.general/blob/stable-9/LICENSES/MIT.txt), and the [PSF 2.0 license](https://github.com/ansible-collections/community.general/blob/stable-9/LICENSES/PSF-2.0.txt).

All files have a machine readable `SPDX-License-Identifier:` comment denoting its respective license(s) or an equivalent entry in an accompanying `.license` file. Only changelog fragments (which will not be part of a release) are covered by a blanket statement in `REUSE.toml`. This conforms to the [REUSE specification](https://reuse.software/spec/).
All files have a machine readable `SPDX-License-Identifier:` comment denoting its respective license(s) or an equivalent entry in an accompanying `.license` file. Only changelog fragments (which will not be part of a release) are covered by a blanket statement in `.reuse/dep5`. This conforms to the [REUSE specification](https://reuse.software/spec/).
File diff suppressed because it is too large
@@ -18,25 +18,23 @@ output_formats:
prelude_section_name: release_summary
prelude_section_title: Release Summary
sections:
- - major_changes
  - Major Changes
- - minor_changes
  - Minor Changes
- - breaking_changes
  - Breaking Changes / Porting Guide
- - deprecated_features
  - Deprecated Features
- - removed_features
  - Removed Features (previously deprecated)
- - security_fixes
  - Security Fixes
- - bugfixes
  - Bugfixes
- - known_issues
  - Known Issues
- - major_changes
  - Major Changes
- - minor_changes
  - Minor Changes
- - breaking_changes
  - Breaking Changes / Porting Guide
- - deprecated_features
  - Deprecated Features
- - removed_features
  - Removed Features (previously deprecated)
- - security_fixes
  - Security Fixes
- - bugfixes
  - Bugfixes
- - known_issues
  - Known Issues
title: Community General
trivial_section_name: trivial
use_fqcn: true
add_plugin_period: true
changelog_nice_yaml: true
changelog_sort: version
@@ -14,3 +14,7 @@ sections:
      - guide_online
      - guide_packet
      - guide_scaleway
  - title: Developer Guides
    toctree:
      - guide_deps
      - guide_vardict
@@ -9,8 +9,6 @@ edit_on_github:
  path_prefix: ''

extra_links:
  - description: Ask for help
    url: https://forum.ansible.com/c/help/6/none
  - description: Submit a bug report
    url: https://github.com/ansible-collections/community.general/issues/new?assignees=&labels=&template=bug_report.yml
  - description: Request a feature
@@ -24,10 +22,6 @@ communication:
    - topic: General usage and support questions
      network: Libera
      channel: '#ansible'
  forums:
    - topic: "Ansible Forum: General usage and support questions"
      # The following URL directly points to the "Get Help" section
      url: https://forum.ansible.com/c/help/6/none
    - topic: "Ansible Forum: Discussions about the collection itself, not for specific modules or plugins"
      # The following URL directly points to the "community-general" tag
      url: https://forum.ansible.com/tag/community-general
  mailing_lists:
    - topic: Ansible Project List
      url: https://groups.google.com/g/ansible-project
@@ -65,7 +65,7 @@ All three statements are equivalent and give:

.. note:: Be aware that in most cases, filter calls without any argument require ``flatten=true``, otherwise the input is returned as result. The reason for this is that the input is considered a variable argument and is wrapped by an additional outer list. ``flatten=true`` ensures that this list is removed before the input is processed by the filter logic.

The filters :ansplugin:`community.general.lists_difference#filter` or :ansplugin:`community.general.lists_symmetric_difference#filter` can be used in the same way as the filters in the examples above. They calculate the difference or the symmetric difference between two or more lists and preserve the item order.
The filters ansplugin:`community.general.lists_difference#filter` or :ansplugin:`community.general.lists_symmetric_difference#filter` can be used in the same way as the filters in the examples above. They calculate the difference or the symmetric difference between two or more lists and preserve the item order.

For example, the symmetric difference of ``A``, ``B`` and ``C`` may be written as:

@@ -124,7 +124,7 @@ To get a hash map with all ports and names of a cluster:
      var: item
    loop: "{{ domain_definition | community.general.json_query(server_name_cluster1_query) }}"
    vars:
      server_name_cluster1_query: "domain.server[?cluster=='cluster1'].{name: name, port: port}"
      server_name_cluster1_query: "domain.server[?cluster=='cluster2'].{name: name, port: port}"

To extract ports from all clusters with name starting with 'server1':
74
docs/docsite/rst/guide_deps.rst
Normal file
@@ -0,0 +1,74 @@
..
  Copyright (c) Ansible Project
  GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
  SPDX-License-Identifier: GPL-3.0-or-later

.. _ansible_collections.community.general.docsite.guide_deps:

``deps`` Guide
==============


Using ``deps``
^^^^^^^^^^^^^^

The ``ansible_collections.community.general.plugins.module_utils.deps`` module util simplifies
the importing of code as described in :ref:`Importing and using shared code <shared_code>`.
Please note that ``deps`` is meant to be used specifically with Ansible modules, and not other types of plugins.

The same example from the Developer Guide would become:

.. code-block:: python

    from ansible_collections.community.general.plugins.module_utils import deps

    with deps.declare("foo"):
        import foo

Then in ``main()``, just after the argspec (or anywhere in the code, for that matter), do

.. code-block:: python

    deps.validate(module)  # assuming module is a valid AnsibleModule instance

By default, ``deps`` will rely on ``ansible.module_utils.basic.missing_required_lib`` to generate
a message about a failing import. That function accepts parameters ``reason`` and ``url``,
and so does ``deps``:

.. code-block:: python

    with deps.declare("foo", reason="foo is needed to properly bar", url="https://foo.bar.io"):
        import foo

If you would rather write a custom message instead of using ``missing_required_lib`` then do:

.. code-block:: python

    with deps.declare("foo", msg="Custom msg explaining why foo is needed"):
        import foo

``deps`` allows for multiple dependencies to be declared:

.. code-block:: python

    with deps.declare("foo"):
        import foo

    with deps.declare("bar"):
        import bar

    with deps.declare("doe"):
        import doe

By default, ``deps.validate()`` will check on all the declared dependencies, but if so desired,
they can be validated selectively by doing:

.. code-block:: python

    deps.validate(module, "foo")       # only validates the "foo" dependency

    deps.validate(module, "doe:bar")   # only validates the "doe" and "bar" dependencies

    deps.validate(module, "-doe:bar")  # validates all dependencies except "doe" and "bar"

.. versionadded:: 6.1.0
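Putting the guide together, a minimal sketch of a complete module using ``deps`` could look like the following (the module body and the ``foo`` library are hypothetical, shown only to illustrate where ``declare`` and ``validate`` sit):

```python
# Hypothetical minimal module; "foo" stands in for any third-party library.
from ansible.module_utils.basic import AnsibleModule
from ansible_collections.community.general.plugins.module_utils import deps

with deps.declare("foo", reason="foo is needed to frobnicate", url="https://foo.example"):
    import foo


def main():
    module = AnsibleModule(argument_spec=dict(name=dict(type="str", required=True)))
    # Fails the module with a missing_required_lib-style message if the import failed.
    deps.validate(module)
    module.exit_json(changed=False, msg="frobnicated %s" % module.params["name"])


if __name__ == "__main__":
    main()
```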
176
docs/docsite/rst/guide_vardict.rst
Normal file
@@ -0,0 +1,176 @@
..
  Copyright (c) Ansible Project
  GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
  SPDX-License-Identifier: GPL-3.0-or-later

.. _ansible_collections.community.general.docsite.guide_vardict:

VarDict Guide
=============

Introduction
^^^^^^^^^^^^

The ``ansible_collections.community.general.plugins.module_utils.vardict`` module util provides the
``VarDict`` class to help manage the module variables. That class is a container for module variables,
especially the ones for which the module must keep track of state changes, and the ones that should
be published as return values.

Each variable has extra behaviors controlled by associated metadata, simplifying the generation of
output values from the module.

Quickstart
""""""""""

The simplest way of using ``VarDict`` is:

.. code-block:: python

    from ansible_collections.community.general.plugins.module_utils.vardict import VarDict

Then in ``main()``, or any other function called from there:

.. code-block:: python

    vars = VarDict()

    # Next 3 statements are equivalent
    vars.abc = 123
    vars["abc"] = 123
    vars.set("abc", 123)

    vars.xyz = "bananas"
    vars.ghi = False

And by the time the module is about to exit:

.. code-block:: python

    results = vars.output()
    module.exit_json(**results)

That makes the return value of the module:

.. code-block:: javascript

    {
        "abc": 123,
        "xyz": "bananas",
        "ghi": false
    }

Metadata
""""""""

The metadata values associated with each variable are:

- ``output: bool`` - marks the variable for module output as a module return value.
- ``fact: bool`` - marks the variable for module output as an Ansible fact.
- ``verbosity: int`` - sets the minimum level of verbosity for which the variable will be included in the output.
- ``change: bool`` - controls the detection of changes in the variable value.
- ``initial_value: any`` - when using ``change`` and you need to forcefully set an initial value to the variable.
- ``diff: bool`` - used along with ``change``, this generates an Ansible-style diff ``dict``.

See the sections below for more details on how to use the metadata.


Using VarDict
^^^^^^^^^^^^^

Basic Usage
"""""""""""

As shown above, variables can be accessed using the ``[]`` operator, as in a ``dict`` object,
and also as an object attribute, such as ``vars.abc``. The form using the ``set()``
method is special in the sense that you can use it to set metadata values:

.. code-block:: python

    vars.set("abc", 123, output=False)
    vars.set("abc", 123, output=True, change=True)

Another way to set metadata after the variables have been created is:

.. code-block:: python

    vars.set_meta("abc", output=False)
    vars.set_meta("abc", output=True, change=True, diff=True)

You can use either the operator or the attribute form to access the value of the variable. Other ways to
access its value and its metadata are:

.. code-block:: python

    print("abc value = {0}".format(vars.var("abc")["value"]))  # get the value
    print("abc output? {0}".format(vars.get_meta("abc")["output"]))  # get the metadata like this

The names of methods, such as ``set``, ``get_meta``, ``output`` amongst others, are reserved and
cannot be used as variable names. If you try to use a reserved name, a ``ValueError`` exception
is raised with the message "Name <var> is reserved".

Generating output
"""""""""""""""""

By default, every variable created will be enabled for output with minimum verbosity set to zero, in
other words, they will always be in the output by default.

You can control that when creating the variable for the first time or later in the code:

.. code-block:: python

    vars.set("internal", x + 4, output=False)
    vars.set_meta("internal", output=False)

You can also set the verbosity of a variable, like:

.. code-block:: python

    vars.set("abc", x + 4)
    vars.set("debug_x", x, verbosity=3)

    results = vars.output(module._verbosity)
    module.exit_json(**results)

If the module was invoked with verbosity lower than 3, then the output will only contain
the variable ``abc``. If running at higher verbosity, as in ``ansible-playbook -vvv``,
then the output will also contain ``debug_x``.

Generating facts is very similar to regular output, but variables are not marked as facts by default.

.. code-block:: python

    vars.set("modulefact", x + 4, fact=True)
    vars.set("debugfact", x, fact=True, verbosity=3)

    results = vars.output(module._verbosity)
    results["ansible_facts"] = {"module_name": vars.facts(module._verbosity)}
    module.exit_json(**results)

Handling change
"""""""""""""""

You can use ``VarDict`` to determine whether variables have had their values changed.

.. code-block:: python

    vars.set("abc", 42, change=True)
    vars.abc = 90

    results = vars.output()
    results["changed"] = vars.has_changed
    module.exit_json(**results)

If tracking changes in variables, you may want to present the difference between the initial and final
values. For that, you want to use:

.. code-block:: python

    vars.set("abc", 42, change=True, diff=True)
    vars.abc = 90

    results = vars.output()
    results["changed"] = vars.has_changed
    results["diff"] = vars.diff()
    module.exit_json(**results)

.. versionadded:: 7.1.0
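As a recap, here is a hedged end-to-end sketch combining the output, verbosity, and change-tracking features from the sections above (the module logic itself is hypothetical):

```python
# Hypothetical module sketch tying the VarDict features together.
from ansible.module_utils.basic import AnsibleModule
from ansible_collections.community.general.plugins.module_utils.vardict import VarDict


def main():
    module = AnsibleModule(argument_spec=dict(value=dict(type="int", required=True)))
    vars = VarDict()

    vars.set("abc", 42, change=True, diff=True)   # tracked for change and diff
    vars.abc = module.params["value"]             # later assignments keep the metadata
    vars.set("debug_value", module.params["value"], verbosity=3)  # only shown at -vvv

    results = vars.output(module._verbosity)
    results["changed"] = vars.has_changed
    results["diff"] = vars.diff()
    module.exit_json(**results)


if __name__ == "__main__":
    main()
```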
@@ -5,7 +5,7 @@

namespace: community
name: general
version: 8.6.11
version: 9.1.0
readme: README.md
authors:
  - Ansible (https://github.com/ansible)

4101
meta/runtime.yml
File diff suppressed because it is too large
@@ -88,10 +88,6 @@ class ActionModule(ActionBase):
        max_timeout = self._connection._play_context.timeout
        module_args = self._task.args

        async_status_args = {}
        starter_cmd = None
        confirm_cmd = None

        if module_args.get('state', None) == 'restored':
            if not wrap_async:
                if not check_mode:
@@ -78,12 +78,13 @@ DOCUMENTATION = '''
EXAMPLES = r'''
# A polkit rule needed to use the module with a non-root user.
# See the Notes section for details.
60-machinectl-fast-user-auth.rules: |
  polkit.addRule(function(action, subject) {
    if(action.id == "org.freedesktop.machine1.host-shell" && subject.isInGroup("wheel")) {
      return polkit.Result.AUTH_SELF_KEEP;
    }
  });
/etc/polkit-1/rules.d/60-machinectl-fast-user-auth.rules: |
  polkit.addRule(function(action, subject) {
    if(action.id == "org.freedesktop.machine1.host-shell" &&
        subject.isInGroup("wheel")) {
      return polkit.Result.AUTH_SELF_KEEP;
    }
  });
'''

from re import compile as re_compile
128
plugins/become/run0.py
Normal file
@@ -0,0 +1,128 @@
# -*- coding: utf-8 -*-
# Copyright (c) 2024, Ansible Project
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
# SPDX-License-Identifier: GPL-3.0-or-later

from __future__ import absolute_import, division, print_function

__metaclass__ = type

DOCUMENTATION = """
    name: run0
    short_description: Systemd's run0
    description:
        - This become plugin allows your remote/login user to execute commands as another user via the C(run0) utility.
    author:
        - Thomas Sjögren (@konstruktoid)
    version_added: '9.0.0'
    options:
        become_user:
            description: User you 'become' to execute the task.
            default: root
            ini:
                - section: privilege_escalation
                  key: become_user
                - section: run0_become_plugin
                  key: user
            vars:
                - name: ansible_become_user
                - name: ansible_run0_user
            env:
                - name: ANSIBLE_BECOME_USER
                - name: ANSIBLE_RUN0_USER
            type: string
        become_exe:
            description: The C(run0) executable.
            default: run0
            ini:
                - section: privilege_escalation
                  key: become_exe
                - section: run0_become_plugin
                  key: executable
            vars:
                - name: ansible_become_exe
                - name: ansible_run0_exe
            env:
                - name: ANSIBLE_BECOME_EXE
                - name: ANSIBLE_RUN0_EXE
            type: string
        become_flags:
            description: Options to pass to run0.
            default: ''
            ini:
                - section: privilege_escalation
                  key: become_flags
                - section: run0_become_plugin
                  key: flags
            vars:
                - name: ansible_become_flags
                - name: ansible_run0_flags
            env:
                - name: ANSIBLE_BECOME_FLAGS
                - name: ANSIBLE_RUN0_FLAGS
            type: string
    notes:
        - This plugin will only work when a polkit rule is in place.
"""

EXAMPLES = r"""
# An example polkit rule that allows the user 'ansible' in the 'wheel' group
# to execute commands using run0 without authentication.
/etc/polkit-1/rules.d/60-run0-fast-user-auth.rules: |
  polkit.addRule(function(action, subject) {
    if(action.id == "org.freedesktop.systemd1.manage-units" &&
        subject.isInGroup("wheel") &&
        subject.user == "ansible") {
      return polkit.Result.YES;
    }
  });
"""

from re import compile as re_compile

from ansible.plugins.become import BecomeBase
from ansible.module_utils._text import to_bytes

ansi_color_codes = re_compile(to_bytes(r"\x1B\[[0-9;]+m"))


class BecomeModule(BecomeBase):

    name = "community.general.run0"

    prompt = "Password: "
    fail = ("==== AUTHENTICATION FAILED ====",)
    success = ("==== AUTHENTICATION COMPLETE ====",)
    require_tty = (
        True  # see https://github.com/ansible-collections/community.general/issues/6932
    )

    @staticmethod
    def remove_ansi_codes(line):
        return ansi_color_codes.sub(b"", line)

    def build_become_command(self, cmd, shell):
        super().build_become_command(cmd, shell)

        if not cmd:
            return cmd

        become = self.get_option("become_exe")
        flags = self.get_option("become_flags")
        user = self.get_option("become_user")

        return (
            f"{become} --user={user} {flags} {self._build_success_command(cmd, shell)}"
        )

    def check_success(self, b_output):
        b_output = self.remove_ansi_codes(b_output)
        return super().check_success(b_output)

    def check_incorrect_password(self, b_output):
        b_output = self.remove_ansi_codes(b_output)
        return super().check_incorrect_password(b_output)

    def check_missing_password(self, b_output):
        b_output = self.remove_ansi_codes(b_output)
        return super().check_missing_password(b_output)
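Note the ``remove_ansi_codes`` helper: run0 colorizes its authentication banners, so the plugin strips ANSI escape sequences before matching output against ``success``/``fail``. The same regex logic can be sanity-checked standalone:

```python
import re

# Same pattern the plugin compiles; run0 wraps its banners in color escapes.
ansi_color_codes = re.compile(rb"\x1B\[[0-9;]+m")

raw = b"\x1b[1;32m==== AUTHENTICATION COMPLETE ====\x1b[0m"
print(ansi_color_codes.sub(b"", raw))  # b'==== AUTHENTICATION COMPLETE ===='
```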
@@ -84,6 +84,33 @@ DOCUMENTATION = '''
      - section: callback_opentelemetry
        key: disable_attributes_in_logs
    version_added: 7.1.0
  store_spans_in_file:
    default: None
    type: str
    description:
      - It stores the exported spans in the given file.
    env:
      - name: ANSIBLE_OPENTELEMETRY_STORE_SPANS_IN_FILE
    ini:
      - section: callback_opentelemetry
        key: store_spans_in_file
    version_added: 9.0.0
  otel_exporter_otlp_traces_protocol:
    type: str
    description:
      - E(OTEL_EXPORTER_OTLP_TRACES_PROTOCOL) represents the transport protocol for spans.
      - See
        U(https://opentelemetry-python.readthedocs.io/en/latest/sdk/environment_variables.html#envvar-OTEL_EXPORTER_OTLP_TRACES_PROTOCOL).
    default: grpc
    choices:
      - grpc
      - http/protobuf
    env:
      - name: OTEL_EXPORTER_OTLP_TRACES_PROTOCOL
    ini:
      - section: callback_opentelemetry
        key: otel_exporter_otlp_traces_protocol
    version_added: 9.0.0
requirements:
  - opentelemetry-api (Python library)
  - opentelemetry-exporter-otlp (Python library)
@@ -107,6 +134,7 @@ examples: |
'''

import getpass
import json
import os
import socket
import sys
@@ -124,15 +152,19 @@ from ansible.plugins.callback import CallbackBase
try:
    from opentelemetry import trace
    from opentelemetry.trace import SpanKind
    from opentelemetry.exporter.otlp.proto.grpc.trace_exporter import OTLPSpanExporter
    from opentelemetry.exporter.otlp.proto.grpc.trace_exporter import OTLPSpanExporter as GRPCOTLPSpanExporter
    from opentelemetry.exporter.otlp.proto.http.trace_exporter import OTLPSpanExporter as HTTPOTLPSpanExporter
    from opentelemetry.sdk.resources import SERVICE_NAME, Resource
    from opentelemetry.trace.status import Status, StatusCode
    from opentelemetry.trace.propagation.tracecontext import TraceContextTextMapPropagator
    from opentelemetry.sdk.trace import TracerProvider
    from opentelemetry.sdk.trace.export import (
        BatchSpanProcessor
        BatchSpanProcessor,
        SimpleSpanProcessor
    )
    from opentelemetry.sdk.trace.export.in_memory_span_exporter import (
        InMemorySpanExporter
    )

    # Support for opentelemetry-api <= 1.12
    try:
        from opentelemetry.util._time import _time_ns
@@ -255,7 +287,16 @@ class OpenTelemetrySource(object):
        task.dump = dump
        task.add_host(HostData(host_uuid, host_name, status, result))

    def generate_distributed_traces(self, otel_service_name, ansible_playbook, tasks_data, status, traceparent, disable_logs, disable_attributes_in_logs):
    def generate_distributed_traces(self,
                                    otel_service_name,
                                    ansible_playbook,
                                    tasks_data,
                                    status,
                                    traceparent,
                                    disable_logs,
                                    disable_attributes_in_logs,
                                    otel_exporter_otlp_traces_protocol,
                                    store_spans_in_file):
        """ generate distributed traces from the collected TaskData and HostData """

        tasks = []
@@ -271,7 +312,16 @@ class OpenTelemetrySource(object):
            )
        )

        processor = BatchSpanProcessor(OTLPSpanExporter())
        otel_exporter = None
        if store_spans_in_file:
            otel_exporter = InMemorySpanExporter()
            processor = SimpleSpanProcessor(otel_exporter)
        else:
            if otel_exporter_otlp_traces_protocol == 'grpc':
                otel_exporter = GRPCOTLPSpanExporter()
            else:
                otel_exporter = HTTPOTLPSpanExporter()
            processor = BatchSpanProcessor(otel_exporter)

        trace.get_tracer_provider().add_span_processor(processor)

@@ -293,6 +343,8 @@ class OpenTelemetrySource(object):
            with tracer.start_as_current_span(task.name, start_time=task.start, end_on_exit=False) as span:
                self.update_span_data(task, host_data, span, disable_logs, disable_attributes_in_logs)

        return otel_exporter

    def update_span_data(self, task_data, host_data, span, disable_logs, disable_attributes_in_logs):
        """ update the span with the given TaskData and HostData """

@@ -304,7 +356,6 @@ class OpenTelemetrySource(object):
        status = Status(status_code=StatusCode.OK)
        if host_data.status != 'included':
            # Support loops
            enriched_error_message = None
            if 'results' in host_data.result._result:
                if host_data.status == 'failed':
                    message = self.get_error_message_from_results(host_data.result._result['results'], task_data.action)
@@ -464,6 +515,8 @@ class CallbackModule(CallbackBase):
        self.errors = 0
        self.disabled = False
        self.traceparent = False
        self.store_spans_in_file = False
        self.otel_exporter_otlp_traces_protocol = None

        if OTEL_LIBRARY_IMPORT_ERROR:
            raise_from(
@@ -491,6 +544,8 @@ class CallbackModule(CallbackBase):

        self.disable_logs = self.get_option('disable_logs')

        self.store_spans_in_file = self.get_option('store_spans_in_file')

        self.otel_service_name = self.get_option('otel_service_name')

        if not self.otel_service_name:
@@ -499,6 +554,8 @@ class CallbackModule(CallbackBase):
        # See https://github.com/open-telemetry/opentelemetry-specification/issues/740
        self.traceparent = self.get_option('traceparent')

        self.otel_exporter_otlp_traces_protocol = self.get_option('otel_exporter_otlp_traces_protocol')

    def dump_results(self, task, result):
        """ dump the results if disable_logs is not enabled """
        if self.disable_logs:
@@ -594,15 +651,22 @@ class CallbackModule(CallbackBase):
            status = Status(status_code=StatusCode.OK)
        else:
            status = Status(status_code=StatusCode.ERROR)
        self.opentelemetry.generate_distributed_traces(
        otel_exporter = self.opentelemetry.generate_distributed_traces(
            self.otel_service_name,
            self.ansible_playbook,
            self.tasks_data,
            status,
            self.traceparent,
            self.disable_logs,
            self.disable_attributes_in_logs
            self.disable_attributes_in_logs,
            self.otel_exporter_otlp_traces_protocol,
            self.store_spans_in_file
        )

        if self.store_spans_in_file:
            spans = [json.loads(span.to_json()) for span in otel_exporter.get_finished_spans()]
            with open(self.store_spans_in_file, "w", encoding="utf-8") as output:
                json.dump({"spans": spans}, output, indent=4)

    def v2_runner_on_async_failed(self, result, **kwargs):
        self.errors += 1
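With `store_spans_in_file` set, the callback collects spans in an `InMemorySpanExporter` and writes their JSON form under a top-level `spans` key. A hedged sketch of reading such a file back (`spans.json` is just an example path; the per-span fields follow OpenTelemetry's `ReadableSpan.to_json()` layout):

```python
import json

# Assumes the callback ran with store_spans_in_file=spans.json (example path).
with open("spans.json", encoding="utf-8") as f:
    doc = json.load(f)

for span in doc["spans"]:
    # "name" and "context" are standard fields of a serialized ReadableSpan.
    print(span["name"], span["context"]["trace_id"])
```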
127
plugins/callback/timestamp.py
Normal file
@@ -0,0 +1,127 @@
# -*- coding: utf-8 -*-

# Copyright (c) 2024, kurokobo <kurokobo@protonmail.com>
# Copyright (c) 2014, Michael DeHaan <michael.dehaan@gmail.com>
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
# SPDX-License-Identifier: GPL-3.0-or-later

from __future__ import absolute_import, division, print_function

__metaclass__ = type

DOCUMENTATION = r"""
    name: timestamp
    type: stdout
    short_description: Adds simple timestamp for each header
    version_added: 9.0.0
    description:
      - This callback adds simple timestamp for each header.
    author: kurokobo (@kurokobo)
    options:
      timezone:
        description:
          - Timezone to use for the timestamp in IANA time zone format.
          - For example C(America/New_York), C(Asia/Tokyo). Ignored on Python < 3.9.
        ini:
          - section: callback_timestamp
            key: timezone
        env:
          - name: ANSIBLE_CALLBACK_TIMESTAMP_TIMEZONE
        type: string
      format_string:
        description:
          - Format of the timestamp shown to user in 1989 C standard format.
          - >
            Refer to L(the Python documentation,https://docs.python.org/3/library/datetime.html#strftime-and-strptime-format-codes)
            for the available format codes.
        ini:
          - section: callback_timestamp
            key: format_string
        env:
          - name: ANSIBLE_CALLBACK_TIMESTAMP_FORMAT_STRING
        default: "%H:%M:%S"
        type: string
    seealso:
      - plugin: ansible.posix.profile_tasks
        plugin_type: callback
        description: >
          You can use P(ansible.posix.profile_tasks#callback) callback plugin to time individual tasks and overall execution time
          with detailed timestamps.
    extends_documentation_fragment:
      - ansible.builtin.default_callback
      - ansible.builtin.result_format_callback
"""


from ansible.plugins.callback.default import CallbackModule as Default
from ansible.utils.display import get_text_width
from ansible.module_utils.common.text.converters import to_text
from datetime import datetime
import types
import sys

# Store whether the zoneinfo module is available
_ZONEINFO_AVAILABLE = sys.version_info >= (3, 9)


def get_datetime_now(tz):
    """
    Returns the current timestamp with the specified timezone
    """
    return datetime.now(tz=tz)


def banner(self, msg, color=None, cows=True):
    """
    Prints a header-looking line with cowsay or stars with length depending on terminal width (3 minimum) with trailing timestamp

    Based on the banner method of Display class from ansible.utils.display

    https://github.com/ansible/ansible/blob/4403519afe89138042108e237aef317fd5f09c33/lib/ansible/utils/display.py#L511
    """
    timestamp = get_datetime_now(self.timestamp_tzinfo).strftime(self.timestamp_format_string)
    timestamp_len = get_text_width(timestamp) + 1  # +1 for leading space

    msg = to_text(msg)
    if self.b_cowsay and cows:
        try:
            self.banner_cowsay("%s @ %s" % (msg, timestamp))
            return
        except OSError:
            self.warning("somebody cleverly deleted cowsay or something during the PB run. heh.")

    msg = msg.strip()
    try:
        star_len = self.columns - get_text_width(msg) - timestamp_len
    except EnvironmentError:
        star_len = self.columns - len(msg) - timestamp_len
    if star_len <= 3:
        star_len = 3
    stars = "*" * star_len
    self.display("\n%s %s %s" % (msg, stars, timestamp), color=color)


class CallbackModule(Default):
    CALLBACK_VERSION = 2.0
    CALLBACK_TYPE = "stdout"
    CALLBACK_NAME = "community.general.timestamp"

    def __init__(self):
        super(CallbackModule, self).__init__()

        # Replace the banner method of the display object with the custom one
        self._display.banner = types.MethodType(banner, self._display)

    def set_options(self, task_keys=None, var_options=None, direct=None):
        super(CallbackModule, self).set_options(task_keys=task_keys, var_options=var_options, direct=direct)

        # Store zoneinfo for specified timezone if available
        tzinfo = None
        if _ZONEINFO_AVAILABLE and self.get_option("timezone"):
            from zoneinfo import ZoneInfo

            tzinfo = ZoneInfo(self.get_option("timezone"))

        # Inject options into the display object
        setattr(self._display, "timestamp_tzinfo", tzinfo)
        setattr(self._display, "timestamp_format_string", self.get_option("format_string"))
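At its core the plugin renders a timezone-aware `strftime`; the rest is banner plumbing. A standalone sketch of the same rendering, using the plugin's default format string and one of its example timezones:

```python
import sys
from datetime import datetime

# zoneinfo only exists on Python >= 3.9, mirroring the plugin's guard.
if sys.version_info >= (3, 9):
    from zoneinfo import ZoneInfo
    tz = ZoneInfo("Asia/Tokyo")
else:
    tz = None  # fall back to the local timezone, as the plugin does

print(datetime.now(tz=tz).strftime("%H:%M:%S"))
```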
@@ -56,5 +56,4 @@ attributes:
    support: full
    membership:
      - community.general.consul
  version_added: 8.3.0
"""
62
plugins/doc_fragments/django.py
Normal file
@@ -0,0 +1,62 @@
# -*- coding: utf-8 -*-
# Copyright (c) 2024, Alexei Znamensky <russoz@gmail.com>
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
# SPDX-License-Identifier: GPL-3.0-or-later

from __future__ import (absolute_import, division, print_function)
__metaclass__ = type


class ModuleDocFragment(object):
    DOCUMENTATION = r'''
options:
  venv:
    description:
      - Use the Python interpreter from this virtual environment.
      - Pass the path to the root of the virtualenv, not the C(bin/) directory nor the C(python) executable.
    type: path
  settings:
    description:
      - Specifies the settings module to use.
      - The value will be passed as is to the C(--settings) argument in C(django-admin).
    type: str
    required: true
  pythonpath:
    description:
      - Adds the given filesystem path to the Python import search path.
      - The value will be passed as is to the C(--pythonpath) argument in C(django-admin).
    type: path
  traceback:
    description:
      - Provides a full stack trace in the output when a C(CommandError) is raised.
    type: bool
  verbosity:
    description:
      - Specifies the amount of notification and debug information in the output of C(django-admin).
    type: int
    choices: [0, 1, 2, 3]
  skip_checks:
    description:
      - Skips running system checks prior to running the command.
    type: bool


notes:
  - The C(django-admin) command is always executed using the C(C) locale, and the option C(--no-color) is always passed.

seealso:
  - name: django-admin and manage.py in official Django documentation
    description: >-
      Refer to this documentation for the builtin commands and options of C(django-admin).
      Please make sure that you select the right version of Django in the version selector on that page.
    link: https://docs.djangoproject.com/en/5.0/ref/django-admin/
'''

    DATABASE = r'''
options:
  database:
    description:
      - Specify the database to be used.
    type: str
    default: default
'''
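Doc fragments like these are consumed through `extends_documentation_fragment` in a module's DOCUMENTATION; the `DATABASE` attribute is addressed as a dotted suffix. A hedged sketch with a hypothetical module name:

```python
# Hypothetical module documentation reusing the fragments above.
DOCUMENTATION = r'''
module: django_example
short_description: Example consumer of the django doc fragments
extends_documentation_fragment:
  - community.general.django           # venv, settings, pythonpath, ...
  - community.general.django.database  # the database option from DATABASE
options: {}
'''
```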
@@ -16,6 +16,13 @@ options:
      - Specify the target host of the Proxmox VE cluster.
    type: str
    required: true
  api_port:
    description:
      - Specify the target port of the Proxmox VE cluster.
      - Uses the E(PROXMOX_PORT) environment variable if not specified.
    type: int
    required: false
    version_added: 9.1.0
  api_user:
    description:
      - Specify the user to authenticate with.
@@ -65,3 +72,13 @@ options:
      - Add the new VM to the specified pool.
    type: str
'''

ACTIONGROUP_PROXMOX = r"""
options: {}
attributes:
  action_group:
    description: Use C(group/community.general.proxmox) in C(module_defaults) to set defaults for this module.
    support: full
    membership:
      - community.general.proxmox
"""
@@ -1,120 +0,0 @@
# -*- coding: utf-8 -*-

# Copyright (c) 2014, Matt Martz <matt@sivel.net>
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
# SPDX-License-Identifier: GPL-3.0-or-later

from __future__ import (absolute_import, division, print_function)
__metaclass__ = type


class ModuleDocFragment(object):

    # Standard Rackspace only documentation fragment
    DOCUMENTATION = r'''
options:
  api_key:
    description:
      - Rackspace API key, overrides O(credentials).
    type: str
    aliases: [ password ]
  credentials:
    description:
      - File to find the Rackspace credentials in. Ignored if O(api_key) and
        O(username) are provided.
    type: path
    aliases: [ creds_file ]
  env:
    description:
      - Environment as configured in C(~/.pyrax.cfg),
        see U(https://github.com/rackspace/pyrax/blob/master/docs/getting_started.md#pyrax-configuration).
    type: str
  region:
    description:
      - Region to create an instance in.
    type: str
  username:
    description:
      - Rackspace username, overrides O(credentials).
    type: str
  validate_certs:
    description:
      - Whether or not to require SSL validation of API endpoints.
    type: bool
    aliases: [ verify_ssl ]
requirements:
  - pyrax
notes:
  - The following environment variables can be used, E(RAX_USERNAME),
    E(RAX_API_KEY), E(RAX_CREDS_FILE), E(RAX_CREDENTIALS), E(RAX_REGION).
  - E(RAX_CREDENTIALS) and E(RAX_CREDS_FILE) point to a credentials file
    appropriate for pyrax. See U(https://github.com/rackspace/pyrax/blob/master/docs/getting_started.md#authenticating).
  - E(RAX_USERNAME) and E(RAX_API_KEY) obviate the use of a credentials file.
  - E(RAX_REGION) defines a Rackspace Public Cloud region (DFW, ORD, LON, ...).
'''

    # Documentation fragment including attributes to enable communication
    # of other OpenStack clouds. Not all rax modules support this.
    OPENSTACK = r'''
options:
  api_key:
    type: str
    description:
      - Rackspace API key, overrides O(credentials).
    aliases: [ password ]
  auth_endpoint:
    type: str
    description:
      - The URI of the authentication service.
      - If not specified will be set to U(https://identity.api.rackspacecloud.com/v2.0/).
  credentials:
    type: path
    description:
      - File to find the Rackspace credentials in. Ignored if O(api_key) and
        O(username) are provided.
    aliases: [ creds_file ]
  env:
    type: str
    description:
      - Environment as configured in C(~/.pyrax.cfg),
        see U(https://github.com/rackspace/pyrax/blob/master/docs/getting_started.md#pyrax-configuration).
  identity_type:
    type: str
    description:
      - Authentication mechanism to use, such as rackspace or keystone.
    default: rackspace
  region:
    type: str
    description:
      - Region to create an instance in.
  tenant_id:
    type: str
    description:
      - The tenant ID used for authentication.
  tenant_name:
    type: str
    description:
      - The tenant name used for authentication.
  username:
    type: str
    description:
      - Rackspace username, overrides O(credentials).
  validate_certs:
    description:
      - Whether or not to require SSL validation of API endpoints.
    type: bool
    aliases: [ verify_ssl ]
deprecated:
  removed_in: 9.0.0
  why: This module relies on the deprecated package pyrax.
  alternative: Use the Openstack modules instead.
requirements:
  - pyrax
notes:
  - The following environment variables can be used, E(RAX_USERNAME),
    E(RAX_API_KEY), E(RAX_CREDS_FILE), E(RAX_CREDENTIALS), E(RAX_REGION).
  - E(RAX_CREDENTIALS) and E(RAX_CREDS_FILE) points to a credentials file
    appropriate for pyrax. See U(https://github.com/rackspace/pyrax/blob/master/docs/getting_started.md#authenticating).
  - E(RAX_USERNAME) and E(RAX_API_KEY) obviate the use of a credentials file.
  - E(RAX_REGION) defines a Rackspace Public Cloud region (DFW, ORD, LON, ...).
'''
@@ -57,8 +57,8 @@ EXAMPLES = '''

RETURN = '''
  _value:
    description: A dictionary with the provided key-value pairs.
    type: dictionary
    description: The dictionary having the provided key-value pairs.
    type: boolean
'''
@@ -13,8 +13,6 @@ DOCUMENTATION = '''
  author: Felix Fontein (@felixfontein)
  description:
    - Transform a sequence of dictionaries to a dictionary where the dictionaries are indexed by an attribute.
    - This filter is similar to the Jinja2 C(groupby) filter. Use the Jinja2 C(groupby) filter if you have multiple entries with the same value,
      or when you need a dictionary with list values, or when you need to use deeply nested attributes.
  positional: attribute
  options:
    _input:
138
plugins/filter/keep_keys.py
Normal file
@@ -0,0 +1,138 @@
# -*- coding: utf-8 -*-
# Copyright (c) 2024 Vladimir Botka <vbotka@gmail.com>
# Copyright (c) 2024 Felix Fontein <felix@fontein.de>
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
# SPDX-License-Identifier: GPL-3.0-or-later

from __future__ import (absolute_import, division, print_function)
__metaclass__ = type

DOCUMENTATION = '''
  name: keep_keys
  short_description: Keep specific keys from dictionaries in a list
  version_added: "9.1.0"
  author:
    - Vladimir Botka (@vbotka)
    - Felix Fontein (@felixfontein)
  description: This filter keeps only specified keys from a provided list of dictionaries.
  options:
    _input:
      description:
        - A list of dictionaries.
        - Top level keys must be strings.
      type: list
      elements: dictionary
      required: true
    target:
      description:
        - A single key or key pattern to keep, or a list of keys or keys patterns to keep.
        - If O(matching_parameter=regex) there must be exactly one pattern provided.
      type: raw
      required: true
    matching_parameter:
      description: Specify the matching option of target keys.
      type: str
      default: equal
      choices:
        equal: Matches keys of exactly one of the O(target) items.
        starts_with: Matches keys that start with one of the O(target) items.
        ends_with: Matches keys that end with one of the O(target) items.
        regex:
          - Matches keys that match the regular expression provided in O(target).
          - In this case, O(target) must be a regex string or a list with single regex string.
'''

EXAMPLES = '''
  l:
    - {k0_x0: A0, k1_x1: B0, k2_x2: [C0], k3_x3: foo}
    - {k0_x0: A1, k1_x1: B1, k2_x2: [C1], k3_x3: bar}

  # 1) By default match keys that equal any of the items in the target.
  t: [k0_x0, k1_x1]
  r: "{{ l | community.general.keep_keys(target=t) }}"

  # 2) Match keys that start with any of the items in the target.
  t: [k0, k1]
  r: "{{ l | community.general.keep_keys(target=t, matching_parameter='starts_with') }}"

  # 3) Match keys that end with any of the items in target.
  t: [x0, x1]
  r: "{{ l | community.general.keep_keys(target=t, matching_parameter='ends_with') }}"

  # 4) Match keys by the regex.
  t: ['^.*[01]_x.*$']
  r: "{{ l | community.general.keep_keys(target=t, matching_parameter='regex') }}"

  # 5) Match keys by the regex.
  t: '^.*[01]_x.*$'
  r: "{{ l | community.general.keep_keys(target=t, matching_parameter='regex') }}"

  # The results of above examples 1-5 are all the same.
  r:
    - {k0_x0: A0, k1_x1: B0}
    - {k0_x0: A1, k1_x1: B1}

  # 6) By default match keys that equal the target.
  t: k0_x0
  r: "{{ l | community.general.keep_keys(target=t) }}"

  # 7) Match keys that start with the target.
  t: k0
  r: "{{ l | community.general.keep_keys(target=t, matching_parameter='starts_with') }}"

  # 8) Match keys that end with the target.
  t: x0
  r: "{{ l | community.general.keep_keys(target=t, matching_parameter='ends_with') }}"

  # 9) Match keys by the regex.
  t: '^.*0_x.*$'
  r: "{{ l | community.general.keep_keys(target=t, matching_parameter='regex') }}"

  # The results of above examples 6-9 are all the same.
  r:
    - {k0_x0: A0}
    - {k0_x0: A1}
'''

RETURN = '''
  _value:
    description: The list of dictionaries with selected keys.
    type: list
    elements: dictionary
'''

from ansible_collections.community.general.plugins.plugin_utils.keys_filter import (
    _keys_filter_params,
    _keys_filter_target_str)


def keep_keys(data, target=None, matching_parameter='equal'):
    """keep specific keys from dictionaries in a list"""

    # test parameters
    _keys_filter_params(data, matching_parameter)
    # test and transform target
    tt = _keys_filter_target_str(target, matching_parameter)

    if matching_parameter == 'equal':
        def keep_key(key):
            return key in tt
    elif matching_parameter == 'starts_with':
        def keep_key(key):
            return key.startswith(tt)
    elif matching_parameter == 'ends_with':
        def keep_key(key):
            return key.endswith(tt)
    elif matching_parameter == 'regex':
        def keep_key(key):
            return tt.match(key) is not None

    return [dict((k, v) for k, v in d.items() if keep_key(k)) for d in data]


class FilterModule(object):

    def filters(self):
        return {
            'keep_keys': keep_keys,
        }
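Although the filter is normally invoked from Jinja2, `keep_keys` itself is a plain Python function, so its matching behavior is easy to check directly (assuming the collection is installed so the import resolves):

```python
from ansible_collections.community.general.plugins.filter.keep_keys import keep_keys

data = [
    {"k0_x0": "A0", "k1_x1": "B0", "k2_x2": ["C0"]},
    {"k0_x0": "A1", "k1_x1": "B1", "k2_x2": ["C1"]},
]

# Keep only keys starting with "k0"; mirrors example 7 above.
print(keep_keys(data, target="k0", matching_parameter="starts_with"))
# -> [{'k0_x0': 'A0'}, {'k0_x0': 'A1'}]
```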
138
plugins/filter/remove_keys.py
Normal file
138
plugins/filter/remove_keys.py
Normal file
@@ -0,0 +1,138 @@
|
||||
# -*- coding: utf-8 -*-
# Copyright (c) 2024 Vladimir Botka <vbotka@gmail.com>
# Copyright (c) 2024 Felix Fontein <felix@fontein.de>
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
# SPDX-License-Identifier: GPL-3.0-or-later

from __future__ import (absolute_import, division, print_function)
__metaclass__ = type

DOCUMENTATION = '''
name: remove_keys
short_description: Remove specific keys from dictionaries in a list
version_added: "9.1.0"
author:
  - Vladimir Botka (@vbotka)
  - Felix Fontein (@felixfontein)
description: This filter removes only specified keys from a provided list of dictionaries.
options:
  _input:
    description:
      - A list of dictionaries.
      - Top level keys must be strings.
    type: list
    elements: dictionary
    required: true
  target:
    description:
      - A single key or key pattern to remove, or a list of keys or key patterns to remove.
      - If O(matching_parameter=regex), exactly one pattern must be provided.
    type: raw
    required: true
  matching_parameter:
    description: Specify the matching option of target keys.
    type: str
    default: equal
    choices:
      equal: Matches keys of exactly one of the O(target) items.
      starts_with: Matches keys that start with one of the O(target) items.
      ends_with: Matches keys that end with one of the O(target) items.
      regex:
        - Matches keys that match the regular expression provided in O(target).
        - In this case, O(target) must be a regex string or a list with a single regex string.
'''

EXAMPLES = '''
l:
  - {k0_x0: A0, k1_x1: B0, k2_x2: [C0], k3_x3: foo}
  - {k0_x0: A1, k1_x1: B1, k2_x2: [C1], k3_x3: bar}

# 1) By default, match keys that equal any of the items in the target.
t: [k0_x0, k1_x1]
r: "{{ l | community.general.remove_keys(target=t) }}"

# 2) Match keys that start with any of the items in the target.
t: [k0, k1]
r: "{{ l | community.general.remove_keys(target=t, matching_parameter='starts_with') }}"

# 3) Match keys that end with any of the items in the target.
t: [x0, x1]
r: "{{ l | community.general.remove_keys(target=t, matching_parameter='ends_with') }}"

# 4) Match keys by the regex.
t: ['^.*[01]_x.*$']
r: "{{ l | community.general.remove_keys(target=t, matching_parameter='regex') }}"

# 5) Match keys by the regex.
t: '^.*[01]_x.*$'
r: "{{ l | community.general.remove_keys(target=t, matching_parameter='regex') }}"

# The results of examples 1-5 above are all the same.
r:
  - {k2_x2: [C0], k3_x3: foo}
  - {k2_x2: [C1], k3_x3: bar}

# 6) By default, match keys that equal the target.
t: k0_x0
r: "{{ l | community.general.remove_keys(target=t) }}"

# 7) Match keys that start with the target.
t: k0
r: "{{ l | community.general.remove_keys(target=t, matching_parameter='starts_with') }}"

# 8) Match keys that end with the target.
t: x0
r: "{{ l | community.general.remove_keys(target=t, matching_parameter='ends_with') }}"

# 9) Match keys by the regex.
t: '^.*0_x.*$'
r: "{{ l | community.general.remove_keys(target=t, matching_parameter='regex') }}"

# The results of examples 6-9 above are all the same.
r:
  - {k1_x1: B0, k2_x2: [C0], k3_x3: foo}
  - {k1_x1: B1, k2_x2: [C1], k3_x3: bar}
'''

RETURN = '''
_value:
  description: The list of dictionaries with selected keys removed.
  type: list
  elements: dictionary
'''

from ansible_collections.community.general.plugins.plugin_utils.keys_filter import (
    _keys_filter_params,
    _keys_filter_target_str)


def remove_keys(data, target=None, matching_parameter='equal'):
    """remove specific keys from dictionaries in a list"""

    # test parameters
    _keys_filter_params(data, matching_parameter)
    # test and transform target
    tt = _keys_filter_target_str(target, matching_parameter)

    if matching_parameter == 'equal':
        def keep_key(key):
            return key not in tt
    elif matching_parameter == 'starts_with':
        def keep_key(key):
            return not key.startswith(tt)
    elif matching_parameter == 'ends_with':
        def keep_key(key):
            return not key.endswith(tt)
    elif matching_parameter == 'regex':
        def keep_key(key):
            return tt.match(key) is None

    return [dict((k, v) for k, v in d.items() if keep_key(k)) for d in data]


class FilterModule(object):

    def filters(self):
        return {
            'remove_keys': remove_keys,
        }
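
The filter body above builds one keep_key predicate per matching mode and applies it to every dictionary in the list. A minimal standalone sketch of the same matching logic (plain Python, without the collection's _keys_filter_* validation helpers; the function name is ours, not the plugin's):

import re

def remove_keys_sketch(data, target, matching_parameter='equal'):
    targets = target if isinstance(target, list) else [target]
    if matching_parameter == 'equal':
        keep_key = lambda k: k not in targets
    elif matching_parameter == 'starts_with':
        keep_key = lambda k: not k.startswith(tuple(targets))
    elif matching_parameter == 'ends_with':
        keep_key = lambda k: not k.endswith(tuple(targets))
    elif matching_parameter == 'regex':
        pattern = re.compile(targets[0])  # exactly one pattern, as documented
        keep_key = lambda k: pattern.match(k) is None
    else:
        raise ValueError('unknown matching_parameter: %s' % matching_parameter)
    return [{k: v for k, v in d.items() if keep_key(k)} for d in data]

# remove_keys_sketch([{'k0_x0': 'A0', 'k3_x3': 'foo'}], ['k0_x0'])
# -> [{'k3_x3': 'foo'}]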
plugins/filter/replace_keys.py (new file, 180 lines)
@@ -0,0 +1,180 @@

# -*- coding: utf-8 -*-
# Copyright (c) 2024 Vladimir Botka <vbotka@gmail.com>
# Copyright (c) 2024 Felix Fontein <felix@fontein.de>
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
# SPDX-License-Identifier: GPL-3.0-or-later

from __future__ import (absolute_import, division, print_function)
__metaclass__ = type

DOCUMENTATION = '''
name: replace_keys
short_description: Replace specific keys in a list of dictionaries
version_added: "9.1.0"
author:
  - Vladimir Botka (@vbotka)
  - Felix Fontein (@felixfontein)
description: This filter replaces specified keys in a provided list of dictionaries.
options:
  _input:
    description:
      - A list of dictionaries.
      - Top level keys must be strings.
    type: list
    elements: dictionary
    required: true
  target:
    description:
      - A list of dictionaries with attributes C(before) and C(after).
      - The value of O(target[].after) replaces a key matching O(target[].before).
    type: list
    elements: dictionary
    required: true
    suboptions:
      before:
        description:
          - A key or key pattern to change.
          - The interpretation of O(target[].before) depends on O(matching_parameter).
          - For a key that matches multiple O(target[].before) entries, the B(first) matching O(target[].after) is used.
        type: str
      after:
        description: The value to change a matching key to.
        type: str
  matching_parameter:
    description: Specify the matching option of target keys.
    type: str
    default: equal
    choices:
      equal: Matches keys of exactly one of the O(target[].before) items.
      starts_with: Matches keys that start with one of the O(target[].before) items.
      ends_with: Matches keys that end with one of the O(target[].before) items.
      regex: Matches keys that match one of the regular expressions provided in O(target[].before).
'''

EXAMPLES = '''
l:
  - {k0_x0: A0, k1_x1: B0, k2_x2: [C0], k3_x3: foo}
  - {k0_x0: A1, k1_x1: B1, k2_x2: [C1], k3_x3: bar}

# 1) By default, replace keys that are equal to any of the attributes before.
t:
  - {before: k0_x0, after: a0}
  - {before: k1_x1, after: a1}
r: "{{ l | community.general.replace_keys(target=t) }}"

# 2) Replace keys that start with any of the attributes before.
t:
  - {before: k0, after: a0}
  - {before: k1, after: a1}
r: "{{ l | community.general.replace_keys(target=t, matching_parameter='starts_with') }}"

# 3) Replace keys that end with any of the attributes before.
t:
  - {before: x0, after: a0}
  - {before: x1, after: a1}
r: "{{ l | community.general.replace_keys(target=t, matching_parameter='ends_with') }}"

# 4) Replace keys that match any regex of the attributes before.
t:
  - {before: "^.*0_x.*$", after: a0}
  - {before: "^.*1_x.*$", after: a1}
r: "{{ l | community.general.replace_keys(target=t, matching_parameter='regex') }}"

# The results of examples 1-4 above are all the same.
r:
  - {a0: A0, a1: B0, k2_x2: [C0], k3_x3: foo}
  - {a0: A1, a1: B1, k2_x2: [C1], k3_x3: bar}

# 5) If multiple keys are replaced with the same name, the value of the last matching key is kept.
t:
  - {before: "^.*_x.*$", after: X}
r: "{{ l | community.general.replace_keys(target=t, matching_parameter='regex') }}"

# gives

r:
  - X: foo
  - X: bar

# 6) If there are items with an equal attribute before, the first one is used.
t:
  - {before: "^.*_x.*$", after: X}
  - {before: "^.*_x.*$", after: Y}
r: "{{ l | community.general.replace_keys(target=t, matching_parameter='regex') }}"

# gives

r:
  - X: foo
  - X: bar

# 7) If there are multiple matches for a key, the first one is used.
l:
  - {aaa1: A, bbb1: B, ccc1: C}
  - {aaa2: D, bbb2: E, ccc2: F}
t:
  - {before: a, after: X}
  - {before: aa, after: Y}
r: "{{ l | community.general.replace_keys(target=t, matching_parameter='starts_with') }}"

# gives

r:
  - {X: A, bbb1: B, ccc1: C}
  - {X: D, bbb2: E, ccc2: F}
'''

RETURN = '''
_value:
  description: The list of dictionaries with replaced keys.
  type: list
  elements: dictionary
'''

from ansible_collections.community.general.plugins.plugin_utils.keys_filter import (
    _keys_filter_params,
    _keys_filter_target_dict)


def replace_keys(data, target=None, matching_parameter='equal'):
    """replace specific keys in a list of dictionaries"""

    # test parameters
    _keys_filter_params(data, matching_parameter)
    # test and transform target
    tz = _keys_filter_target_dict(target, matching_parameter)

    if matching_parameter == 'equal':
        def replace_key(key):
            for b, a in tz:
                if key == b:
                    return a
            return key
    elif matching_parameter == 'starts_with':
        def replace_key(key):
            for b, a in tz:
                if key.startswith(b):
                    return a
            return key
    elif matching_parameter == 'ends_with':
        def replace_key(key):
            for b, a in tz:
                if key.endswith(b):
                    return a
            return key
    elif matching_parameter == 'regex':
        def replace_key(key):
            for b, a in tz:
                if b.match(key):
                    return a
            return key

    return [dict((replace_key(k), v) for k, v in d.items()) for d in data]


class FilterModule(object):

    def filters(self):
        return {
            'replace_keys': replace_keys,
        }
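
The first-match rule in replace_key and the key-collision behaviour seen in examples 5 and 6 can be reproduced in isolation. A standalone sketch of the 'equal' mode (validation helpers omitted; names are ours):

def replace_keys_sketch(data, target):
    # target: list of (before, after) pairs; the first matching pair wins.
    def replace_key(key):
        for before, after in target:
            if key == before:
                return after
        return key
    # When several keys are renamed to the same name, later dict items
    # overwrite earlier ones -- which is why example 5 keeps only the last value.
    return [{replace_key(k): v for k, v in d.items()} for d in data]

# replace_keys_sketch([{'k0_x0': 'A0', 'k1_x1': 'B0'}], [('k0_x0', 'a0')])
# -> [{'a0': 'A0', 'k1_x1': 'B0'}]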
@@ -5,7 +5,7 @@

DOCUMENTATION:
  name: to_days
  short_description: Converts a duration string to days
  short_description: Converte a duration string to days
  version_added: 0.2.0
  description:
    - Parse a human readable time duration string and convert to days.

@@ -5,7 +5,7 @@

DOCUMENTATION:
  name: to_hours
  short_description: Converts a duration string to hours
  short_description: Converte a duration string to hours
  version_added: 0.2.0
  description:
    - Parse a human readable time duration string and convert to hours.

@@ -5,7 +5,7 @@

DOCUMENTATION:
  name: to_milliseconds
  short_description: Converts a duration string to milliseconds
  short_description: Converte a duration string to milliseconds
  version_added: 0.2.0
  description:
    - Parse a human readable time duration string and convert to milliseconds.

@@ -5,7 +5,7 @@

DOCUMENTATION:
  name: to_minutes
  short_description: Converts a duration string to minutes
  short_description: Converte a duration string to minutes
  version_added: 0.2.0
  description:
    - Parse a human readable time duration string and convert to minutes.

@@ -5,7 +5,7 @@

DOCUMENTATION:
  name: to_months
  short_description: Convert a duration string to months
  short_description: Converte a duration string to months
  version_added: 0.2.0
  description:
    - Parse a human readable time duration string and convert to months.

@@ -5,7 +5,7 @@

DOCUMENTATION:
  name: to_seconds
  short_description: Converts a duration string to seconds
  short_description: Converte a duration string to seconds
  version_added: 0.2.0
  description:
    - Parse a human readable time duration string and convert to seconds.

@@ -5,7 +5,7 @@

DOCUMENTATION:
  name: to_time_unit
  short_description: Converts a duration string to the given time unit
  short_description: Converte a duration string to the given time unit
  version_added: 0.2.0
  description:
    - Parse a human readable time duration string and convert to the given time unit.

@@ -5,7 +5,7 @@

DOCUMENTATION:
  name: to_weeks
  short_description: Converts a duration string to weeks
  short_description: Converte a duration string to weeks
  version_added: 0.2.0
  description:
    - Parse a human readable time duration string and convert to weeks.

@@ -5,7 +5,7 @@

DOCUMENTATION:
  name: to_years
  short_description: Converts a duration string to years
  short_description: Converte a duration string to years
  version_added: 0.2.0
  description:
    - Parse a human readable time duration string and convert to years.

@@ -329,9 +329,8 @@ class InventoryModule(BaseInventoryPlugin, Constructable, Cacheable):
                data = json['data']
                break
            else:
                if json['data']:
                    # /hosts 's 'results' is a list of all hosts, returned is paginated
                    data = data + json['data']
                # /hosts 's 'results' is a list of all hosts, returned is paginated
                data = data + json['data']
                break

        self._cache[self.cache_key][url] = data

@@ -174,9 +174,8 @@ class Bitwarden(object):
        else:
            initial_matches = [initial_matches]

        # Filter to only include results from the right field, if a search is requested by value or field
        return [item for item in initial_matches
                if not search_value or not search_field or item.get(search_field) == search_value]
        # Filter to only include results from the right field.
        return [item for item in initial_matches if not search_value or item[search_field] == search_value]

    def get_field(self, field, search_value, search_field="name", collection_id=None, organization_id=None):
        """Return a list of the specified field for records whose search_field match search_value

@@ -63,11 +63,11 @@ RETURN = """
import json
import os
import re
from importlib import import_module

import yaml

from ansible.errors import AnsibleLookupError
from ansible.module_utils.compat.importlib import import_module
from ansible.plugins.lookup import LookupBase


@@ -330,7 +330,6 @@ class LookupModule(LookupBase):
            myres.use_edns(0, ednsflags=dns.flags.DO, payload=edns_size)

        domains = []
        nameservers = []
        qtype = self.get_option('qtype')
        flat = self.get_option('flat')
        fail_on_error = self.get_option('fail_on_error')

@@ -346,6 +345,7 @@
            if t.startswith('@'):  # e.g. "@10.0.1.2,192.0.2.1" is ok.
                nsset = t[1:].split(',')
                for ns in nsset:
                    nameservers = []
                    # Check if we have a valid IP address. If so, use that, otherwise
                    # try to resolve name to address using system's resolver. If that
                    # fails we bail out.

@@ -358,6 +358,7 @@
                        nameservers.append(nsaddr)
                    except Exception as e:
                        raise AnsibleError("dns lookup NS: %s" % to_native(e))
                myres.nameservers = nameservers
                continue
            if '=' in t:
                try:

@@ -396,9 +397,6 @@

        # print "--- domain = {0} qtype={1} rdclass={2}".format(domain, qtype, rdclass)

        if len(nameservers) > 0:
            myres.nameservers = nameservers

        if qtype.upper() == 'PTR':
            reversed_domains = []
            for domain in domains:

@@ -49,8 +49,8 @@ EXAMPLES = '''
    dest: /srv/checkout
  vars:
    github_token: >-
      {{ lookup('community.general.github_app_access_token', key_path='/home/to_your/key',
      app_id='123456', installation_id='64209') }}
      lookup('community.general.github_app_access_token', key_path='/home/to_your/key',
      app_id='123456', installation_id='64209')
'''

RETURN = '''

@@ -12,7 +12,7 @@ DOCUMENTATION = """
    - Mark Ettema (@m-a-r-k-e)
    - Alexander Petrenz (@alpex8)
  name: merge_variables
  short_description: merge variables whose names match a given pattern
  short_description: merge variables with a certain suffix
  description:
    - This lookup returns the merged result of all variables in scope that match the given prefixes, suffixes, or
      regular expressions, optionally.

@@ -468,8 +468,7 @@ class LookupModule(LookupBase):
    def opt_lock(self, type):
        if self.get_option('lock') == type:
            tmpdir = os.environ.get('TMPDIR', '/tmp')
            user = os.environ.get('USER')
            lockfile = os.path.join(tmpdir, '.{0}.passwordstore.lock'.format(user))
            lockfile = os.path.join(tmpdir, '.passwordstore.lock')
            with FileLock().lock_file(lockfile, tmpdir, self.lock_timeout):
                self.locked = type
                yield

@@ -104,37 +104,37 @@ EXAMPLES = r"""
- name: Generate random string
  ansible.builtin.debug:
    var: lookup('community.general.random_string')
  # Example result: 'DeadBeeF'
  # Example result: ['DeadBeeF']

- name: Generate random string with length 12
  ansible.builtin.debug:
    var: lookup('community.general.random_string', length=12)
  # Example result: 'Uan0hUiX5kVG'
  # Example result: ['Uan0hUiX5kVG']

- name: Generate base64 encoded random string
  ansible.builtin.debug:
    var: lookup('community.general.random_string', base64=True)
  # Example result: 'NHZ6eWN5Qk0='
  # Example result: ['NHZ6eWN5Qk0=']

- name: Generate a random string with 1 lower, 1 upper, 1 number and 1 special char (at least)
  ansible.builtin.debug:
    var: lookup('community.general.random_string', min_lower=1, min_upper=1, min_special=1, min_numeric=1)
  # Example result: '&Qw2|E[-'
  # Example result: ['&Qw2|E[-']

- name: Generate a random string with all lower case characters
  ansible.builtin.debug:
  debug:
    var: query('community.general.random_string', upper=false, numbers=false, special=false)
  # Example result: ['exolxzyz']

- name: Generate random hexadecimal string
  ansible.builtin.debug:
  debug:
    var: query('community.general.random_string', upper=false, lower=false, override_special=hex_chars, numbers=false)
  vars:
    hex_chars: '0123456789ABCDEF'
  # Example result: ['D2A40737']

- name: Generate random hexadecimal string with override_all
  ansible.builtin.debug:
  debug:
    var: query('community.general.random_string', override_all=hex_chars)
  vars:
    hex_chars: '0123456789ABCDEF'

@@ -89,18 +89,31 @@ class FormatError(CmdRunnerException):


class _ArgFormat(object):
    # DEPRECATION: set default value for ignore_none to True in community.general 12.0.0
    def __init__(self, func, ignore_none=None, ignore_missing_value=False):
        self.func = func
        self.ignore_none = ignore_none
        self.ignore_missing_value = ignore_missing_value

    def __call__(self, value, ctx_ignore_none):
    # DEPRECATION: remove parameter ctx_ignore_none in community.general 12.0.0
    def __call__(self, value, ctx_ignore_none=True):
        # DEPRECATION: replace ctx_ignore_none with True in community.general 12.0.0
        ignore_none = self.ignore_none if self.ignore_none is not None else ctx_ignore_none
        if value is None and ignore_none:
            return []
        f = self.func
        return [str(x) for x in f(value)]

    def __str__(self):
        return "<ArgFormat: func={0}, ignore_none={1}, ignore_missing_value={2}>".format(
            self.func,
            self.ignore_none,
            self.ignore_missing_value,
        )

    def __repr__(self):
        return str(self)

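The net effect of the new ctx_ignore_none default is a simple precedence rule: a format's own ignore_none, when set, beats whatever the runner context passes in. Just that rule, as a standalone sketch (the function name is ours):

def effective_ignore_none(fmt_ignore_none, ctx_ignore_none=True):
    # The per-format setting wins when it is not None; otherwise fall back
    # to the context default (which becomes True once the deprecation lands).
    return fmt_ignore_none if fmt_ignore_none is not None else ctx_ignore_none

assert effective_ignore_none(None) is True
assert effective_ignore_none(False) is False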

class _Format(object):
    @staticmethod
@@ -114,7 +127,7 @@ class _Format(object):

    @staticmethod
    def as_bool_not(args):
        return _ArgFormat(lambda value: [] if value else _ensure_list(args), ignore_none=False)
        return _Format.as_bool([], args, ignore_none=False)

    @staticmethod
    def as_optval(arg, ignore_none=None):

@@ -129,8 +142,15 @@
        return _ArgFormat(lambda value: ["{0}={1}".format(arg, value)], ignore_none=ignore_none)

    @staticmethod
    def as_list(ignore_none=None):
        return _ArgFormat(_ensure_list, ignore_none=ignore_none)
    def as_list(ignore_none=None, min_len=0, max_len=None):
        def func(value):
            value = _ensure_list(value)
            if len(value) < min_len:
                raise ValueError("Parameter must have at least {0} element(s)".format(min_len))
            if max_len is not None and len(value) > max_len:
                raise ValueError("Parameter must have at most {0} element(s)".format(max_len))
            return value
        return _ArgFormat(func, ignore_none=ignore_none)

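With the new min_len/max_len bounds, list parameters can be rejected at argument-formatting time instead of deep inside the module. A behaviour sketch mirroring the inner func above (standalone, not the collection's code):

def check_list_bounds(value, min_len=0, max_len=None):
    value = value if isinstance(value, list) else [value]
    if len(value) < min_len:
        raise ValueError('Parameter must have at least {0} element(s)'.format(min_len))
    if max_len is not None and len(value) > max_len:
        raise ValueError('Parameter must have at most {0} element(s)'.format(max_len))
    return value

assert check_list_bounds('a', min_len=1) == ['a']
# check_list_bounds([], min_len=1) raises ValueError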
    @staticmethod
    def as_fixed(args):

@@ -177,6 +197,19 @@
            return func(**v)
        return wrapper

    @staticmethod
    def stack(fmt):
        @wraps(fmt)
        def wrapper(*args, **kwargs):
            new_func = fmt(ignore_none=True, *args, **kwargs)

            def stacking(value):
                stack = [new_func(v) for v in value if v]
                stack = [x for args in stack for x in args]
                return stack
            return _ArgFormat(stacking, ignore_none=True)
        return wrapper


class CmdRunner(object):
    """
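
stack() lifts a per-value format into one that formats every element of a list and concatenates the results. The flattening step in isolation (standalone sketch):

def stack_format(format_one, values):
    # Format each truthy element separately, then flatten the per-element
    # argument lists into a single argv fragment.
    per_element = [format_one(v) for v in values if v]
    return [arg for args in per_element for arg in args]

# With format_one = lambda v: ['--repo', str(v)]:
# stack_format(format_one, ['alpha', 'beta']) -> ['--repo', 'alpha', '--repo', 'beta']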
@@ -197,7 +230,11 @@ class CmdRunner(object):
        self.default_args_order = self._prepare_args_order(default_args_order)
        if arg_formats is None:
            arg_formats = {}
        self.arg_formats = dict(arg_formats)
        self.arg_formats = {}
        for fmt_name, fmt in arg_formats.items():
            if not isinstance(fmt, _ArgFormat):
                fmt = _Format.as_func(func=fmt, ignore_none=True)
            self.arg_formats[fmt_name] = fmt
        self.check_rc = check_rc
        self.force_lang = force_lang
        self.path_prefix = path_prefix

@@ -216,7 +253,16 @@
    def binary(self):
        return self.command[0]

    def __call__(self, args_order=None, output_process=None, ignore_value_none=True, check_mode_skip=False, check_mode_return=None, **kwargs):
    # remove parameter ignore_value_none in community.general 12.0.0
    def __call__(self, args_order=None, output_process=None, ignore_value_none=None, check_mode_skip=False, check_mode_return=None, **kwargs):
        if ignore_value_none is None:
            ignore_value_none = True
        else:
            self.module.deprecate(
                "Using ignore_value_none when creating the runner context is now deprecated, "
                "and the parameter will be removed in community.general 12.0.0. ",
                version="12.0.0", collection_name="community.general"
            )
        if output_process is None:
            output_process = _process_as_is
        if args_order is None:

@@ -228,7 +274,7 @@
        return _CmdRunnerContext(runner=self,
                                 args_order=args_order,
                                 output_process=output_process,
                                 ignore_value_none=ignore_value_none,
                                 ignore_value_none=ignore_value_none,  # DEPRECATION: remove in community.general 12.0.0
                                 check_mode_skip=check_mode_skip,
                                 check_mode_return=check_mode_return, **kwargs)

@@ -244,6 +290,7 @@ class _CmdRunnerContext(object):
        self.runner = runner
        self.args_order = tuple(args_order)
        self.output_process = output_process
        # DEPRECATION: parameter ignore_value_none at the context level is deprecated and will be removed in community.general 12.0.0
        self.ignore_value_none = ignore_value_none
        self.check_mode_skip = check_mode_skip
        self.check_mode_return = check_mode_return

@@ -283,6 +330,7 @@ class _CmdRunnerContext(object):
                value = named_args[arg_name]
            elif not runner.arg_formats[arg_name].ignore_missing_value:
                raise MissingArgumentValue(self.args_order, arg_name)
            # DEPRECATION: remove parameter ctx_ignore_none in 12.0.0
            self.cmd.extend(runner.arg_formats[arg_name](value, ctx_ignore_none=self.ignore_value_none))
        except MissingArgumentValue:
            raise

@@ -299,7 +347,7 @@ class _CmdRunnerContext(object):
    @property
    def run_info(self):
        return dict(
            ignore_value_none=self.ignore_value_none,
            ignore_value_none=self.ignore_value_none,  # DEPRECATION: remove in community.general 12.0.0
            check_rc=self.check_rc,
            environ_update=self.environ_update,
            args_order=self.args_order,

@@ -10,6 +10,7 @@ __metaclass__ = type

import copy
import json
import re

from ansible.module_utils.six.moves.urllib import error as urllib_error
from ansible.module_utils.six.moves.urllib.parse import urlencode

@@ -68,6 +69,25 @@ def camel_case_key(key):
    return "".join(parts)


def validate_check(check):
    validate_duration_keys = ['Interval', 'Ttl', 'Timeout']
    validate_tcp_regex = r"(?P<host>.*):(?P<port>(?:[0-9]+))$"
    if check.get('Tcp') is not None:
        match = re.match(validate_tcp_regex, check['Tcp'])
        if not match:
            raise Exception('tcp check must be in host:port format')
    for duration in validate_duration_keys:
        if duration in check and check[duration] is not None:
            check[duration] = validate_duration(check[duration])


def validate_duration(duration):
    if duration:
        if not re.search(r"\d+(?:ns|us|ms|s|m|h)", duration):
            duration = "{0}s".format(duration)
    return duration

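validate_duration() normalizes bare numbers into Consul duration strings and leaves values that already carry a unit untouched; validate_check() applies it to the Interval, Ttl, and Timeout keys. The duration function as shown is self-contained:

import re

def validate_duration(duration):
    # Append 's' only when no Consul time unit (ns/us/ms/s/m/h) is present.
    if duration:
        if not re.search(r"\d+(?:ns|us|ms|s|m|h)", duration):
            duration = "{0}s".format(duration)
    return duration

assert validate_duration("30") == "30s"
assert validate_duration("500ms") == "500ms"
assert validate_duration(None) is None
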
STATE_PARAMETER = "state"
STATE_PRESENT = "present"
STATE_ABSENT = "absent"

@@ -81,7 +101,7 @@ OPERATION_DELETE = "remove"
def _normalize_params(params, arg_spec):
    final_params = {}
    for k, v in params.items():
        if k not in arg_spec:  # Alias
        if k not in arg_spec or v is None:  # Alias
            continue
        spec = arg_spec[k]
        if (

@@ -105,9 +125,10 @@ class _ConsulModule:
    """

    api_endpoint = None  # type: str
    unique_identifier = None  # type: str
    unique_identifiers = None  # type: list
    result_key = None  # type: str
    create_only_fields = set()
    operational_attributes = set()
    params = {}

    def __init__(self, module):

@@ -119,6 +140,8 @@ class _ConsulModule:
            if k not in STATE_PARAMETER and k not in AUTH_ARGUMENTS_SPEC
        }

        self.operational_attributes.update({"CreateIndex", "CreateTime", "Hash", "ModifyIndex"})

    def execute(self):
        obj = self.read_object()

@@ -203,14 +226,24 @@ class _ConsulModule:
        return False

    def prepare_object(self, existing, obj):
        operational_attributes = {"CreateIndex", "CreateTime", "Hash", "ModifyIndex"}
        existing = {
            k: v for k, v in existing.items() if k not in operational_attributes
            k: v for k, v in existing.items() if k not in self.operational_attributes
        }
        for k, v in obj.items():
            existing[k] = v
        return existing

    def id_from_obj(self, obj, camel_case=False):
        def key_func(key):
            return camel_case_key(key) if camel_case else key

        if self.unique_identifiers:
            for identifier in self.unique_identifiers:
                identifier = key_func(identifier)
                if identifier in obj:
                    return obj[identifier]
        return None

    def endpoint_url(self, operation, identifier=None):
        if operation == OPERATION_CREATE:
            return self.api_endpoint

@@ -219,7 +252,8 @@ class _ConsulModule:
        raise RuntimeError("invalid arguments passed")

    def read_object(self):
        url = self.endpoint_url(OPERATION_READ, self.params.get(self.unique_identifier))
        identifier = self.id_from_obj(self.params)
        url = self.endpoint_url(OPERATION_READ, identifier)
        try:
            return self.get(url)
        except RequestError as e:

@@ -233,25 +267,28 @@ class _ConsulModule:
        if self._module.check_mode:
            return obj
        else:
            return self.put(self.api_endpoint, data=self.prepare_object({}, obj))
            url = self.endpoint_url(OPERATION_CREATE)
            created_obj = self.put(url, data=self.prepare_object({}, obj))
            if created_obj is None:
                created_obj = self.read_object()
            return created_obj

    def update_object(self, existing, obj):
        url = self.endpoint_url(
            OPERATION_UPDATE, existing.get(camel_case_key(self.unique_identifier))
        )
        merged_object = self.prepare_object(existing, obj)
        if self._module.check_mode:
            return merged_object
        else:
            return self.put(url, data=merged_object)
            url = self.endpoint_url(OPERATION_UPDATE, self.id_from_obj(existing, camel_case=True))
            updated_obj = self.put(url, data=merged_object)
            if updated_obj is None:
                updated_obj = self.read_object()
            return updated_obj

    def delete_object(self, obj):
        if self._module.check_mode:
            return {}
        else:
            url = self.endpoint_url(
                OPERATION_DELETE, obj.get(camel_case_key(self.unique_identifier))
            )
            url = self.endpoint_url(OPERATION_DELETE, self.id_from_obj(obj, camel_case=True))
            return self.delete(url)

    def _request(self, method, url_parts, data=None, params=None):

@@ -309,7 +346,9 @@ class _ConsulModule:
        if 400 <= status < 600:
            raise RequestError(status, response_data)

        return json.loads(response_data)
        if response_data:
            return json.loads(response_data)
        return None

    def get(self, url_parts, **kwargs):
        return self._request("GET", url_parts, **kwargs)

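Moving from a single unique_identifier to a unique_identifiers list means the first configured identifier actually present in the object wins. That lookup in isolation (standalone sketch; the real camel_case_key has more special cases than this stub):

def id_from_obj_sketch(obj, unique_identifiers, camel_case=False):
    def key_func(key):
        return "".join(p.capitalize() for p in key.split("_")) if camel_case else key

    for identifier in unique_identifiers or []:
        identifier = key_func(identifier)
        if identifier in obj:
            return obj[identifier]
    return None

assert id_from_obj_sketch({"Name": "svc"}, ["name"], camel_case=True) == "svc"
assert id_from_obj_sketch({}, ["name"]) is None
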
plugins/module_utils/django.py (new file, 116 lines)
@@ -0,0 +1,116 @@

# -*- coding: utf-8 -*-
# Copyright (c) 2024, Alexei Znamensky <russoz@gmail.com>
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
# SPDX-License-Identifier: GPL-3.0-or-later

from __future__ import absolute_import, division, print_function
__metaclass__ = type


from ansible.module_utils.common.dict_transformations import dict_merge
from ansible_collections.community.general.plugins.module_utils.cmd_runner import cmd_runner_fmt
from ansible_collections.community.general.plugins.module_utils.python_runner import PythonRunner
from ansible_collections.community.general.plugins.module_utils.module_helper import ModuleHelper


django_std_args = dict(
    # environmental options
    venv=dict(type="path"),
    # default options of django-admin
    settings=dict(type="str", required=True),
    pythonpath=dict(type="path"),
    traceback=dict(type="bool"),
    verbosity=dict(type="int", choices=[0, 1, 2, 3]),
    skip_checks=dict(type="bool"),
)

_django_std_arg_fmts = dict(
    command=cmd_runner_fmt.as_list(),
    settings=cmd_runner_fmt.as_opt_eq_val("--settings"),
    pythonpath=cmd_runner_fmt.as_opt_eq_val("--pythonpath"),
    traceback=cmd_runner_fmt.as_bool("--traceback"),
    verbosity=cmd_runner_fmt.as_opt_val("--verbosity"),
    no_color=cmd_runner_fmt.as_fixed("--no-color"),
    skip_checks=cmd_runner_fmt.as_bool("--skip-checks"),
)

_django_database_args = dict(
    database=dict(type="str", default="default"),
)

_args_menu = dict(
    std=(django_std_args, _django_std_arg_fmts),
    database=(_django_database_args, {"database": cmd_runner_fmt.as_opt_eq_val("--database")}),
    noinput=({}, {"noinput": cmd_runner_fmt.as_fixed("--noinput")}),
    dry_run=({}, {"dry_run": cmd_runner_fmt.as_bool("--dry-run")}),
    check=({}, {"check": cmd_runner_fmt.as_bool("--check")}),
)


class _DjangoRunner(PythonRunner):
    def __init__(self, module, arg_formats=None, **kwargs):
        arg_fmts = dict(arg_formats) if arg_formats else {}
        arg_fmts.update(_django_std_arg_fmts)

        super(_DjangoRunner, self).__init__(module, ["-m", "django"], arg_formats=arg_fmts, **kwargs)

    def __call__(self, output_process=None, ignore_value_none=True, check_mode_skip=False, check_mode_return=None, **kwargs):
        args_order = (
            ("command", "no_color", "settings", "pythonpath", "traceback", "verbosity", "skip_checks") + self._prepare_args_order(self.default_args_order)
        )
        return super(_DjangoRunner, self).__call__(args_order, output_process, ignore_value_none, check_mode_skip, check_mode_return, **kwargs)


class DjangoModuleHelper(ModuleHelper):
    module = {}
    use_old_vardict = False
    django_admin_cmd = None
    arg_formats = {}
    django_admin_arg_order = ()
    _django_args = []
    _check_mode_arg = ""

    def __init__(self):
        self.module["argument_spec"], self.arg_formats = self._build_args(self.module.get("argument_spec", {}),
                                                                          self.arg_formats,
                                                                          *(["std"] + self._django_args))
        super(DjangoModuleHelper, self).__init__(self.module)
        if self.django_admin_cmd is not None:
            self.vars.command = self.django_admin_cmd

    @staticmethod
    def _build_args(arg_spec, arg_format, *names):
        res_arg_spec = {}
        res_arg_fmts = {}
        for name in names:
            args, fmts = _args_menu[name]
            res_arg_spec = dict_merge(res_arg_spec, args)
            res_arg_fmts = dict_merge(res_arg_fmts, fmts)
        res_arg_spec = dict_merge(res_arg_spec, arg_spec)
        res_arg_fmts = dict_merge(res_arg_fmts, arg_format)

        return res_arg_spec, res_arg_fmts

    def __run__(self):
        runner = _DjangoRunner(self.module,
                               default_args_order=self.django_admin_arg_order,
                               arg_formats=self.arg_formats,
                               venv=self.vars.venv,
                               check_rc=True)
        with runner() as ctx:
            run_params = self.vars.as_dict()
            if self._check_mode_arg:
                run_params.update({self._check_mode_arg: self.check_mode})
            results = ctx.run(**run_params)
            self.vars.stdout = ctx.results_out
            self.vars.stderr = ctx.results_err
            self.vars.cmd = ctx.cmd
            if self.verbosity >= 3:
                self.vars.run_info = ctx.run_info

        return results

    @classmethod
    def execute(cls):
        cls().run()
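
A concrete module then reduces to a thin subclass of DjangoModuleHelper. A hypothetical sketch (the class name and option set are invented for illustration; the real modules in this release follow the same shape):

from ansible_collections.community.general.plugins.module_utils.django import DjangoModuleHelper


class DjangoCreateCacheTableSketch(DjangoModuleHelper):
    # Runs "python -m django createcachetable" with the standard options
    # (settings, pythonpath, venv, ...) handled by the helper.
    module = dict(
        argument_spec=dict(),  # django_std_args is merged in by _build_args()
        supports_check_mode=True,
    )
    django_admin_cmd = "createcachetable"
    _django_args = ["noinput", "database", "dry_run"]


def main():
    DjangoCreateCacheTableSketch.execute()


if __name__ == '__main__':
    main()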
@@ -33,6 +33,7 @@ class GandiLiveDNSAPI(object):
    def __init__(self, module):
        self.module = module
        self.api_key = module.params['api_key']
        self.personal_access_token = module.params['personal_access_token']

    def _build_error_message(self, module, info):
        s = ''

@@ -50,7 +51,12 @@ class GandiLiveDNSAPI(object):
        return s

    def _gandi_api_call(self, api_call, method='GET', payload=None, error_on_404=True):
        headers = {'Authorization': 'Apikey {0}'.format(self.api_key),
        authorization_header = (
            'Bearer {0}'.format(self.personal_access_token)
            if self.personal_access_token
            else 'Apikey {0}'.format(self.api_key)
        )
        headers = {'Authorization': authorization_header,
                   'Content-Type': 'application/json'}
        data = None
        if payload:

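The selection rule: prefer the new personal access token, sent with the Bearer scheme, and only fall back to the legacy Apikey header. In isolation (standalone sketch):

def gandi_authorization(api_key=None, personal_access_token=None):
    if personal_access_token:
        return 'Bearer {0}'.format(personal_access_token)
    return 'Apikey {0}'.format(api_key)

assert gandi_authorization(api_key='k') == 'Apikey k'
assert gandi_authorization(api_key='k', personal_access_token='t') == 'Bearer t'
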
@@ -115,6 +115,11 @@ def gitlab_authentication(module, min_version=None):
    # Changelog: https://github.com/python-gitlab/python-gitlab/releases/tag/v1.13.0
    # This condition allows to still support older versions of the python-gitlab library
    if LooseVersion(gitlab.__version__) < LooseVersion("1.13.0"):
        module.deprecate(
            "GitLab basic auth is deprecated and will be removed in next major version, "
            "using another auth method (API token or OAuth) is strongly recommended.",
            version='10.0.0',
            collection_name='community.general')
        gitlab_instance = gitlab.Gitlab(url=gitlab_url, ssl_verify=verify, email=gitlab_user, password=gitlab_password,
                                        private_token=gitlab_token, api_version=4)
    else:

plugins/module_utils/homebrew.py (new file, 115 lines)
@@ -0,0 +1,115 @@

# -*- coding: utf-8 -*-
# Copyright (c) Ansible project
# Simplified BSD License (see LICENSES/BSD-2-Clause.txt or https://opensource.org/licenses/BSD-2-Clause)
# SPDX-License-Identifier: BSD-2-Clause

from __future__ import absolute_import, division, print_function

__metaclass__ = type

import os
import re
from ansible.module_utils.six import string_types


def _create_regex_group_complement(s):
    lines = (line.strip() for line in s.split("\n") if line.strip())
    chars = filter(None, (line.split("#")[0].strip() for line in lines))
    group = r"[^" + r"".join(chars) + r"]"
    return re.compile(group)


class HomebrewValidate(object):
    # class regexes ------------------------------------------------ {{{
    VALID_PATH_CHARS = r"""
        \w                  # alphanumeric characters (i.e., [a-zA-Z0-9_])
        \s                  # spaces
        :                   # colons
        {sep}               # the OS-specific path separator
        .                   # dots
        \-                  # dashes
    """.format(
        sep=os.path.sep
    )

    VALID_BREW_PATH_CHARS = r"""
        \w                  # alphanumeric characters (i.e., [a-zA-Z0-9_])
        \s                  # spaces
        {sep}               # the OS-specific path separator
        .                   # dots
        \-                  # dashes
    """.format(
        sep=os.path.sep
    )

    VALID_PACKAGE_CHARS = r"""
        \w                  # alphanumeric characters (i.e., [a-zA-Z0-9_])
        .                   # dots
        /                   # slash (for taps)
        \+                  # plusses
        \-                  # dashes
        :                   # colons (for URLs)
        @                   # at-sign
    """

    INVALID_PATH_REGEX = _create_regex_group_complement(VALID_PATH_CHARS)
    INVALID_BREW_PATH_REGEX = _create_regex_group_complement(VALID_BREW_PATH_CHARS)
    INVALID_PACKAGE_REGEX = _create_regex_group_complement(VALID_PACKAGE_CHARS)
    # /class regexes ----------------------------------------------- }}}

    # class validations -------------------------------------------- {{{
    @classmethod
    def valid_path(cls, path):
        """
        `path` must be one of:
         - list of paths
         - a string containing only:
             - alphanumeric characters
             - dashes
             - dots
             - spaces
             - colons
             - os.path.sep
        """

        if isinstance(path, string_types):
            return not cls.INVALID_PATH_REGEX.search(path)

        try:
            iter(path)
        except TypeError:
            return False
        else:
            paths = path
            return all(cls.valid_brew_path(path_) for path_ in paths)

    @classmethod
    def valid_brew_path(cls, brew_path):
        """
        `brew_path` must be one of:
         - None
         - a string containing only:
             - alphanumeric characters
             - dashes
             - dots
             - spaces
             - os.path.sep
        """

        if brew_path is None:
            return True

        return isinstance(
            brew_path, string_types
        ) and not cls.INVALID_BREW_PATH_REGEX.search(brew_path)

    @classmethod
    def valid_package(cls, package):
        """A valid package is either None or alphanumeric."""

        if package is None:
            return True

        return isinstance(
            package, string_types
        ) and not cls.INVALID_PACKAGE_REGEX.search(package)
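
Given those character classes, a quick behaviour check of the validators (assuming HomebrewValidate is imported as above): tap-qualified and versioned package names pass, whitespace does not.

assert HomebrewValidate.valid_package("git")
assert HomebrewValidate.valid_package("homebrew/core/python@3.12")
assert not HomebrewValidate.valid_package("not a package")
assert HomebrewValidate.valid_package(None)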
@@ -29,7 +29,6 @@ class iLORedfishUtils(RedfishUtils):
        result['ret'] = True
        data = response['data']

        current_session = None
        if 'Oem' in data:
            if data["Oem"]["Hpe"]["Links"]["MySession"]["@odata.id"]:
                current_session = data["Oem"]["Hpe"]["Links"]["MySession"]["@odata.id"]

@@ -7,13 +7,14 @@
from __future__ import absolute_import, division, print_function
__metaclass__ = type

import traceback

from ansible_collections.community.general.plugins.module_utils.mh.base import ModuleHelperBase
from ansible_collections.community.general.plugins.module_utils.mh.deco import module_fails_on_exception


class DependencyCtxMgr(object):
    """
    DEPRECATION WARNING

    This class is deprecated and will be removed in community.general 11.0.0
    Modules should use plugins/module_utils/deps.py instead.
    """
    def __init__(self, name, msg=None):
        self.name = name
        self.msg = msg

@@ -35,39 +36,3 @@ class DependencyCtxMgr(object):
    @property
    def text(self):
        return self.msg or str(self.exc_val)


class DependencyMixin(ModuleHelperBase):
    """
    THIS CLASS IS BEING DEPRECATED.
    See the deprecation notice in ``DependencyMixin.fail_on_missing_deps()`` below.

    Mixin for mapping module options to running a CLI command with its arguments.
    """
    _dependencies = []

    @classmethod
    def dependency(cls, name, msg):
        cls._dependencies.append(DependencyCtxMgr(name, msg))
        return cls._dependencies[-1]

    def fail_on_missing_deps(self):
        if not self._dependencies:
            return
        self.module.deprecate(
            'The DependencyMixin is being deprecated. '
            'Modules should use community.general.plugins.module_utils.deps instead.',
            version='9.0.0',
            collection_name='community.general',
        )
        for d in self._dependencies:
            if not d.has_it:
                self.module.fail_json(changed=False,
                                      exception="\n".join(traceback.format_exception(d.exc_type, d.exc_val, d.exc_tb)),
                                      msg=d.text,
                                      **self.output)

    @module_fails_on_exception
    def run(self):
        self.fail_on_missing_deps()
        super(DependencyMixin, self).run()

@@ -14,7 +14,7 @@ class VarMeta(object):
    """
    DEPRECATION WARNING

    This class is deprecated and will be removed in community.general 10.0.0
    This class is deprecated and will be removed in community.general 11.0.0
    Modules should use the VarDict from plugins/module_utils/vardict.py instead.
    """

@@ -70,7 +70,7 @@ class VarDict(object):
    """
    DEPRECATION WARNING

    This class is deprecated and will be removed in community.general 10.0.0
    This class is deprecated and will be removed in community.general 11.0.0
    Modules should use the VarDict from plugins/module_utils/vardict.py instead.
    """
    def __init__(self):

@@ -139,7 +139,7 @@ class VarsMixin(object):
    """
    DEPRECATION WARNING

    This class is deprecated and will be removed in community.general 10.0.0
    This class is deprecated and will be removed in community.general 11.0.0
    Modules should use the VarDict from plugins/module_utils/vardict.py instead.
    """
    def __init__(self, module=None):

@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
# (c) 2020, Alexei Znamensky <russoz@gmail.com>
# Copyright (c) 2020, Ansible Project
# (c) 2020-2024, Alexei Znamensky <russoz@gmail.com>
# Copyright (c) 2020-2024, Ansible Project
# Simplified BSD License (see LICENSES/BSD-2-Clause.txt or https://opensource.org/licenses/BSD-2-Clause)
# SPDX-License-Identifier: BSD-2-Clause

@@ -10,23 +10,40 @@ __metaclass__ = type

from ansible.module_utils.common.dict_transformations import dict_merge

from ansible_collections.community.general.plugins.module_utils.vardict import VarDict as _NewVarDict  # remove "as NewVarDict" in 11.0.0
# (TODO: remove AnsibleModule!) pylint: disable-next=unused-import
from ansible_collections.community.general.plugins.module_utils.mh.base import ModuleHelperBase, AnsibleModule  # noqa: F401
from ansible_collections.community.general.plugins.module_utils.mh.base import AnsibleModule  # noqa: F401 DEPRECATED, remove in 11.0.0
from ansible_collections.community.general.plugins.module_utils.mh.base import ModuleHelperBase
from ansible_collections.community.general.plugins.module_utils.mh.mixins.state import StateMixin
from ansible_collections.community.general.plugins.module_utils.mh.mixins.deps import DependencyMixin
from ansible_collections.community.general.plugins.module_utils.mh.mixins.vars import VarsMixin
# (TODO: remove mh.mixins.vars!) pylint: disable-next=unused-import
from ansible_collections.community.general.plugins.module_utils.mh.mixins.vars import VarsMixin, VarDict as _OldVarDict  # noqa: F401 remove in 11.0.0
from ansible_collections.community.general.plugins.module_utils.mh.mixins.deprecate_attrs import DeprecateAttrsMixin


class ModuleHelper(DeprecateAttrsMixin, VarsMixin, DependencyMixin, ModuleHelperBase):
class ModuleHelper(DeprecateAttrsMixin, ModuleHelperBase):
    facts_name = None
    output_params = ()
    diff_params = ()
    change_params = ()
    facts_params = ()
    use_old_vardict = True  # remove in 11.0.0
    mute_vardict_deprecation = False

    def __init__(self, module=None):
        super(ModuleHelper, self).__init__(module)
        if self.use_old_vardict:  # remove first half of the if in 11.0.0
            self.vars = _OldVarDict()
            super(ModuleHelper, self).__init__(module)
            if not self.mute_vardict_deprecation:
                self.module.deprecate(
                    "This class is using the old VarDict from ModuleHelper, which is deprecated. "
                    "Set the class variable use_old_vardict to False and make the necessary adjustments."
                    "The old VarDict class will be removed in community.general 11.0.0",
                    version="11.0.0", collection_name="community.general"
                )
        else:
            self.vars = _NewVarDict()
            super(ModuleHelper, self).__init__(module)

        for name, value in self.module.params.items():
            self.vars.set(
                name, value,

@@ -36,6 +53,12 @@ class ModuleHelper(DeprecateAttrsMixin, VarsMixin, DependencyMixin, ModuleHelper
                fact=name in self.facts_params,
            )

    def update_vars(self, meta=None, **kwargs):
        if meta is None:
            meta = {}
        for k, v in kwargs.items():
            self.vars.set(k, v, **meta)

    def update_output(self, **kwargs):
        self.update_vars(meta={"output": True}, **kwargs)

@@ -43,7 +66,10 @@ class ModuleHelper(DeprecateAttrsMixin, VarsMixin, DependencyMixin, ModuleHelper
        self.update_vars(meta={"fact": True}, **kwargs)

    def _vars_changed(self):
        return any(self.vars.has_changed(v) for v in self.vars.change_vars())
        if self.use_old_vardict:
            return any(self.vars.has_changed(v) for v in self.vars.change_vars())

        return self.vars.has_changed

    def has_changed(self):
        return self.changed or self._vars_changed()
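
For module authors the migration is essentially a one-line switch plus dropping any use of the removed mixin APIs. A hypothetical subclass opting into the new VarDict (class and variable names are ours):

from ansible_collections.community.general.plugins.module_utils.module_helper import ModuleHelper


class MyHelperSketch(ModuleHelper):
    use_old_vardict = False  # silences the deprecation shown above

    def __run__(self):
        # self.vars is now the VarDict from plugins/module_utils/vardict.py
        self.vars.set('name', 'example', output=True)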
@@ -9,14 +9,14 @@ __metaclass__ = type

# pylint: disable=unused-import

from ansible_collections.community.general.plugins.module_utils.mh.module_helper import (
    ModuleHelper, StateModuleHelper, AnsibleModule
    ModuleHelper, StateModuleHelper,
    AnsibleModule  # remove in 11.0.0
)
from ansible_collections.community.general.plugins.module_utils.mh.mixins.state import StateMixin  # noqa: F401
from ansible_collections.community.general.plugins.module_utils.mh.mixins.deps import DependencyCtxMgr, DependencyMixin  # noqa: F401
from ansible_collections.community.general.plugins.module_utils.mh.mixins.state import StateMixin  # noqa: F401 remove in 11.0.0
from ansible_collections.community.general.plugins.module_utils.mh.mixins.deps import DependencyCtxMgr  # noqa: F401 remove in 11.0.0
from ansible_collections.community.general.plugins.module_utils.mh.exceptions import ModuleHelperException  # noqa: F401
from ansible_collections.community.general.plugins.module_utils.mh.deco import (
    cause_changes, module_fails_on_exception, check_mode_skip, check_mode_skip_returns,
)
from ansible_collections.community.general.plugins.module_utils.mh.mixins.vars import VarMeta, VarDict, VarsMixin  # noqa: F401
from ansible_collections.community.general.plugins.module_utils.mh.mixins.vars import VarMeta, VarDict, VarsMixin  # noqa: F401 remove in 11.0.0

@@ -29,6 +29,9 @@ def proxmox_auth_argument_spec():
            required=True,
            fallback=(env_fallback, ['PROXMOX_HOST'])
        ),
        api_port=dict(type='int',
                      fallback=(env_fallback, ['PROXMOX_PORT'])
                      ),
        api_user=dict(type='str',
                      required=True,
                      fallback=(env_fallback, ['PROXMOX_USER'])

@@ -82,6 +85,7 @@ class ProxmoxAnsible(object):

    def _connect(self):
        api_host = self.module.params['api_host']
        api_port = self.module.params['api_port']
        api_user = self.module.params['api_user']
        api_password = self.module.params['api_password']
        api_token_id = self.module.params['api_token_id']

@@ -89,6 +93,10 @@ class ProxmoxAnsible(object):
        validate_certs = self.module.params['validate_certs']

        auth_args = {'user': api_user}

        if api_port:
            auth_args['port'] = api_port

        if api_password:
            auth_args['password'] = api_password
        else:

@@ -103,6 +103,7 @@ def puppet_runner(module):
        modulepath=cmd_runner_fmt.as_opt_eq_val("--modulepath"),
        _execute=cmd_runner_fmt.as_func(execute_func),
        summarize=cmd_runner_fmt.as_bool("--summarize"),
        waitforlock=cmd_runner_fmt.as_opt_val("--waitforlock"),
        debug=cmd_runner_fmt.as_bool("--debug"),
        verbose=cmd_runner_fmt.as_bool("--verbose"),
    ),

plugins/module_utils/python_runner.py (new file, 34 lines)
@@ -0,0 +1,34 @@

# -*- coding: utf-8 -*-
# Copyright (c) 2024, Alexei Znamensky <russoz@gmail.com>
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
# SPDX-License-Identifier: GPL-3.0-or-later

from __future__ import absolute_import, division, print_function
__metaclass__ = type

import os

from ansible_collections.community.general.plugins.module_utils.cmd_runner import CmdRunner, _ensure_list


class PythonRunner(CmdRunner):
    def __init__(self, module, command, arg_formats=None, default_args_order=(),
                 check_rc=False, force_lang="C", path_prefix=None, environ_update=None,
                 python="python", venv=None):
        self.python = python
        self.venv = venv
        self.has_venv = venv is not None

        if (os.path.isabs(python) or '/' in python):
            self.python = python
        elif self.has_venv:
            path_prefix = os.path.join(venv, "bin")
            if environ_update is None:
                environ_update = {}
            environ_update["PATH"] = "%s:%s" % (path_prefix, os.environ["PATH"])
            environ_update["VIRTUAL_ENV"] = venv

        python_cmd = [self.python] + _ensure_list(command)

        super(PythonRunner, self).__init__(module, python_cmd, arg_formats, default_args_order,
                                           check_rc, force_lang, path_prefix, environ_update)
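
The interesting part of PythonRunner is the interpreter/venv resolution: an absolute or path-qualified python is used verbatim, while a bare name combined with a venv redirects PATH and VIRTUAL_ENV into the venv. That logic in isolation (standalone sketch):

import os

def resolve_python(python="python", venv=None):
    # Mirrors PythonRunner.__init__ above: returns the interpreter to invoke
    # and the environment overrides the runner would apply.
    env = {}
    if os.path.isabs(python) or "/" in python:
        return python, env
    if venv is not None:
        bin_dir = os.path.join(venv, "bin")
        env["PATH"] = "%s:%s" % (bin_dir, os.environ["PATH"])
        env["VIRTUAL_ENV"] = venv
    return python, env

# resolve_python("python", venv="/opt/app/venv")
# -> ("python", {"PATH": "/opt/app/venv/bin:$PATH", "VIRTUAL_ENV": "/opt/app/venv"})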
@@ -1,334 +0,0 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
# This code is part of Ansible, but is an independent component.
|
||||
# This particular file snippet, and this file snippet only, is BSD licensed.
|
||||
# Modules you write using this snippet, which is embedded dynamically by
|
||||
# Ansible still belong to the author of the module, and may assign their own
|
||||
# license to the complete work.
|
||||
#
|
||||
# Copyright (c), Michael DeHaan <michael.dehaan@gmail.com>, 2012-2013
|
||||
#
|
||||
# Simplified BSD License (see LICENSES/BSD-2-Clause.txt or https://opensource.org/licenses/BSD-2-Clause)
|
||||
# SPDX-License-Identifier: BSD-2-Clause
|
||||
|
||||
from __future__ import (absolute_import, division, print_function)
|
||||
__metaclass__ = type
|
||||
|
||||
|
||||
import os
|
||||
import re
|
||||
from uuid import UUID
|
||||
|
||||
from ansible.module_utils.six import text_type, binary_type
|
||||
|
||||
FINAL_STATUSES = ('ACTIVE', 'ERROR')
|
||||
VOLUME_STATUS = ('available', 'attaching', 'creating', 'deleting', 'in-use',
|
||||
'error', 'error_deleting')
|
||||
|
||||
CLB_ALGORITHMS = ['RANDOM', 'LEAST_CONNECTIONS', 'ROUND_ROBIN',
|
||||
'WEIGHTED_LEAST_CONNECTIONS', 'WEIGHTED_ROUND_ROBIN']
|
||||
CLB_PROTOCOLS = ['DNS_TCP', 'DNS_UDP', 'FTP', 'HTTP', 'HTTPS', 'IMAPS',
|
||||
'IMAPv4', 'LDAP', 'LDAPS', 'MYSQL', 'POP3', 'POP3S', 'SMTP',
|
||||
'TCP', 'TCP_CLIENT_FIRST', 'UDP', 'UDP_STREAM', 'SFTP']
|
||||
|
||||
NON_CALLABLES = (text_type, binary_type, bool, dict, int, list, type(None))
|
||||
PUBLIC_NET_ID = "00000000-0000-0000-0000-000000000000"
|
||||
SERVICE_NET_ID = "11111111-1111-1111-1111-111111111111"
|
||||
|
||||
|
||||
def rax_slugify(value):
|
||||
"""Prepend a key with rax_ and normalize the key name"""
|
||||
return 'rax_%s' % (re.sub(r'[^\w-]', '_', value).lower().lstrip('_'))
|
||||
|
||||
|
||||
def rax_clb_node_to_dict(obj):
|
||||
"""Function to convert a CLB Node object to a dict"""
|
||||
if not obj:
|
||||
return {}
|
||||
node = obj.to_dict()
|
||||
node['id'] = obj.id
|
||||
node['weight'] = obj.weight
|
||||
return node
|
||||
|
||||
|
||||
def rax_to_dict(obj, obj_type='standard'):
|
||||
"""Generic function to convert a pyrax object to a dict
|
||||
|
||||
obj_type values:
|
||||
standard
|
||||
clb
|
||||
server
|
||||
|
||||
"""
|
||||
instance = {}
|
||||
for key in dir(obj):
|
||||
value = getattr(obj, key)
|
||||
if obj_type == 'clb' and key == 'nodes':
|
||||
instance[key] = []
|
||||
for node in value:
|
||||
instance[key].append(rax_clb_node_to_dict(node))
|
||||
elif (isinstance(value, list) and len(value) > 0 and
|
||||
not isinstance(value[0], NON_CALLABLES)):
|
||||
instance[key] = []
|
||||
for item in value:
|
||||
instance[key].append(rax_to_dict(item))
|
||||
elif (isinstance(value, NON_CALLABLES) and not key.startswith('_')):
|
||||
if obj_type == 'server':
|
||||
if key == 'image':
|
||||
if not value:
|
||||
instance['rax_boot_source'] = 'volume'
|
||||
else:
|
||||
instance['rax_boot_source'] = 'local'
|
||||
key = rax_slugify(key)
|
||||
instance[key] = value
|
||||
|
||||
if obj_type == 'server':
|
||||
for attr in ['id', 'accessIPv4', 'name', 'status']:
|
||||
instance[attr] = instance.get(rax_slugify(attr))
|
||||
|
||||
return instance
|
||||
|
||||
|
||||
def rax_find_bootable_volume(module, rax_module, server, exit=True):
|
||||
"""Find a servers bootable volume"""
|
||||
cs = rax_module.cloudservers
|
||||
cbs = rax_module.cloud_blockstorage
|
||||
server_id = rax_module.utils.get_id(server)
|
||||
volumes = cs.volumes.get_server_volumes(server_id)
|
||||
bootable_volumes = []
|
||||
for volume in volumes:
|
||||
vol = cbs.get(volume)
|
||||
if module.boolean(vol.bootable):
|
||||
bootable_volumes.append(vol)
|
||||
if not bootable_volumes:
|
||||
if exit:
|
||||
module.fail_json(msg='No bootable volumes could be found for '
|
||||
'server %s' % server_id)
|
||||
else:
|
||||
return False
|
||||
elif len(bootable_volumes) > 1:
|
||||
if exit:
|
||||
module.fail_json(msg='Multiple bootable volumes found for server '
|
||||
'%s' % server_id)
|
||||
else:
|
||||
return False
|
||||
|
||||
return bootable_volumes[0]
|
||||
|
||||
|
||||
def rax_find_image(module, rax_module, image, exit=True):
|
||||
"""Find a server image by ID or Name"""
|
||||
cs = rax_module.cloudservers
|
||||
try:
|
||||
UUID(image)
|
||||
except ValueError:
|
||||
try:
|
||||
image = cs.images.find(human_id=image)
|
||||
except (cs.exceptions.NotFound, cs.exceptions.NoUniqueMatch):
|
||||
try:
|
||||
image = cs.images.find(name=image)
|
||||
except (cs.exceptions.NotFound,
|
||||
cs.exceptions.NoUniqueMatch):
|
||||
if exit:
|
||||
module.fail_json(msg='No matching image found (%s)' %
|
||||
image)
|
||||
else:
|
||||
return False
|
||||
|
||||
return rax_module.utils.get_id(image)
|
||||
|
||||
|
||||
def rax_find_volume(module, rax_module, name):
|
||||
"""Find a Block storage volume by ID or name"""
|
||||
cbs = rax_module.cloud_blockstorage
|
||||
try:
|
||||
UUID(name)
|
||||
volume = cbs.get(name)
|
||||
except ValueError:
|
||||
try:
|
||||
volume = cbs.find(name=name)
|
||||
except rax_module.exc.NotFound:
|
||||
volume = None
|
||||
except Exception as e:
|
||||
module.fail_json(msg='%s' % e)
|
||||
return volume
|
||||
|
||||
|
||||
def rax_find_network(module, rax_module, network):
|
||||
"""Find a cloud network by ID or name"""
|
||||
cnw = rax_module.cloud_networks
|
||||
try:
|
||||
UUID(network)
|
||||
except ValueError:
|
||||
if network.lower() == 'public':
|
||||
return cnw.get_server_networks(PUBLIC_NET_ID)
|
||||
elif network.lower() == 'private':
|
||||
return cnw.get_server_networks(SERVICE_NET_ID)
|
||||
else:
|
||||
try:
|
||||
network_obj = cnw.find_network_by_label(network)
|
||||
except (rax_module.exceptions.NetworkNotFound,
|
||||
rax_module.exceptions.NetworkLabelNotUnique):
|
||||
module.fail_json(msg='No matching network found (%s)' %
|
||||
network)
|
||||
else:
|
||||
return cnw.get_server_networks(network_obj)
|
||||
else:
|
||||
return cnw.get_server_networks(network)
|
||||
|
||||
|
def rax_find_server(module, rax_module, server):
    """Find a Cloud Server by ID or name"""
    cs = rax_module.cloudservers
    try:
        UUID(server)
        server = cs.servers.get(server)
    except ValueError:
        servers = cs.servers.list(search_opts=dict(name='^%s$' % server))
        if not servers:
            module.fail_json(msg='No Server was matched by name, '
                                 'try using the Server ID instead')
        if len(servers) > 1:
            module.fail_json(msg='Multiple servers matched by name, '
                                 'try using the Server ID instead')

        # We made it this far, grab the first and hopefully only server
        # in the list
        server = servers[0]
    return server


def rax_find_loadbalancer(module, rax_module, loadbalancer):
    """Find a Cloud Load Balancer by ID or name"""
    clb = rax_module.cloud_loadbalancers
    try:
        found = clb.get(loadbalancer)
    except Exception:
        found = []
        for lb in clb.list():
            if loadbalancer == lb.name:
                found.append(lb)

        if not found:
            module.fail_json(msg='No loadbalancer was matched')

        if len(found) > 1:
            module.fail_json(msg='Multiple loadbalancers matched')

        # We made it this far, grab the first and hopefully only item
        # in the list
        found = found[0]

    return found


def rax_argument_spec():
    """Return standard base dictionary used for the argument_spec
    argument in AnsibleModule

    """
    return dict(
        api_key=dict(type='str', aliases=['password'], no_log=True),
        auth_endpoint=dict(type='str'),
        credentials=dict(type='path', aliases=['creds_file']),
        env=dict(type='str'),
        identity_type=dict(type='str', default='rackspace'),
        region=dict(type='str'),
        tenant_id=dict(type='str'),
        tenant_name=dict(type='str'),
        username=dict(type='str'),
        validate_certs=dict(type='bool', aliases=['verify_ssl']),
    )


def rax_required_together():
    """Return the default list used for the required_together argument to
    AnsibleModule"""
    return [['api_key', 'username']]


def setup_rax_module(module, rax_module, region_required=True):
    """Set up pyrax in a standard way for all modules"""
    rax_module.USER_AGENT = 'ansible/%s %s' % (module.ansible_version,
                                               rax_module.USER_AGENT)

    api_key = module.params.get('api_key')
    auth_endpoint = module.params.get('auth_endpoint')
    credentials = module.params.get('credentials')
    env = module.params.get('env')
    identity_type = module.params.get('identity_type')
    region = module.params.get('region')
    tenant_id = module.params.get('tenant_id')
    tenant_name = module.params.get('tenant_name')
    username = module.params.get('username')
    verify_ssl = module.params.get('validate_certs')

    if env is not None:
        rax_module.set_environment(env)

    rax_module.set_setting('identity_type', identity_type)
    if verify_ssl is not None:
        rax_module.set_setting('verify_ssl', verify_ssl)
    if auth_endpoint is not None:
        rax_module.set_setting('auth_endpoint', auth_endpoint)
    if tenant_id is not None:
        rax_module.set_setting('tenant_id', tenant_id)
    if tenant_name is not None:
        rax_module.set_setting('tenant_name', tenant_name)

    try:
        username = username or os.environ.get('RAX_USERNAME')
        if not username:
            username = rax_module.get_setting('keyring_username')
            if username:
                api_key = 'USE_KEYRING'
        if not api_key:
            api_key = os.environ.get('RAX_API_KEY')
        credentials = (credentials or os.environ.get('RAX_CREDENTIALS') or
                       os.environ.get('RAX_CREDS_FILE'))
        region = (region or os.environ.get('RAX_REGION') or
                  rax_module.get_setting('region'))
    except KeyError as e:
        module.fail_json(msg='Unable to load %s' % e.message)

    try:
        if api_key and username:
            if api_key == 'USE_KEYRING':
                rax_module.keyring_auth(username, region=region)
            else:
                rax_module.set_credentials(username, api_key=api_key,
                                           region=region)
        elif credentials:
            credentials = os.path.expanduser(credentials)
            rax_module.set_credential_file(credentials, region=region)
        else:
            raise Exception('No credentials supplied!')
    except Exception as e:
        if e.message:
            msg = str(e.message)
        else:
            msg = repr(e)
        module.fail_json(msg=msg)

    if region_required and region not in rax_module.regions:
        module.fail_json(msg='%s is not a valid region, must be one of: %s' %
                             (region, ','.join(rax_module.regions)))

    return rax_module


def rax_scaling_group_personality_file(module, files):
    if not files:
        return []

    results = []
    for rpath, lpath in files.items():
        lpath = os.path.expanduser(lpath)
        try:
            with open(lpath, 'r') as f:
                results.append({
                    'path': rpath,
                    'contents': f.read(),
                })
        except Exception as e:
            module.fail_json(msg='Failed to load %s: %s' % (lpath, str(e)))
    return results
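Taken together, these helpers give every rax module the same bootstrap shape. A minimal sketch of how a module would wire them up (hypothetical module; assumes `pyrax` is importable and the helpers above are in scope):

```python
from ansible.module_utils.basic import AnsibleModule
import pyrax  # assumption: pyrax is installed on the target

def main():
    argument_spec = rax_argument_spec()
    argument_spec.update(dict(name=dict(type='str', required=True)))
    module = AnsibleModule(argument_spec=argument_spec,
                           required_together=rax_required_together())
    setup_rax_module(module, pyrax)  # authenticates and validates the region
    server = rax_find_server(module, pyrax, module.params['name'])
    module.exit_json(changed=False, server=rax_to_dict(server, 'server'))

if __name__ == '__main__':
    main()
```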
||||
@@ -11,6 +11,7 @@ import os
|
||||
import random
|
||||
import string
|
||||
import gzip
|
||||
import time
|
||||
from io import BytesIO
|
||||
from ansible.module_utils.urls import open_url
|
||||
from ansible.module_utils.common.text.converters import to_native
|
||||
@@ -132,11 +133,13 @@ class RedfishUtils(object):
|
||||
return resp
|
||||
|
||||
# The following functions are to send GET/POST/PATCH/DELETE requests
|
||||
def get_request(self, uri, override_headers=None, allow_no_resp=False):
|
||||
def get_request(self, uri, override_headers=None, allow_no_resp=False, timeout=None):
|
||||
req_headers = dict(GET_HEADERS)
|
||||
if override_headers:
|
||||
req_headers.update(override_headers)
|
||||
username, password, basic_auth = self._auth_params(req_headers)
|
||||
if timeout is None:
|
||||
timeout = self.timeout
|
||||
try:
|
||||
# Service root is an unauthenticated resource; remove credentials
|
||||
# in case the caller will be using sessions later.
|
||||
@@ -146,7 +149,7 @@ class RedfishUtils(object):
|
||||
url_username=username, url_password=password,
|
||||
force_basic_auth=basic_auth, validate_certs=False,
|
||||
follow_redirects='all',
|
||||
use_proxy=True, timeout=self.timeout)
|
||||
use_proxy=True, timeout=timeout)
|
||||
headers = dict((k.lower(), v) for (k, v) in resp.info().items())
|
||||
try:
|
||||
if headers.get('content-encoding') == 'gzip' and LooseVersion(ansible_version) < LooseVersion('2.14'):
|
||||
@@ -624,6 +627,24 @@ class RedfishUtils(object):
|
||||
allowable_values = default_values
|
||||
return allowable_values
|
||||
|
||||
def check_service_availability(self):
|
||||
"""
|
||||
Checks if the service is accessible.
|
||||
|
||||
:return: dict containing the status of the service
|
||||
"""
|
||||
|
||||
# Get the service root
|
||||
# Override the timeout since the service root is expected to be readily
|
||||
# available.
|
||||
service_root = self.get_request(self.root_uri + self.service_root, timeout=10)
|
||||
if service_root['ret'] is False:
|
||||
# Failed, either due to a timeout or HTTP error; not available
|
||||
return {'ret': True, 'available': False}
|
||||
|
||||
# Successfully accessed the service root; available
|
||||
return {'ret': True, 'available': True}
|
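The probe deliberately returns `ret: True` either way; only `available` distinguishes the outcome. A hedged usage sketch (assuming `utils` is a RedfishUtils instance and `module` an AnsibleModule):

```python
# Gate an operation on the service root answering within the short timeout.
status = utils.check_service_availability()
if not status['available']:
    module.fail_json(msg='Redfish service root is not responding')
```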
||||
|
||||
def get_logs(self):
|
||||
log_svcs_uri_list = []
|
||||
list_of_logs = []
|
||||
@@ -1083,11 +1104,12 @@ class RedfishUtils(object):
|
||||
return self.manage_power(command, self.systems_uri,
|
||||
'#ComputerSystem.Reset')
|
||||
|
||||
def manage_manager_power(self, command):
|
||||
def manage_manager_power(self, command, wait=False, wait_timeout=120):
|
||||
return self.manage_power(command, self.manager_uri,
|
||||
'#Manager.Reset')
|
||||
'#Manager.Reset', wait, wait_timeout)
|
||||
|
||||
def manage_power(self, command, resource_uri, action_name):
|
||||
def manage_power(self, command, resource_uri, action_name, wait=False,
|
||||
wait_timeout=120):
|
||||
key = "Actions"
|
||||
reset_type_values = ['On', 'ForceOff', 'GracefulShutdown',
|
||||
'GracefulRestart', 'ForceRestart', 'Nmi',
|
||||
@@ -1147,6 +1169,30 @@ class RedfishUtils(object):
|
||||
response = self.post_request(self.root_uri + action_uri, payload)
|
||||
if response['ret'] is False:
|
||||
return response
|
||||
|
||||
# If requested to wait for the service to be available again, block
|
||||
# until it's ready
|
||||
if wait:
|
||||
elapsed_time = 0
|
||||
start_time = time.time()
|
||||
# Start with a large enough sleep. Some services will process new
|
||||
# requests while in the middle of shutting down, thus breaking out
|
||||
# early.
|
||||
time.sleep(30)
|
||||
|
||||
# Periodically check for the service's availability.
|
||||
while elapsed_time <= wait_timeout:
|
||||
status = self.check_service_availability()
|
||||
if status['available']:
|
||||
# It's available; we're done
|
||||
break
|
||||
time.sleep(5)
|
||||
elapsed_time = time.time() - start_time
|
||||
|
||||
if elapsed_time > wait_timeout:
|
||||
# Exhausted the wait timer; error
|
||||
return {'ret': False, 'changed': True,
|
||||
'msg': 'The service did not become available after %d seconds' % wait_timeout}
|
||||
return {'ret': True, 'changed': True}
|
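The wait logic above is a general sleep-then-poll pattern. A standalone sketch under the same timing assumptions (the initial delay counts toward the timeout, and polling repeats every few seconds):

```python
import time

def wait_until(predicate, timeout=120, initial_delay=30, interval=5):
    """Poll predicate() until it returns True or the timeout elapses."""
    start = time.time()
    # Sleep first: some services accept new requests while still shutting
    # down, so polling immediately could break out of the loop too early.
    time.sleep(initial_delay)
    elapsed = 0
    while elapsed <= timeout:
        if predicate():
            return True
        time.sleep(interval)
        elapsed = time.time() - start
    return False
```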
||||
|
||||
def manager_reset_to_defaults(self, command):
|
||||
@@ -3679,8 +3725,8 @@ class RedfishUtils(object):
|
||||
'msg': "Provided Storage Subsystem ID %s does not exist on the server" % storage_subsystem_id}
|
||||
|
||||
# Validate input parameters
|
||||
required_parameters = ['RAIDType', 'Drives']
|
||||
allowed_parameters = ['CapacityBytes', 'DisplayName', 'InitializeMethod', 'MediaSpanCount',
|
||||
required_parameters = ['RAIDType', 'Drives', 'CapacityBytes']
|
||||
allowed_parameters = ['DisplayName', 'InitializeMethod', 'MediaSpanCount',
|
||||
'Name', 'ReadCachePolicy', 'StripSizeBytes', 'VolumeUsage', 'WriteCachePolicy']
|
||||
|
||||
for parameter in required_parameters:
|
||||
@@ -3786,7 +3832,7 @@ class RedfishUtils(object):
|
||||
vendor = self._get_vendor()['Vendor']
|
||||
rsp_uri = ""
|
||||
for loc in resp_data['Location']:
|
||||
if loc['Language'].startswith("en"):
|
||||
if loc['Language'] == "en":
|
||||
rsp_uri = loc['Uri']
|
||||
if vendor == 'HPE':
|
||||
# WORKAROUND
|
||||
|
||||
@@ -15,10 +15,8 @@ __metaclass__ = type
|
||||
|
||||
|
||||
import os
|
||||
import re
|
||||
import shutil
|
||||
import tempfile
|
||||
import types
|
||||
|
||||
from ansible.module_utils.six.moves import configparser
|
||||
|
||||
@@ -76,241 +74,3 @@ class RegistrationBase(object):
|
||||
|
||||
def subscribe(self, **kwargs):
|
||||
raise NotImplementedError("Must be implemented by a sub-class")
|
||||
|
||||
|
||||
class Rhsm(RegistrationBase):
|
||||
"""
|
||||
DEPRECATION WARNING
|
||||
|
||||
This class is deprecated and will be removed in community.general 9.0.0.
|
||||
There is no replacement for it; please contact the community.general
|
||||
maintainers in case you are using it.
|
||||
"""
|
||||
|
||||
def __init__(self, module, username=None, password=None):
|
||||
RegistrationBase.__init__(self, module, username, password)
|
||||
self.config = self._read_config()
|
||||
self.module = module
|
||||
self.module.deprecate(
|
||||
'The Rhsm class is deprecated with no replacement.',
|
||||
version='9.0.0',
|
||||
collection_name='community.general',
|
||||
)
|
||||
|
||||
def _read_config(self, rhsm_conf='/etc/rhsm/rhsm.conf'):
|
||||
'''
|
||||
Load RHSM configuration from /etc/rhsm/rhsm.conf.
|
||||
Returns:
|
||||
* ConfigParser object
|
||||
'''
|
||||
|
||||
# Read RHSM defaults ...
|
||||
cp = configparser.ConfigParser()
|
||||
cp.read(rhsm_conf)
|
||||
|
||||
# Add support for specifying a default value w/o having to standup some configuration
|
||||
# Yeah, I know this should be subclassed ... but, oh well
|
||||
def get_option_default(self, key, default=''):
|
||||
sect, opt = key.split('.', 1)
|
||||
if self.has_section(sect) and self.has_option(sect, opt):
|
||||
return self.get(sect, opt)
|
||||
else:
|
||||
return default
|
||||
|
||||
cp.get_option = types.MethodType(get_option_default, cp, configparser.ConfigParser)
|
||||
|
||||
return cp
|
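A brief sketch of how the patched accessor is meant to be used; the keys shown are standard rhsm.conf settings but are assumptions here:

```python
# Hypothetical lookups: "section.option" keys fall back to the default
# when the section or option is absent from /etc/rhsm/rhsm.conf.
config = self._read_config()
hostname = config.get_option('server.hostname', 'subscription.rhsm.redhat.com')
insecure = config.get_option('server.insecure', '0')
```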
||||
|
||||
def enable(self):
|
||||
'''
|
||||
Enable the system to receive updates from subscription-manager.
|
||||
This involves updating affected yum plugins and removing any
|
||||
conflicting yum repositories.
|
||||
'''
|
||||
RegistrationBase.enable(self)
|
||||
self.update_plugin_conf('rhnplugin', False)
|
||||
self.update_plugin_conf('subscription-manager', True)
|
||||
|
||||
def configure(self, **kwargs):
|
||||
'''
|
||||
Configure the system as directed for registration with RHN
|
||||
Raises:
|
||||
* Exception - if error occurs while running command
|
||||
'''
|
||||
args = ['subscription-manager', 'config']
|
||||
|
||||
# Pass supplied **kwargs as parameters to subscription-manager. Ignore
|
||||
# non-configuration parameters and replace '_' with '.'. For example,
|
||||
# 'server_hostname' becomes '--system.hostname'.
|
||||
for k, v in kwargs.items():
|
||||
if re.search(r'^(system|rhsm)_', k):
|
||||
args.append('--%s=%s' % (k.replace('_', '.'), v))
|
||||
|
||||
self.module.run_command(args, check_rc=True)
|
||||
|
||||
@property
|
||||
def is_registered(self):
|
||||
'''
|
||||
Determine whether the current system is registered.
|
||||
Returns:
|
||||
* Boolean - whether the current system is currently registered to
|
||||
RHN.
|
||||
'''
|
||||
args = ['subscription-manager', 'identity']
|
||||
rc, stdout, stderr = self.module.run_command(args, check_rc=False)
|
||||
if rc == 0:
|
||||
return True
|
||||
else:
|
||||
return False
|
||||
|
||||
def register(self, username, password, autosubscribe, activationkey):
|
||||
'''
|
||||
Register the current system to the provided RHN server
|
||||
Raises:
|
||||
* Exception - if error occurs while running command
|
||||
'''
|
||||
args = ['subscription-manager', 'register']
|
||||
|
||||
# Generate command arguments
|
||||
if activationkey:
|
||||
args.append('--activationkey "%s"' % activationkey)
|
||||
else:
|
||||
if autosubscribe:
|
||||
args.append('--autosubscribe')
|
||||
if username:
|
||||
args.extend(['--username', username])
|
||||
if password:
|
||||
args.extend(['--password', password])
|
||||
|
||||
# Do the needful...
|
||||
rc, stderr, stdout = self.module.run_command(args, check_rc=True)
|
||||
|
||||
def unsubscribe(self):
|
||||
'''
|
||||
Unsubscribe a system from all subscribed channels
|
||||
Raises:
|
||||
* Exception - if error occurs while running command
|
||||
'''
|
||||
args = ['subscription-manager', 'unsubscribe', '--all']
|
||||
rc, stderr, stdout = self.module.run_command(args, check_rc=True)
|
||||
|
||||
def unregister(self):
|
||||
'''
|
||||
Unregister a currently registered system
|
||||
Raises:
|
||||
* Exception - if error occurs while running command
|
||||
'''
|
||||
args = ['subscription-manager', 'unregister']
|
||||
rc, stderr, stdout = self.module.run_command(args, check_rc=True)
|
||||
self.update_plugin_conf('rhnplugin', False)
|
||||
self.update_plugin_conf('subscription-manager', False)
|
||||
|
||||
def subscribe(self, regexp):
|
||||
'''
|
||||
Subscribe current system to available pools matching the specified
|
||||
regular expression
|
||||
Raises:
|
||||
* Exception - if error occurs while running command
|
||||
'''
|
||||
|
||||
# Available pools ready for subscription
|
||||
available_pools = RhsmPools(self.module)
|
||||
|
||||
for pool in available_pools.filter(regexp):
|
||||
pool.subscribe()
|
||||
|
||||
|
||||
class RhsmPool(object):
|
||||
"""
|
||||
Convenience class for housing subscription information
|
||||
|
||||
DEPRECATION WARNING
|
||||
|
||||
This class is deprecated and will be removed in community.general 9.0.0.
|
||||
There is no replacement for it; please contact the community.general
|
||||
maintainers in case you are using it.
|
||||
"""
|
||||
|
||||
def __init__(self, module, **kwargs):
|
||||
self.module = module
|
||||
for k, v in kwargs.items():
|
||||
setattr(self, k, v)
|
||||
self.module.deprecate(
|
||||
'The RhsmPool class is deprecated with no replacement.',
|
||||
version='9.0.0',
|
||||
collection_name='community.general',
|
||||
)
|
||||
|
||||
def __str__(self):
|
||||
return str(self.__getattribute__('_name'))
|
||||
|
||||
def subscribe(self):
|
||||
args = "subscription-manager subscribe --pool %s" % self.PoolId
|
||||
rc, stdout, stderr = self.module.run_command(args, check_rc=True)
|
||||
if rc == 0:
|
||||
return True
|
||||
else:
|
||||
return False
|
||||
|
||||
|
||||
class RhsmPools(object):
|
||||
"""
|
||||
This class is used for manipulating pools subscriptions with RHSM
|
||||
|
||||
DEPRECATION WARNING
|
||||
|
||||
This class is deprecated and will be removed in community.general 9.0.0.
|
||||
There is no replacement for it; please contact the community.general
|
||||
maintainers in case you are using it.
|
||||
"""
|
||||
|
||||
def __init__(self, module):
|
||||
self.module = module
|
||||
self.products = self._load_product_list()
|
||||
self.module.deprecate(
|
||||
'The RhsmPools class is deprecated with no replacement.',
|
||||
version='9.0.0',
|
||||
collection_name='community.general',
|
||||
)
|
||||
|
||||
def __iter__(self):
|
||||
return self.products.__iter__()
|
||||
|
||||
def _load_product_list(self):
|
||||
"""
|
||||
Loads list of all available pools for system in data structure
|
||||
"""
|
||||
args = "subscription-manager list --available"
|
||||
rc, stdout, stderr = self.module.run_command(args, check_rc=True)
|
||||
|
||||
products = []
|
||||
for line in stdout.split('\n'):
|
||||
# Remove leading+trailing whitespace
|
||||
line = line.strip()
|
||||
# An empty line implies the end of an output group
|
||||
if len(line) == 0:
|
||||
continue
|
||||
# If a colon ':' is found, parse
|
||||
elif ':' in line:
|
||||
(key, value) = line.split(':', 1)
|
||||
key = key.strip().replace(" ", "") # To unify
|
||||
value = value.strip()
|
||||
if key in ['ProductName', 'SubscriptionName']:
|
||||
# Remember the name for later processing
|
||||
products.append(RhsmPool(self.module, _name=value, key=value))
|
||||
elif products:
|
||||
# Associate value with most recently recorded product
|
||||
products[-1].__setattr__(key, value)
|
||||
# FIXME - log some warning?
|
||||
# else:
|
||||
# warnings.warn("Unhandled subscription key/value: %s/%s" % (key,value))
|
||||
return products
|
||||
|
||||
def filter(self, regexp='^$'):
|
||||
'''
|
||||
Return a list of RhsmPools whose name matches the provided regular expression
|
||||
'''
|
||||
r = re.compile(regexp)
|
||||
for product in self.products:
|
||||
if r.search(product._name):
|
||||
yield product
|
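A short usage sketch of the classes above (assuming `module` is an AnsibleModule): subscribe to every available pool whose name matches a pattern.

```python
# This mirrors Rhsm.subscribe() above: filter() yields matching RhsmPool
# objects and each one registers itself via subscription-manager.
pools = RhsmPools(module)
for pool in pools.filter(regexp='Enterprise'):
    pool.subscribe()
```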
||||
|
||||
@@ -28,7 +28,7 @@ def api_argument_spec():
|
||||
return api_argument_spec
|
||||
|
||||
|
||||
def api_request(module, endpoint, data=None, method="GET"):
|
||||
def api_request(module, endpoint, data=None, method="GET", content_type="application/json"):
|
||||
"""Manages Rundeck API requests via HTTP(S)
|
||||
|
||||
:arg module: The AnsibleModule (used to get url, api_version, api_token, etc).
|
||||
@@ -63,7 +63,7 @@ def api_request(module, endpoint, data=None, method="GET"):
|
||||
data=json.dumps(data),
|
||||
method=method,
|
||||
headers={
|
||||
"Content-Type": "application/json",
|
||||
"Content-Type": content_type,
|
||||
"Accept": "application/json",
|
||||
"X-Rundeck-Auth-Token": module.params["api_token"]
|
||||
}
|
||||
|
||||
@@ -100,7 +100,7 @@ class _Variable(object):
|
||||
return
|
||||
|
||||
def __str__(self):
|
||||
return "<_Variable: value={0!r}, initial={1!r}, diff={2}, output={3}, change={4}, verbosity={5}>".format(
|
||||
return "<Variable: value={0!r}, initial={1!r}, diff={2}, output={3}, change={4}, verbosity={5}>".format(
|
||||
self.value, self.initial_value, self.diff, self.output, self.change, self.verbosity
|
||||
)
|
||||
|
||||
|
||||
@@ -11,7 +11,6 @@ import datetime
|
||||
import re
|
||||
import time
|
||||
import tarfile
|
||||
import os
|
||||
|
||||
from ansible.module_utils.urls import fetch_file
|
||||
from ansible_collections.community.general.plugins.module_utils.redfish_utils import RedfishUtils
|
||||
@@ -80,25 +79,19 @@ class WdcRedfishUtils(RedfishUtils):
|
||||
return response
|
||||
return self._find_updateservice_additional_uris()
|
||||
|
||||
def _is_enclosure_multi_tenant_and_fetch_gen(self):
|
||||
def _is_enclosure_multi_tenant(self):
|
||||
"""Determine if the enclosure is multi-tenant.
|
||||
|
||||
The serial number of a multi-tenant enclosure will end in "-A" or "-B".
|
||||
Also fetches the enclosure generation.
|
||||
|
||||
:return: True/False if the enclosure is multi-tenant or not and return enclosure generation;
|
||||
None if unable to determine.
|
||||
:return: True/False if the enclosure is multi-tenant or not; None if unable to determine.
|
||||
"""
|
||||
response = self.get_request(self.root_uri + self.service_root + "Chassis/Enclosure")
|
||||
if response['ret'] is False:
|
||||
return None
|
||||
pattern = r".*-[A,B]"
|
||||
data = response['data']
|
||||
if 'EnclVersion' not in data:
|
||||
enc_version = 'G1'
|
||||
else:
|
||||
enc_version = data['EnclVersion']
|
||||
return re.match(pattern, data['SerialNumber']) is not None, enc_version
|
||||
return re.match(pattern, data['SerialNumber']) is not None
|
||||
|
||||
def _find_updateservice_additional_uris(self):
|
||||
"""Find & set WDC-specific update service URIs"""
|
||||
@@ -187,44 +180,15 @@ class WdcRedfishUtils(RedfishUtils):
|
||||
To determine if the bundle is multi-tenant or not, it looks inside the .bin file within the tarfile,
|
||||
and checks the appropriate byte in the file.
|
||||
|
||||
If not tarfile, the bundle is checked for 2048th byte to determine whether it is Gen2 bundle.
|
||||
Gen2 is always single tenant at this time.
|
||||
|
||||
:param str bundle_uri: HTTP URI of the firmware bundle.
|
||||
:return: Firmware version number contained in the bundle, whether or not the bundle is multi-tenant
|
||||
and bundle generation. Either value will be None if unable to determine.
|
||||
:return: Firmware version number contained in the bundle, and whether or not the bundle is multi-tenant.
|
||||
Either value will be None if unable to determine.
|
||||
:rtype: str or None, bool or None
|
||||
"""
|
||||
bundle_temp_filename = fetch_file(module=self.module,
|
||||
url=bundle_uri)
|
||||
bundle_version = None
|
||||
is_multi_tenant = None
|
||||
gen = None
|
||||
|
||||
# If not tarfile, then if the file has "MMG2" or "DPG2" at 2048th byte
|
||||
# then the bundle is for MM or DP G2
|
||||
if not tarfile.is_tarfile(bundle_temp_filename):
|
||||
cookie1 = None
|
||||
with open(bundle_temp_filename, "rb") as bundle_file:
|
||||
file_size = os.path.getsize(bundle_temp_filename)
|
||||
if file_size >= 2052:
|
||||
bundle_file.seek(2048)
|
||||
cookie1 = bundle_file.read(4)
|
||||
# It is anticipated that the DP firmware bundle will have the value "DPG2"
|
||||
# for cookie1 in the header
|
||||
if cookie1 and cookie1.decode("utf8") == "MMG2" or cookie1.decode("utf8") == "DPG2":
|
||||
file_name, ext = os.path.splitext(str(bundle_uri.rsplit('/', 1)[1]))
|
||||
# G2 bundle file name: Ultrastar-Data102_3000_SEP_1010-032_2.1.12
|
||||
parsedFileName = file_name.split('_')
|
||||
if len(parsedFileName) == 5:
|
||||
bundle_version = parsedFileName[4]
|
||||
# MM G2 is always single tenant
|
||||
is_multi_tenant = False
|
||||
gen = "G2"
|
||||
|
||||
return bundle_version, is_multi_tenant, gen
|
||||
|
||||
# Bundle is for MM or DP G1
|
||||
return None, None
|
||||
tf = tarfile.open(bundle_temp_filename)
|
||||
pattern_pkg = r"oobm-(.+)\.pkg"
|
||||
pattern_bin = r"(.*\.bin)"
|
||||
@@ -241,9 +205,8 @@ class WdcRedfishUtils(RedfishUtils):
|
||||
bin_file.seek(11)
|
||||
byte_11 = bin_file.read(1)
|
||||
is_multi_tenant = byte_11 == b'\x80'
|
||||
gen = "G1"
|
||||
|
||||
return bundle_version, is_multi_tenant, gen
|
||||
return bundle_version, is_multi_tenant
|
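The multi-tenant probe comes down to one byte. A standalone sketch (the filename is hypothetical; 0x80 at offset 11 is the flag the code above tests):

```python
# Read byte 11 of the firmware .bin; b'\x80' marks a multi-tenant bundle.
with open('oobm-2.1.12.bin', 'rb') as fh:
    fh.seek(11)
    is_multi_tenant = fh.read(1) == b'\x80'
```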
||||
|
||||
@staticmethod
|
||||
def uri_is_http(uri):
|
||||
@@ -304,16 +267,15 @@ class WdcRedfishUtils(RedfishUtils):
|
||||
# Check the FW version in the bundle file, and compare it to what is already on the IOMs
|
||||
|
||||
# Bundle version number
|
||||
bundle_firmware_version, is_bundle_multi_tenant, bundle_gen = self._get_bundle_version(bundle_uri)
|
||||
if bundle_firmware_version is None or is_bundle_multi_tenant is None or bundle_gen is None:
|
||||
bundle_firmware_version, is_bundle_multi_tenant = self._get_bundle_version(bundle_uri)
|
||||
if bundle_firmware_version is None or is_bundle_multi_tenant is None:
|
||||
return {
|
||||
'ret': False,
|
||||
'msg': 'Unable to extract bundle version or multi-tenant status or generation from update image file'
|
||||
'msg': 'Unable to extract bundle version or multi-tenant status from update image tarfile'
|
||||
}
|
||||
|
||||
is_enclosure_multi_tenant, enclosure_gen = self._is_enclosure_multi_tenant_and_fetch_gen()
|
||||
|
||||
# Verify that the bundle is correctly multi-tenant or not
|
||||
is_enclosure_multi_tenant = self._is_enclosure_multi_tenant()
|
||||
if is_enclosure_multi_tenant != is_bundle_multi_tenant:
|
||||
return {
|
||||
'ret': False,
|
||||
@@ -323,16 +285,6 @@ class WdcRedfishUtils(RedfishUtils):
|
||||
)
|
||||
}
|
||||
|
||||
# Verify that the bundle is compliant with the target enclosure
|
||||
if enclosure_gen != bundle_gen:
|
||||
return {
|
||||
'ret': False,
|
||||
'msg': 'Enclosure generation is {0} but bundle is of {1}'.format(
|
||||
enclosure_gen,
|
||||
bundle_gen,
|
||||
)
|
||||
}
|
||||
|
||||
# Version number installed on IOMs
|
||||
firmware_inventory = self.get_firmware_inventory()
|
||||
if not firmware_inventory["ret"]:
|
||||
|
||||
@@ -192,7 +192,6 @@ def main():
|
||||
rmitab = module.get_bin_path('rmitab')
|
||||
chitab = module.get_bin_path('chitab')
|
||||
rc = 0
|
||||
err = None
|
||||
|
||||
# check if the new entry exists
|
||||
current_entry = check_current_entry(module)
|
||||
|
||||
@@ -240,8 +240,6 @@ def main():
|
||||
state = module.params['state']
|
||||
pvs = module.params['pvs']
|
||||
|
||||
pv_list = ' '.join(pvs)
|
||||
|
||||
if policy == 'maximum':
|
||||
lv_policy = 'x'
|
||||
else:
|
||||
@@ -249,16 +247,16 @@ def main():
|
||||
|
||||
# Add echo command when running in check-mode
|
||||
if module.check_mode:
|
||||
test_opt = 'echo '
|
||||
test_opt = [module.get_bin_path("echo", required=True)]
|
||||
else:
|
||||
test_opt = ''
|
||||
test_opt = []
|
||||
|
||||
# check if system commands are available
|
||||
lsvg_cmd = module.get_bin_path("lsvg", required=True)
|
||||
lslv_cmd = module.get_bin_path("lslv", required=True)
|
||||
|
||||
# Get information on volume group requested
|
||||
rc, vg_info, err = module.run_command("%s %s" % (lsvg_cmd, vg))
|
||||
rc, vg_info, err = module.run_command([lsvg_cmd, vg])
|
||||
|
||||
if rc != 0:
|
||||
if state == 'absent':
|
||||
@@ -273,8 +271,7 @@ def main():
|
||||
lv_size = round_ppsize(convert_size(module, size), base=this_vg['pp_size'])
|
||||
|
||||
# Get information on logical volume requested
|
||||
rc, lv_info, err = module.run_command(
|
||||
"%s %s" % (lslv_cmd, lv))
|
||||
rc, lv_info, err = module.run_command([lslv_cmd, lv])
|
||||
|
||||
if rc != 0:
|
||||
if state == 'absent':
|
||||
@@ -296,7 +293,7 @@ def main():
|
||||
# create LV
|
||||
mklv_cmd = module.get_bin_path("mklv", required=True)
|
||||
|
||||
cmd = "%s %s -t %s -y %s -c %s -e %s %s %s %sM %s" % (test_opt, mklv_cmd, lv_type, lv, copies, lv_policy, opts, vg, lv_size, pv_list)
|
||||
cmd = test_opt + [mklv_cmd, "-t", lv_type, "-y", lv, "-c", copies, "-e", lv_policy, opts, vg, "%sM" % (lv_size, )] + pvs
|
||||
rc, out, err = module.run_command(cmd)
|
||||
if rc == 0:
|
||||
module.exit_json(changed=True, msg="Logical volume %s created." % lv)
|
||||
@@ -306,7 +303,7 @@ def main():
|
||||
if state == 'absent':
|
||||
# remove LV
|
||||
rmlv_cmd = module.get_bin_path("rmlv", required=True)
|
||||
rc, out, err = module.run_command("%s %s -f %s" % (test_opt, rmlv_cmd, this_lv['name']))
|
||||
rc, out, err = module.run_command(test_opt + [rmlv_cmd, "-f", this_lv['name']])
|
||||
if rc == 0:
|
||||
module.exit_json(changed=True, msg="Logical volume %s deleted." % lv)
|
||||
else:
|
||||
@@ -315,7 +312,7 @@ def main():
|
||||
if this_lv['policy'] != policy:
|
||||
# change lv allocation policy
|
||||
chlv_cmd = module.get_bin_path("chlv", required=True)
|
||||
rc, out, err = module.run_command("%s %s -e %s %s" % (test_opt, chlv_cmd, lv_policy, this_lv['name']))
|
||||
rc, out, err = module.run_command(test_opt + [chlv_cmd, "-e", lv_policy, this_lv['name']])
|
||||
if rc == 0:
|
||||
module.exit_json(changed=True, msg="Logical volume %s policy changed: %s." % (lv, policy))
|
||||
else:
|
||||
@@ -331,7 +328,7 @@ def main():
|
||||
# resize LV based on absolute values
|
||||
if int(lv_size) > this_lv['size']:
|
||||
extendlv_cmd = module.get_bin_path("extendlv", required=True)
|
||||
cmd = "%s %s %s %sM" % (test_opt, extendlv_cmd, lv, lv_size - this_lv['size'])
|
||||
cmd = test_opt + [extendlv_cmd, lv, "%sM" % (lv_size - this_lv['size'], )]
|
||||
rc, out, err = module.run_command(cmd)
|
||||
if rc == 0:
|
||||
module.exit_json(changed=True, msg="Logical volume %s size extended to %sMB." % (lv, lv_size))
|
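The switch from format strings to argument lists is about word splitting. An illustrative comparison (values hypothetical):

```python
# String form: run_command tokenizes the string, so a value containing a
# space turns into two argv entries.
module.run_command("rmlv -f my volume")            # argv: rmlv, -f, my, volume
# List form: each element is passed through as exactly one argument.
module.run_command(["rmlv", "-f", "my volume"])    # argv: rmlv, -f, "my volume"
```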
||||
|
||||
@@ -32,6 +32,19 @@ attributes:
|
||||
diff_mode:
|
||||
support: none
|
||||
options:
|
||||
state:
|
||||
description:
|
||||
- >
|
||||
If O(state=present) then the collection or role will be installed.
|
||||
Note that the collections and roles are not updated with this option.
|
||||
- >
|
||||
Currently the O(state=latest) is ignored unless O(type=collection), and it will
|
||||
ensure the collection is installed and updated to the latest available version.
|
||||
- Please note that O(force=true) can be used to perform upgrade regardless of O(type).
|
||||
type: str
|
||||
choices: [ present, latest ]
|
||||
default: present
|
||||
version_added: 9.1.0
|
||||
type:
|
||||
description:
|
||||
- The type of installation performed by C(ansible-galaxy).
|
||||
@@ -69,20 +82,11 @@ options:
|
||||
default: false
|
||||
force:
|
||||
description:
|
||||
- Force overwriting an existing role or collection.
|
||||
- Force overwriting existing roles and/or collections.
|
||||
- It can be used for upgrading, but the module output will always report C(changed=true).
|
||||
- Using O(force=true) is mandatory when downgrading.
|
||||
type: bool
|
||||
default: false
|
||||
ack_ansible29:
|
||||
description:
|
||||
- This option no longer has any effect and will be removed in community.general 9.0.0.
|
||||
type: bool
|
||||
default: false
|
||||
ack_min_ansiblecore211:
|
||||
description:
|
||||
- This option no longer has any effect and will be removed in community.general 9.0.0.
|
||||
type: bool
|
||||
default: false
|
||||
"""
|
||||
|
||||
EXAMPLES = """
|
||||
@@ -181,7 +185,7 @@ RETURN = """
|
||||
|
||||
import re
|
||||
|
||||
from ansible_collections.community.general.plugins.module_utils.cmd_runner import CmdRunner, cmd_runner_fmt as fmt
|
||||
from ansible_collections.community.general.plugins.module_utils.cmd_runner import CmdRunner, cmd_runner_fmt
|
||||
from ansible_collections.community.general.plugins.module_utils.module_helper import ModuleHelper, ModuleHelperException
|
||||
|
||||
|
||||
@@ -190,47 +194,40 @@ class AnsibleGalaxyInstall(ModuleHelper):
|
||||
_RE_LIST_PATH = re.compile(r'^# (?P<path>.*)$')
|
||||
_RE_LIST_COLL = re.compile(r'^(?P<elem>\w+\.\w+)\s+(?P<version>[\d\.]+)\s*$')
|
||||
_RE_LIST_ROLE = re.compile(r'^- (?P<elem>\w+\.\w+),\s+(?P<version>[\d\.]+)\s*$')
|
||||
_RE_INSTALL_OUTPUT = None # Set after determining ansible version, see __init_module__()
|
||||
_RE_INSTALL_OUTPUT = re.compile(
|
||||
r'^(?:(?P<collection>\w+\.\w+):(?P<cversion>[\d\.]+)|- (?P<role>\w+\.\w+) \((?P<rversion>[\d\.]+)\)) was installed successfully$'
|
||||
)
|
||||
ansible_version = None
|
||||
|
||||
output_params = ('type', 'name', 'dest', 'requirements_file', 'force', 'no_deps')
|
||||
module = dict(
|
||||
argument_spec=dict(
|
||||
state=dict(type='str', choices=['present', 'latest'], default='present'),
|
||||
type=dict(type='str', choices=('collection', 'role', 'both'), required=True),
|
||||
name=dict(type='str'),
|
||||
requirements_file=dict(type='path'),
|
||||
dest=dict(type='path'),
|
||||
force=dict(type='bool', default=False),
|
||||
no_deps=dict(type='bool', default=False),
|
||||
ack_ansible29=dict(
|
||||
type='bool',
|
||||
default=False,
|
||||
removed_in_version='9.0.0',
|
||||
removed_from_collection='community.general',
|
||||
),
|
||||
ack_min_ansiblecore211=dict(
|
||||
type='bool',
|
||||
default=False,
|
||||
removed_in_version='9.0.0',
|
||||
removed_from_collection='community.general',
|
||||
),
|
||||
),
|
||||
mutually_exclusive=[('name', 'requirements_file')],
|
||||
required_one_of=[('name', 'requirements_file')],
|
||||
required_if=[('type', 'both', ['requirements_file'])],
|
||||
supports_check_mode=False,
|
||||
)
|
||||
use_old_vardict = False
|
||||
|
||||
command = 'ansible-galaxy'
|
||||
command_args_formats = dict(
|
||||
type=fmt.as_func(lambda v: [] if v == 'both' else [v]),
|
||||
galaxy_cmd=fmt.as_list(),
|
||||
requirements_file=fmt.as_opt_val('-r'),
|
||||
dest=fmt.as_opt_val('-p'),
|
||||
force=fmt.as_bool("--force"),
|
||||
no_deps=fmt.as_bool("--no-deps"),
|
||||
version=fmt.as_bool("--version"),
|
||||
name=fmt.as_list(),
|
||||
type=cmd_runner_fmt.as_func(lambda v: [] if v == 'both' else [v]),
|
||||
galaxy_cmd=cmd_runner_fmt.as_list(),
|
||||
upgrade=cmd_runner_fmt.as_bool("--upgrade"),
|
||||
requirements_file=cmd_runner_fmt.as_opt_val('-r'),
|
||||
dest=cmd_runner_fmt.as_opt_val('-p'),
|
||||
force=cmd_runner_fmt.as_bool("--force"),
|
||||
no_deps=cmd_runner_fmt.as_bool("--no-deps"),
|
||||
version=cmd_runner_fmt.as_fixed("--version"),
|
||||
name=cmd_runner_fmt.as_list(),
|
||||
)
|
||||
|
||||
def _make_runner(self, lang):
|
||||
@@ -254,25 +251,16 @@ class AnsibleGalaxyInstall(ModuleHelper):
|
||||
try:
|
||||
runner = self._make_runner("C.UTF-8")
|
||||
with runner("version", check_rc=False, output_process=process) as ctx:
|
||||
return runner, ctx.run(version=True)
|
||||
except UnsupportedLocale as e:
|
||||
return runner, ctx.run()
|
||||
except UnsupportedLocale:
|
||||
runner = self._make_runner("en_US.UTF-8")
|
||||
with runner("version", check_rc=True, output_process=process) as ctx:
|
||||
return runner, ctx.run(version=True)
|
||||
return runner, ctx.run()
|
||||
|
||||
def __init_module__(self):
|
||||
# self.runner = CmdRunner(self.module, command=self.command, arg_formats=self.command_args_formats, force_lang=self.force_lang)
|
||||
self.runner, self.ansible_version = self._get_ansible_galaxy_version()
|
||||
if self.ansible_version < (2, 11):
|
||||
self.module.fail_json(
|
||||
msg="Support for Ansible 2.9 and ansible-base 2.10 has ben removed."
|
||||
)
|
||||
# Collection install output changed:
|
||||
# ansible-base 2.10: "coll.name (x.y.z)"
|
||||
# ansible-core 2.11+: "coll.name:x.y.z"
|
||||
self._RE_INSTALL_OUTPUT = re.compile(r'^(?:(?P<collection>\w+\.\w+)(?: \(|:)(?P<cversion>[\d\.]+)\)?'
|
||||
r'|- (?P<role>\w+\.\w+) \((?P<rversion>[\d\.]+)\))'
|
||||
r' was installed successfully$')
|
||||
self.module.fail_json(msg="Support for Ansible 2.9 and ansible-base 2.10 has been removed.")
|
||||
self.vars.set("new_collections", {}, change=True)
|
||||
self.vars.set("new_roles", {}, change=True)
|
||||
if self.vars.type != "collection":
|
||||
@@ -325,8 +313,9 @@ class AnsibleGalaxyInstall(ModuleHelper):
|
||||
elif match.group("role"):
|
||||
self.vars.new_roles[match.group("role")] = match.group("rversion")
|
||||
|
||||
with self.runner("type galaxy_cmd force no_deps dest requirements_file name", output_process=process) as ctx:
|
||||
ctx.run(galaxy_cmd="install")
|
||||
upgrade = (self.vars.type == "collection" and self.vars.state == "latest")
|
||||
with self.runner("type galaxy_cmd upgrade force no_deps dest requirements_file name", output_process=process) as ctx:
|
||||
ctx.run(galaxy_cmd="install", upgrade=upgrade)
|
||||
if self.verbosity > 2:
|
||||
self.vars.set("run_info", ctx.run_info)
|
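A quick, illustrative sanity check of the consolidated `_RE_INSTALL_OUTPUT` pattern defined earlier in this diff, covering both the collection and role output shapes:

```python
import re

RE = re.compile(
    r'^(?:(?P<collection>\w+\.\w+):(?P<cversion>[\d\.]+)'
    r'|- (?P<role>\w+\.\w+) \((?P<rversion>[\d\.]+)\)) was installed successfully$'
)

m = RE.match("community.docker:3.10.3 was installed successfully")
assert m and m.group("collection") == "community.docker"
m = RE.match("- geerlingguy.docker (7.2.0) was installed successfully")
assert m and m.group("role") == "geerlingguy.docker"
```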
||||
|
||||
|
||||
@@ -74,7 +74,6 @@ options:
|
||||
world:
|
||||
description:
|
||||
- Use a custom world file when checking for explicitly installed packages.
|
||||
The file is used only when a value is provided for O(name), and O(state) is set to V(present) or V(latest).
|
||||
type: str
|
||||
default: /etc/apk/world
|
||||
version_added: 5.4.0
|
||||
|
||||
@@ -170,7 +170,7 @@ def local_rpm_package_name(path):
|
||||
def query_package(module, name):
|
||||
# rpm -q returns 0 if the package is installed,
|
||||
# 1 if it is not installed
|
||||
rc, out, err = module.run_command("%s -q %s" % (RPM_PATH, name))
|
||||
rc, out, err = module.run_command([RPM_PATH, "-q", name])
|
||||
if rc == 0:
|
||||
return True
|
||||
else:
|
||||
@@ -203,7 +203,7 @@ def query_package_provides(module, name, allow_upgrade=False):
|
||||
|
||||
name = local_rpm_package_name(name)
|
||||
|
||||
rc, out, err = module.run_command("%s -q --provides %s" % (RPM_PATH, name))
|
||||
rc, out, err = module.run_command([RPM_PATH, "-q", "--provides", name])
|
||||
if rc == 0:
|
||||
if not allow_upgrade:
|
||||
return True
|
||||
@@ -253,7 +253,7 @@ def remove_packages(module, packages):
|
||||
if not query_package(module, package):
|
||||
continue
|
||||
|
||||
rc, out, err = module.run_command("%s -y remove %s" % (APT_PATH, package), environ_update={"LANG": "C"})
|
||||
rc, out, err = module.run_command([APT_PATH, "-y", "remove", package], environ_update={"LANG": "C"})
|
||||
|
||||
if rc != 0:
|
||||
module.fail_json(msg="failed to remove %s: %s" % (package, err))
|
||||
@@ -271,14 +271,14 @@ def install_packages(module, pkgspec, allow_upgrade=False):
|
||||
if pkgspec is None:
|
||||
return (False, "Empty package list")
|
||||
|
||||
packages = ""
|
||||
packages = []
|
||||
for package in pkgspec:
|
||||
if not query_package_provides(module, package, allow_upgrade=allow_upgrade):
|
||||
packages += "'%s' " % package
|
||||
packages.append(package)
|
||||
|
||||
if len(packages) != 0:
|
||||
|
||||
rc, out, err = module.run_command("%s -y install %s" % (APT_PATH, packages), environ_update={"LANG": "C"})
|
||||
if packages:
|
||||
command = [APT_PATH, "-y", "install"] + packages
|
||||
rc, out, err = module.run_command(command, environ_update={"LANG": "C"})
|
||||
|
||||
installed = True
|
||||
for package in pkgspec:
|
||||
@@ -287,7 +287,7 @@ def install_packages(module, pkgspec, allow_upgrade=False):
|
||||
|
||||
# apt-rpm always have 0 for exit code if --force is used
|
||||
if rc or not installed:
|
||||
module.fail_json(msg="'apt-get -y install %s' failed: %s" % (packages, err))
|
||||
module.fail_json(msg="'%s' failed: %s" % (" ".join(command), err))
|
||||
else:
|
||||
return (True, "%s present(s)" % packages)
|
||||
else:
|
||||
@@ -310,6 +310,18 @@ def main():
|
||||
module.fail_json(msg="cannot find /usr/bin/apt-get and/or /usr/bin/rpm")
|
||||
|
||||
p = module.params
|
||||
if p['state'] in ['installed', 'present']:
|
||||
module.deprecate(
|
||||
'state=%s currently behaves unexpectedly by always upgrading to the latest version if'
|
||||
' the package is already installed. This behavior is deprecated and will change in'
|
||||
' community.general 11.0.0. You can use state=latest to explicitly request this behavior'
|
||||
' or state=present_not_latest to explicitly request the behavior that state=%s will have'
|
||||
' in community.general 11.0.0, namely that the package will not be upgraded if it is'
|
||||
' already installed.' % (p['state'], p['state']),
|
||||
version='11.0.0',
|
||||
collection_name='community.general',
|
||||
)
|
||||
|
||||
modified = False
|
||||
output = ""
|
||||
|
||||
|
||||
@@ -102,40 +102,40 @@ EXAMPLES = r'''
|
||||
- name: Create a @home subvolume under the root subvolume
|
||||
community.general.btrfs_subvolume:
|
||||
name: /@home
|
||||
filesystem_device: /dev/vda2
|
||||
device: /dev/vda2
|
||||
|
||||
- name: Remove the @home subvolume if it exists
|
||||
community.general.btrfs_subvolume:
|
||||
name: /@home
|
||||
state: absent
|
||||
filesystem_device: /dev/vda2
|
||||
device: /dev/vda2
|
||||
|
||||
- name: Create a snapshot of the root subvolume named @
|
||||
community.general.btrfs_subvolume:
|
||||
name: /@
|
||||
snapshot_source: /
|
||||
filesystem_device: /dev/vda2
|
||||
device: /dev/vda2
|
||||
|
||||
- name: Create a snapshot of the root subvolume and make it the new default subvolume
|
||||
community.general.btrfs_subvolume:
|
||||
name: /@
|
||||
snapshot_source: /
|
||||
default: Yes
|
||||
filesystem_device: /dev/vda2
|
||||
device: /dev/vda2
|
||||
|
||||
- name: Create a snapshot of the /@ subvolume and recursively creating intermediate subvolumes as required
|
||||
community.general.btrfs_subvolume:
|
||||
name: /@snapshots/@2022_06_09
|
||||
snapshot_source: /@
|
||||
recursive: True
|
||||
filesystem_device: /dev/vda2
|
||||
device: /dev/vda2
|
||||
|
||||
- name: Remove the /@ subvolume and recursively delete child subvolumes as required
|
||||
community.general.btrfs_subvolume:
|
||||
name: /@snapshots/@2022_06_09
|
||||
snapshot_source: /@
|
||||
recursive: True
|
||||
filesystem_device: /dev/vda2
|
||||
device: /dev/vda2
|
||||
|
||||
'''
|
||||
|
||||
@@ -572,10 +572,7 @@ class BtrfsSubvolumeModule(object):
|
||||
self.__temporary_mounts[cache_key] = mountpoint
|
||||
|
||||
mount = self.module.get_bin_path("mount", required=True)
|
||||
command = "%s -o noatime,subvolid=%d %s %s " % (mount,
|
||||
subvolid,
|
||||
device,
|
||||
mountpoint)
|
||||
command = [mount, "-o", "noatime,subvolid=%d" % subvolid, device, mountpoint]
|
||||
result = self.module.run_command(command, check_rc=True)
|
||||
|
||||
return mountpoint
|
||||
@@ -586,10 +583,10 @@ class BtrfsSubvolumeModule(object):
|
||||
|
||||
def __cleanup_mount(self, mountpoint):
|
||||
umount = self.module.get_bin_path("umount", required=True)
|
||||
result = self.module.run_command("%s %s" % (umount, mountpoint))
|
||||
result = self.module.run_command([umount, mountpoint])
|
||||
if result[0] == 0:
|
||||
rmdir = self.module.get_bin_path("rmdir", required=True)
|
||||
self.module.run_command("%s %s" % (rmdir, mountpoint))
|
||||
self.module.run_command([rmdir, mountpoint])
|
||||
|
||||
# Format and return results
|
||||
def get_results(self):
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
#!/usr/bin/python
|
||||
# -*- coding: utf-8 -*-
|
||||
# Copyright (c) 2021 Radek Sprta <mail@radeksprta.eu>
|
||||
# Copyright (c) 2024 Colin Nolan <cn580@alumni.york.ac.uk>
|
||||
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||
# SPDX-License-Identifier: GPL-3.0-or-later
|
||||
|
||||
@@ -65,6 +66,13 @@ options:
|
||||
type: str
|
||||
default: present
|
||||
choices: [ "present", "absent", "latest" ]
|
||||
directory:
|
||||
description:
|
||||
- Path to the source directory to install the Rust package from.
|
||||
- This is only used when installing packages.
|
||||
type: path
|
||||
required: false
|
||||
version_added: 9.1.0
|
||||
requirements:
|
||||
- cargo installed
|
||||
"""
|
||||
@@ -98,8 +106,14 @@ EXAMPLES = r"""
|
||||
community.general.cargo:
|
||||
name: ludusavi
|
||||
state: latest
|
||||
|
||||
- name: Install "ludusavi" Rust package from source directory
|
||||
community.general.cargo:
|
||||
name: ludusavi
|
||||
directory: /path/to/ludusavi/source
|
||||
"""
|
||||
|
||||
import json
|
||||
import os
|
||||
import re
|
||||
|
||||
@@ -115,6 +129,7 @@ class Cargo(object):
|
||||
self.state = kwargs["state"]
|
||||
self.version = kwargs["version"]
|
||||
self.locked = kwargs["locked"]
|
||||
self.directory = kwargs["directory"]
|
||||
|
||||
@property
|
||||
def path(self):
|
||||
@@ -143,7 +158,7 @@ class Cargo(object):
|
||||
|
||||
data, dummy = self._exec(cmd, True, False, False)
|
||||
|
||||
package_regex = re.compile(r"^([\w\-]+) v(.+):$")
|
||||
package_regex = re.compile(r"^([\w\-]+) v(\S+).*:$")
|
||||
installed = {}
|
||||
for line in data.splitlines():
|
||||
package_info = package_regex.match(line)
|
||||
@@ -163,19 +178,53 @@ class Cargo(object):
|
||||
if self.version:
|
||||
cmd.append("--version")
|
||||
cmd.append(self.version)
|
||||
if self.directory:
|
||||
cmd.append("--path")
|
||||
cmd.append(self.directory)
|
||||
return self._exec(cmd)
|
||||
|
||||
def is_outdated(self, name):
|
||||
installed_version = self.get_installed().get(name)
|
||||
latest_version = (
|
||||
self.get_latest_published_version(name)
|
||||
if not self.directory
|
||||
else self.get_source_directory_version(name)
|
||||
)
|
||||
return installed_version != latest_version
|
||||
|
||||
def get_latest_published_version(self, name):
|
||||
cmd = ["search", name, "--limit", "1"]
|
||||
data, dummy = self._exec(cmd, True, False, False)
|
||||
|
||||
match = re.search(r'"(.+)"', data)
|
||||
if match:
|
||||
latest_version = match.group(1)
|
||||
if not match:
|
||||
self.module.fail_json(
|
||||
msg="No published version for package %s found" % name
|
||||
)
|
||||
return match.group(1)
|
||||
|
||||
return installed_version != latest_version
|
||||
def get_source_directory_version(self, name):
|
||||
cmd = [
|
||||
"metadata",
|
||||
"--format-version",
|
||||
"1",
|
||||
"--no-deps",
|
||||
"--manifest-path",
|
||||
os.path.join(self.directory, "Cargo.toml"),
|
||||
]
|
||||
data, dummy = self._exec(cmd, True, False, False)
|
||||
manifest = json.loads(data)
|
||||
|
||||
package = next(
|
||||
(package for package in manifest["packages"] if package["name"] == name),
|
||||
None,
|
||||
)
|
||||
if not package:
|
||||
self.module.fail_json(
|
||||
msg="Package %s not defined in source, found: %s"
|
||||
% (name, [x["name"] for x in manifest["packages"]])
|
||||
)
|
||||
return package["version"]
|
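For reference, an abbreviated sketch of the `cargo metadata --format-version 1 --no-deps` JSON this method consumes (real output carries many more fields; values hypothetical):

```python
manifest = {
    "packages": [
        {"name": "ludusavi", "version": "0.23.0"},
    ]
}
package = next(
    (p for p in manifest["packages"] if p["name"] == "ludusavi"), None
)
assert package["version"] == "0.23.0"
```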
||||
|
||||
def uninstall(self, packages=None):
|
||||
cmd = ["uninstall"]
|
||||
@@ -191,16 +240,21 @@ def main():
|
||||
state=dict(default="present", choices=["present", "absent", "latest"]),
|
||||
version=dict(default=None, type="str"),
|
||||
locked=dict(default=False, type="bool"),
|
||||
directory=dict(default=None, type="path"),
|
||||
)
|
||||
module = AnsibleModule(argument_spec=arg_spec, supports_check_mode=True)
|
||||
|
||||
name = module.params["name"]
|
||||
state = module.params["state"]
|
||||
version = module.params["version"]
|
||||
directory = module.params["directory"]
|
||||
|
||||
if not name:
|
||||
module.fail_json(msg="Package name must be specified")
|
||||
|
||||
if directory is not None and not os.path.isdir(directory):
|
||||
module.fail_json(msg="Source directory does not exist")
|
||||
|
||||
# Set LANG env since we parse stdout
|
||||
module.run_command_environ_update = dict(
|
||||
LANG="C", LC_ALL="C", LC_MESSAGES="C", LC_CTYPE="C"
|
||||
|
||||
@@ -148,9 +148,9 @@ options:
|
||||
type:
|
||||
description:
|
||||
- The type of DNS record to create. Required if O(state=present).
|
||||
- Note that V(SPF) is no longer supported by CloudFlare. Support for it will be removed from community.general 9.0.0.
|
||||
- Support for V(SPF) has been removed from community.general 9.0.0 since that record type is no longer supported by CloudFlare.
|
||||
type: str
|
||||
choices: [ A, AAAA, CNAME, DS, MX, NS, SPF, SRV, SSHFP, TLSA, CAA, TXT ]
|
||||
choices: [ A, AAAA, CNAME, DS, MX, NS, SRV, SSHFP, TLSA, CAA, TXT ]
|
||||
value:
|
||||
description:
|
||||
- The record value.
|
||||
@@ -674,7 +674,7 @@ class CloudflareAPI(object):
|
||||
if (params['type'] is None) or (params['record'] is None):
|
||||
self.module.fail_json(msg="You must provide a type and a record to create a new record")
|
||||
|
||||
if (params['type'] in ['A', 'AAAA', 'CNAME', 'TXT', 'MX', 'NS', 'SPF']):
|
||||
if (params['type'] in ['A', 'AAAA', 'CNAME', 'TXT', 'MX', 'NS']):
|
||||
if not params['value']:
|
||||
self.module.fail_json(msg="You must provide a non-empty value to create this record type")
|
||||
|
||||
@@ -716,14 +716,12 @@ class CloudflareAPI(object):
|
||||
"port": params['port'],
|
||||
"weight": params['weight'],
|
||||
"priority": params['priority'],
|
||||
"name": params['record'],
|
||||
"proto": params['proto'],
|
||||
"service": params['service']
|
||||
}
|
||||
|
||||
new_record = {
|
||||
"type": params['type'],
|
||||
"name": params['service'] + '.' + params['proto'] + '.' + params['record'],
|
||||
"ttl": params['ttl'],
|
||||
'data': srv_data,
|
||||
}
|
||||
new_record = {"type": params['type'], "ttl": params['ttl'], 'data': srv_data}
|
||||
search_value = str(params['weight']) + '\t' + str(params['port']) + '\t' + params['value']
|
||||
search_record = params['service'] + '.' + params['proto'] + '.' + params['record']
|
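An illustrative sketch of the consolidated SRV payload (values hypothetical; the target field, which is set elsewhere, is omitted):

```python
srv_data = {
    "port": 443,
    "weight": 10,
    "priority": 5,
    "proto": "_tcp",
    "service": "_sip",
}
new_record = {"type": "SRV", "ttl": 3600, "data": srv_data}
# The record name is no longer duplicated at the top level; existing records
# are matched through search_record and search_value instead.
```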
||||
|
||||
@@ -871,7 +869,7 @@ def main():
|
||||
state=dict(type='str', default='present', choices=['absent', 'present']),
|
||||
timeout=dict(type='int', default=30),
|
||||
ttl=dict(type='int', default=1),
|
||||
type=dict(type='str', choices=['A', 'AAAA', 'CNAME', 'DS', 'MX', 'NS', 'SPF', 'SRV', 'SSHFP', 'TLSA', 'CAA', 'TXT']),
|
||||
type=dict(type='str', choices=['A', 'AAAA', 'CNAME', 'DS', 'MX', 'NS', 'SRV', 'SSHFP', 'TLSA', 'CAA', 'TXT']),
|
||||
value=dict(type='str', aliases=['content']),
|
||||
weight=dict(type='int', default=1),
|
||||
zone=dict(type='str', required=True, aliases=['domain']),
|
||||
|
||||
plugins/modules/consul_agent_check.py (new file, 254 lines)
@@ -0,0 +1,254 @@
|
||||
#!/usr/bin/python
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
# Copyright (c) 2024, Michael Ilg
|
||||
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||
# SPDX-License-Identifier: GPL-3.0-or-later
|
||||
|
||||
from __future__ import absolute_import, division, print_function
|
||||
|
||||
__metaclass__ = type
|
||||
|
||||
DOCUMENTATION = '''
|
||||
module: consul_agent_check
|
||||
short_description: Add, modify, and delete checks within a consul cluster
|
||||
version_added: 9.1.0
|
||||
description:
|
||||
- Allows the addition, modification and deletion of checks in a consul
|
||||
cluster via the agent. For more details on using and configuring Checks,
|
||||
see U(https://developer.hashicorp.com/consul/api-docs/agent/check).
|
||||
- Currently, there is no complete way to retrieve the script, interval or TTL
|
||||
metadata for a registered check. Without this metadata it is not possible to
|
||||
tell if the data supplied with ansible represents a change to a check. As a
|
||||
result this module does not attempt to determine changes and will always report that a
change occurred. An API method is planned to supply this metadata so at that
|
||||
stage change management will be added.
|
||||
author:
|
||||
- Michael Ilg (@Ilgmi)
|
||||
extends_documentation_fragment:
|
||||
- community.general.consul
|
||||
- community.general.consul.actiongroup_consul
|
||||
- community.general.consul.token
|
||||
- community.general.attributes
|
||||
attributes:
|
||||
check_mode:
|
||||
support: full
|
||||
details:
|
||||
- The result is the object as it is defined in the module options and not the object structure of the consul API.
|
||||
For a better overview of what the object structure looks like,
|
||||
take a look at U(https://developer.hashicorp.com/consul/api-docs/agent/check#list-checks).
|
||||
diff_mode:
|
||||
support: partial
|
||||
details:
|
||||
- In check mode the diff will show the object as it is defined in the module options and not the object structure of the consul API.
|
||||
options:
|
||||
state:
|
||||
description:
|
||||
- Whether the check should be present or absent.
|
||||
choices: ['present', 'absent']
|
||||
default: present
|
||||
type: str
|
||||
name:
|
||||
description:
|
||||
- Required name for the service check.
|
||||
type: str
|
||||
id:
|
||||
description:
|
||||
- Specifies a unique ID for this check on the node. This defaults to the O(name) parameter, but it may be necessary to provide
|
||||
an ID for uniqueness. This value is returned in the response as "CheckId".
|
||||
type: str
|
||||
interval:
|
||||
description:
|
||||
- The interval at which the service check will be run.
|
||||
This is a number with a V(s) or V(m) suffix to signify the units of seconds or minutes, for example V(15s) or V(1m).
|
||||
If no suffix is supplied V(s) will be used by default, for example V(10) will be V(10s).
|
||||
- Required if one of the parameters O(args), O(http), or O(tcp) is specified.
|
||||
type: str
|
||||
notes:
|
||||
description:
|
||||
- Notes to attach to check when registering it.
|
||||
type: str
|
||||
args:
|
||||
description:
|
||||
- Specifies command arguments to run to update the status of the check.
|
||||
- Requires O(interval) to be provided.
|
||||
- Mutually exclusive with O(ttl), O(tcp) and O(http).
|
||||
type: list
|
||||
elements: str
|
||||
ttl:
|
||||
description:
|
||||
- Checks can be registered with a TTL instead of O(args) and O(interval);
|
||||
this means that the service will check in with the agent before the
|
||||
TTL expires. If it doesn't the check will be considered failed.
|
||||
Required if registering a check and the script and interval are missing.
|
||||
Similar to the interval this is a number with a V(s) or V(m) suffix to
|
||||
signify the units of seconds or minutes, for example V(15s) or V(1m).
|
||||
If no suffix is supplied V(s) will be used by default, for example V(10) will be V(10s).
|
||||
- Mutually exclusive with O(args), O(tcp) and O(http).
|
||||
type: str
|
||||
tcp:
|
||||
description:
|
||||
- Checks can be registered with a TCP port. This means that consul
|
||||
will check if the connection attempt to that port is successful (that is, the port is currently accepting connections).
|
||||
The format is V(host:port), for example V(localhost:80).
|
||||
- Requires O(interval) to be provided.
|
||||
- Mutually exclusive with O(args), O(ttl) and O(http).
|
||||
type: str
|
||||
version_added: '1.3.0'
|
||||
http:
|
||||
description:
|
||||
- Checks can be registered with an HTTP endpoint. This means that consul
|
||||
will check that the http endpoint returns a successful HTTP status.
|
||||
- Requires O(interval) to be provided.
|
||||
- Mutually exclusive with O(args), O(ttl) and O(tcp).
|
||||
type: str
|
||||
timeout:
|
||||
description:
|
||||
- A custom HTTP check timeout. The consul default is 10 seconds.
|
||||
Similar to the interval this is a number with a V(s) or V(m) suffix to
|
||||
signify the units of seconds or minutes, for example V(15s) or V(1m).
|
||||
If no suffix is supplied V(s) will be used by default, for example V(10) will be V(10s).
|
||||
type: str
|
||||
service_id:
|
||||
description:
|
||||
- The ID for the service, must be unique per node. If O(state=absent),
|
||||
defaults to the service name if supplied.
|
||||
type: str
|
||||
'''
|
||||
|
||||
EXAMPLES = '''
|
||||
- name: Register tcp check for service 'nginx'
|
||||
community.general.consul_agent_check:
|
||||
name: nginx_tcp_check
|
||||
service_id: nginx
|
||||
interval: 60s
|
||||
tcp: localhost:80
|
||||
notes: "Nginx Check"
|
||||
|
||||
- name: Register http check for service 'nginx'
|
||||
community.general.consul_agent_check:
|
||||
name: nginx_http_check
|
||||
service_id: nginx
|
||||
interval: 60s
|
||||
http: http://localhost:80/status
|
||||
notes: "Nginx Check"
|
||||
|
||||
- name: Remove check for service 'nginx'
|
||||
community.general.consul_agent_check:
|
||||
state: absent
|
||||
id: nginx_http_check
|
||||
service_id: "{{ nginx_service.ID }}"
|
||||
'''
|
||||
|
||||
RETURN = """
|
||||
check:
|
||||
description: The check as returned by the consul HTTP API.
|
||||
returned: always
|
||||
type: dict
|
||||
sample:
|
||||
CheckID: nginx_check
|
||||
ServiceID: nginx
|
||||
Interval: 30s
|
||||
Type: http
|
||||
Notes: Nginx Check
|
||||
operation:
|
||||
description: The operation performed.
|
||||
returned: changed
|
||||
type: str
|
||||
sample: update
|
||||
"""

from ansible.module_utils.basic import AnsibleModule
from ansible_collections.community.general.plugins.module_utils.consul import (
    AUTH_ARGUMENTS_SPEC,
    OPERATION_CREATE,
    OPERATION_UPDATE,
    OPERATION_DELETE,
    OPERATION_READ,
    _ConsulModule,
    validate_check,
)

_ARGUMENT_SPEC = {
    "state": dict(default="present", choices=["present", "absent"]),
    "name": dict(type='str'),
    "id": dict(type='str'),
    "interval": dict(type='str'),
    "notes": dict(type='str'),
    "args": dict(type='list', elements='str'),
    "http": dict(type='str'),
    "tcp": dict(type='str'),
    "ttl": dict(type='str'),
    "timeout": dict(type='str'),
    "service_id": dict(type='str'),
}

_MUTUALLY_EXCLUSIVE = [
    ('args', 'ttl', 'tcp', 'http'),
]
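# In the second rule, the trailing True marks ('id', 'name') as an any-of
# requirement: state=absent needs at least one of the two, not both.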
_REQUIRED_IF = [
    ('state', 'present', ['name']),
    ('state', 'absent', ('id', 'name'), True),
]

_REQUIRED_BY = {
    'args': 'interval',
    'http': 'interval',
    'tcp': 'interval',
}

_ARGUMENT_SPEC.update(AUTH_ARGUMENTS_SPEC)


class ConsulAgentCheckModule(_ConsulModule):
    api_endpoint = "agent/check"
    result_key = "check"
    unique_identifiers = ["id", "name"]
    operational_attributes = {"Node", "CheckID", "Output", "ServiceName", "ServiceTags",
                              "Status", "Type", "ExposedPort", "Definition"}

    def endpoint_url(self, operation, identifier=None):
        if operation == OPERATION_READ:
            return "agent/checks"
        if operation in [OPERATION_CREATE, OPERATION_UPDATE]:
            return "/".join([self.api_endpoint, "register"])
        if operation == OPERATION_DELETE:
            return "/".join([self.api_endpoint, "deregister", identifier])

        return super(ConsulAgentCheckModule, self).endpoint_url(operation, identifier)

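    # Consul's GET agent/checks returns a map keyed by CheckID, so look the
    # check up by the identifier derived from the module parameters.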
    def read_object(self):
        url = self.endpoint_url(OPERATION_READ)
        checks = self.get(url)
        identifier = self.id_from_obj(self.params)
        if identifier in checks:
            return checks[identifier]
        return None

    def prepare_object(self, existing, obj):
        existing = super(ConsulAgentCheckModule, self).prepare_object(existing, obj)
        validate_check(existing)
        return existing

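    # Deregistration is a plain PUT with no body; honor check mode by
    # skipping the API call.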
    def delete_object(self, obj):
        if not self._module.check_mode:
            self.put(self.endpoint_url(OPERATION_DELETE, obj.get("CheckID")))
        return {}


def main():
    module = AnsibleModule(
        _ARGUMENT_SPEC,
        mutually_exclusive=_MUTUALLY_EXCLUSIVE,
        required_if=_REQUIRED_IF,
        required_by=_REQUIRED_BY,
        supports_check_mode=True,
    )

    consul_module = ConsulAgentCheckModule(module)
    consul_module.execute()


if __name__ == "__main__":
    main()

289 plugins/modules/consul_agent_service.py Normal file
@@ -0,0 +1,289 @@
#!/usr/bin/python
# -*- coding: utf-8 -*-

# Copyright (c) 2024, Michael Ilg
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
# SPDX-License-Identifier: GPL-3.0-or-later

from __future__ import absolute_import, division, print_function

__metaclass__ = type

DOCUMENTATION = '''
module: consul_agent_service
short_description: Add, modify and delete services within a Consul cluster
version_added: 9.1.0
description:
  - Allows the addition, modification and deletion of services in a Consul
    cluster via the agent.
  - There are currently no plans to create services and checks in one step.
    This is because the Consul API does not provide checks for a service and
    the checks themselves do not match the module parameters.
    Therefore, only a service without checks can be created in this module.
author:
  - Michael Ilg (@Ilgmi)
extends_documentation_fragment:
  - community.general.consul
  - community.general.consul.actiongroup_consul
  - community.general.consul.token
  - community.general.attributes
attributes:
  check_mode:
    support: full
  diff_mode:
    support: partial
    details:
      - In check mode the diff will miss operational attributes.
options:
  state:
    description:
      - Whether the service should be present or absent.
    choices: ['present', 'absent']
    default: present
    type: str
  name:
    description:
      - Unique name for the service on a node, must be unique per node,
        required if registering a service.
    type: str
  id:
    description:
      - Specifies a unique ID for this service. This must be unique per agent. This defaults to the O(name) parameter if not provided.
        If O(state=absent), defaults to the service name if supplied.
    type: str
  tags:
    description:
      - Tags that will be attached to the service registration.
    type: list
    elements: str
  address:
    description:
      - The address to advertise that the service will be listening on.
        This value will be passed as the C(address) parameter to Consul's
        C(/v1/agent/service/register) API method, so refer to the Consul API
        documentation for further details.
    type: str
  meta:
    description:
      - Optional metadata used for filtering.
        For keys, the characters C(A-Z), C(a-z), C(0-9), C(_), C(-) are allowed.
        Characters that are not allowed are replaced with underscores.
    type: dict
  service_port:
    description:
      - The port on which the service is listening. Can optionally be supplied for
        registration of a service, that is, if O(name) or O(id) is set.
    type: int
  enable_tag_override:
    description:
      - Disables the anti-entropy feature for this service's tags.
        If EnableTagOverride is set to true, then external agents can update this service in the catalog and modify the tags.
    type: bool
    default: False
  weights:
    description:
      - Specifies weights for the service.
    type: dict
    suboptions:
      passing:
        description:
          - Weights for passing.
        type: int
        default: 1
      warning:
        description:
          - Weights for warning.
        type: int
        default: 1
    default: {"passing": 1, "warning": 1}
'''

EXAMPLES = '''
- name: Register nginx service with the local consul agent
  community.general.consul_agent_service:
    host: consul1.example.com
    token: some_management_acl
    name: nginx
    service_port: 80

# Health checks are not created by this module; register them separately
# with community.general.consul_agent_check.

- name: Register external service nginx available at 10.1.5.23
  community.general.consul_agent_service:
    host: consul1.example.com
    token: some_management_acl
    name: nginx
    service_port: 80
    address: 10.1.5.23

- name: Register nginx with some service tags
  community.general.consul_agent_service:
    host: consul1.example.com
    token: some_management_acl
    name: nginx
    service_port: 80
    tags:
      - prod
      - webservers

- name: Register nginx with some service meta
  community.general.consul_agent_service:
    host: consul1.example.com
    token: some_management_acl
    name: nginx
    service_port: 80
    meta:
      nginx_version: 1.25.3
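
# A sketch with hypothetical weights, matching the O(weights) docs above:
# DNS responses favor passing instances 3:1 over warning ones, and external
# agents may update the tags because of O(enable_tag_override).
- name: Register nginx with custom weights and tag override
  community.general.consul_agent_service:
    host: consul1.example.com
    token: some_management_acl
    name: nginx
    service_port: 80
    enable_tag_override: true
    weights:
      passing: 3
      warning: 1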

- name: Remove nginx service
  community.general.consul_agent_service:
    host: consul1.example.com
    token: some_management_acl
    id: nginx
    state: absent

- name: Register celery worker service
  community.general.consul_agent_service:
    host: consul1.example.com
    token: some_management_acl
    name: celery-worker
    tags:
      - prod
      - worker
'''

RETURN = """
service:
  description: The service as returned by the Consul HTTP API.
  returned: always
  type: dict
  sample:
    ID: nginx
    Service: nginx
    Address: localhost
    Port: 80
    Tags:
      - http
    Meta:
      nginx_version: 1.23.3
    Datacenter: dc1
    Weights:
      Passing: 1
      Warning: 1
    ContentHash: 61a245cd985261ac
    EnableTagOverride: false
operation:
  description: The operation performed.
  returned: changed
  type: str
  sample: update
"""

from ansible.module_utils.basic import AnsibleModule
from ansible_collections.community.general.plugins.module_utils.consul import (
    AUTH_ARGUMENTS_SPEC,
    OPERATION_CREATE,
    OPERATION_UPDATE,
    OPERATION_DELETE,
    _ConsulModule
)

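# These check constraints mirror consul_agent_check, but they are not passed
# to AnsibleModule in main() below, since this module registers services only.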
_CHECK_MUTUALLY_EXCLUSIVE = [('args', 'ttl', 'tcp', 'http')]
_CHECK_REQUIRED_BY = {
    'args': 'interval',
    'http': 'interval',
    'tcp': 'interval',
}

_ARGUMENT_SPEC = {
    "state": dict(default="present", choices=["present", "absent"]),
    "name": dict(type='str'),
    "id": dict(type='str'),
    "tags": dict(type='list', elements='str'),
    "address": dict(type='str'),
    "meta": dict(type='dict'),
    "service_port": dict(type='int'),
    "enable_tag_override": dict(type='bool', default=False),
    "weights": dict(type='dict', options=dict(
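        # no_log=False: the name 'passing' matches Ansible's password-name
        # heuristic, which would otherwise emit a false no_log warning.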
        passing=dict(type='int', default=1, no_log=False),
        warning=dict(type='int', default=1)
    ), default={"passing": 1, "warning": 1})
}

_REQUIRED_IF = [
    ('state', 'present', ['name']),
    ('state', 'absent', ('id', 'name'), True),
]

_ARGUMENT_SPEC.update(AUTH_ARGUMENTS_SPEC)


class ConsulAgentServiceModule(_ConsulModule):
    api_endpoint = "agent/service"
    result_key = "service"
    unique_identifiers = ["id", "name"]
    operational_attributes = {"Service", "ContentHash", "Datacenter"}

    def endpoint_url(self, operation, identifier=None):
        if operation in [OPERATION_CREATE, OPERATION_UPDATE]:
            return "/".join([self.api_endpoint, "register"])
        if operation == OPERATION_DELETE:
            return "/".join([self.api_endpoint, "deregister", identifier])

        return super(ConsulAgentServiceModule, self).endpoint_url(operation, identifier)

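    # Consul's register endpoint expects CamelCase keys: map ServicePort
    # to Port and default the ID to the service Name.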
    def prepare_object(self, existing, obj):
        existing = super(ConsulAgentServiceModule, self).prepare_object(existing, obj)
        if "ServicePort" in existing:
            existing["Port"] = existing.pop("ServicePort")

        if "ID" not in existing:
            existing["ID"] = existing["Name"]

        return existing

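    # Normalize both sides to the same shape (Service instead of Name, Port
    # instead of ServicePort) before diffing, so an unchanged service is not
    # reported as an update.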
    def needs_update(self, api_obj, module_obj):
        obj = {}
        if "Service" in api_obj:
            obj["Service"] = api_obj["Service"]
        api_obj = self.prepare_object(api_obj, obj)

        if "Name" in module_obj:
            module_obj["Service"] = module_obj.pop("Name")
        if "ServicePort" in module_obj:
            module_obj["Port"] = module_obj.pop("ServicePort")

        return super(ConsulAgentServiceModule, self).needs_update(api_obj, module_obj)

    def delete_object(self, obj):
        if not self._module.check_mode:
            url = self.endpoint_url(OPERATION_DELETE, self.id_from_obj(obj, camel_case=True))
            self.put(url)
        return {}


def main():
    module = AnsibleModule(
        _ARGUMENT_SPEC,
        required_if=_REQUIRED_IF,
        supports_check_mode=True,
    )

    consul_module = ConsulAgentServiceModule(module)
    consul_module.execute()


if __name__ == "__main__":
    main()

@@ -168,7 +168,7 @@ def normalize_ttl(ttl):
 class ConsulAuthMethodModule(_ConsulModule):
     api_endpoint = "acl/auth-method"
     result_key = "auth_method"
-    unique_identifier = "name"
+    unique_identifiers = ["name"]
 
     def map_param(self, k, v, is_update):
         if k == "config" and v:

@@ -124,7 +124,7 @@ from ansible_collections.community.general.plugins.module_utils.consul import (
 class ConsulBindingRuleModule(_ConsulModule):
     api_endpoint = "acl/binding-rule"
     result_key = "binding_rule"
-    unique_identifier = "id"
+    unique_identifiers = ["id"]
 
     def read_object(self):
         url = "acl/binding-rules?authmethod={0}".format(self.params["auth_method"])

@@ -33,6 +33,8 @@ attributes:
     version_added: 8.3.0
     details:
       - In check mode the diff will miss operational attributes.
+  action_group:
+    version_added: 8.3.0
 options:
   state:
     description:
@@ -143,7 +145,7 @@ _ARGUMENT_SPEC.update(AUTH_ARGUMENTS_SPEC)
 class ConsulPolicyModule(_ConsulModule):
     api_endpoint = "acl/policy"
     result_key = "policy"
-    unique_identifier = "id"
+    unique_identifiers = ["id"]
 
     def endpoint_url(self, operation, identifier=None):
         if operation == OPERATION_READ:

@@ -32,6 +32,8 @@ attributes:
     details:
       - In check mode the diff will miss operational attributes.
     version_added: 8.3.0
+  action_group:
+    version_added: 8.3.0
 options:
   name:
     description:
@@ -210,7 +212,7 @@ from ansible_collections.community.general.plugins.module_utils.consul import (
 class ConsulRoleModule(_ConsulModule):
     api_endpoint = "acl/role"
     result_key = "role"
-    unique_identifier = "id"
+    unique_identifiers = ["id"]
 
     def endpoint_url(self, operation, identifier=None):
         if operation == OPERATION_READ:

@@ -29,6 +29,8 @@ attributes:
     support: none
   diff_mode:
     support: none
+  action_group:
+    version_added: 8.3.0
 options:
   id:
     description:

@@ -31,6 +31,8 @@ attributes:
     support: partial
     details:
       - In check mode the diff will miss operational attributes.
+  action_group:
+    version_added: 8.3.0
 options:
   state:
     description:
@@ -233,13 +235,13 @@ def normalize_link_obj(api_obj, module_obj, key):
 class ConsulTokenModule(_ConsulModule):
     api_endpoint = "acl/token"
     result_key = "token"
-    unique_identifier = "accessor_id"
+    unique_identifiers = ["accessor_id"]
 
     create_only_fields = {"expiration_ttl"}
 
     def read_object(self):
         # if `accessor_id` is not supplied we can only create objects and are not idempotent
-        if not self.params.get(self.unique_identifier):
+        if not self.id_from_obj(self.params):
             return None
         return super(ConsulTokenModule, self).read_object()

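The hunks above replace the scalar unique_identifier attribute with a
unique_identifiers list, and consul_token's read_object now resolves the
identifier through id_from_obj. A minimal sketch of how such a helper can
work (the real implementation lives in plugins/module_utils/consul.py;
camel_case_key, assumed here, converts snake_case parameter names to the
CamelCase keys Consul returns):

    def id_from_obj(self, obj, camel_case=False):
        # Return the value of the first configured unique identifier
        # found in obj, optionally matching the CamelCase API spelling.
        def key_func(key):
            return camel_case_key(key) if camel_case else key

        for identifier in self.unique_identifiers:
            key = key_func(identifier)
            if key in obj:
                return obj[key]
        return None
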
Some files were not shown because too many files have changed in this diff