Mirror of https://github.com/ansible-collections/community.general.git (synced 2026-04-29 09:56:53 +00:00)
Compare commits
327 Commits
Commit table: 327 commits listed by abbreviated SHA only (28d7c499b5 through f833982d6f); the Author and Date columns are empty.
@@ -1,3 +1,9 @@
<!--
Copyright (c) Ansible Project
GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
SPDX-License-Identifier: GPL-3.0-or-later
-->

## Azure Pipelines Configuration

Please see the [Documentation](https://github.com/ansible/community/wiki/Testing:-Azure-Pipelines) for more information.
417 .azure-pipelines/azure-pipelines.yml (new file)
@@ -0,0 +1,417 @@
---
# Copyright (c) Ansible Project
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
# SPDX-License-Identifier: GPL-3.0-or-later

trigger:
  batch: true
  branches:
    include:
      - main
      - stable-*

pr:
  autoCancel: true
  branches:
    include:
      - main
      - stable-*

schedules:
  - cron: 0 8 * * *
    displayName: Nightly (main)
    always: true
    branches:
      include:
        - main
  - cron: 0 10 * * *
    displayName: Nightly (active stable branches)
    always: true
    branches:
      include:
        - stable-8
        - stable-7
  - cron: 0 11 * * 0
    displayName: Weekly (old stable branches)
    always: true
    branches:
      include:
        - stable-6

variables:
  - name: checkoutPath
    value: ansible_collections/community/general
  - name: coverageBranches
    value: main
  - name: pipelinesCoverage
    value: coverage
  - name: entryPoint
    value: tests/utils/shippable/shippable.sh
  - name: fetchDepth
    value: 0

resources:
  containers:
    - container: default
      image: quay.io/ansible/azure-pipelines-test-container:4.0.1

pool: Standard

stages:
### Sanity
  - stage: Sanity_devel
    displayName: Sanity devel
    dependsOn: []
    jobs:
      - template: templates/matrix.yml
        parameters:
          nameFormat: Test {0}
          testFormat: devel/sanity/{0}
          targets:
            - test: 1
            - test: 2
            - test: 3
            - test: 4
            - test: extra
  - stage: Sanity_2_16
    displayName: Sanity 2.16
    dependsOn: []
    jobs:
      - template: templates/matrix.yml
        parameters:
          nameFormat: Test {0}
          testFormat: 2.16/sanity/{0}
          targets:
            - test: 1
            - test: 2
            - test: 3
            - test: 4
  - stage: Sanity_2_15
    displayName: Sanity 2.15
    dependsOn: []
    jobs:
      - template: templates/matrix.yml
        parameters:
          nameFormat: Test {0}
          testFormat: 2.15/sanity/{0}
          targets:
            - test: 1
            - test: 2
            - test: 3
            - test: 4
  - stage: Sanity_2_14
    displayName: Sanity 2.14
    dependsOn: []
    jobs:
      - template: templates/matrix.yml
        parameters:
          nameFormat: Test {0}
          testFormat: 2.14/sanity/{0}
          targets:
            - test: 1
            - test: 2
            - test: 3
            - test: 4
### Units
  - stage: Units_devel
    displayName: Units devel
    dependsOn: []
    jobs:
      - template: templates/matrix.yml
        parameters:
          nameFormat: Python {0}
          testFormat: devel/units/{0}/1
          targets:
            - test: 3.7
            - test: 3.8
            - test: 3.9
            - test: '3.10'
            - test: '3.11'
            - test: '3.12'
  - stage: Units_2_16
    displayName: Units 2.16
    dependsOn: []
    jobs:
      - template: templates/matrix.yml
        parameters:
          nameFormat: Python {0}
          testFormat: 2.16/units/{0}/1
          targets:
            - test: 2.7
            - test: 3.6
            - test: "3.11"
  - stage: Units_2_15
    displayName: Units 2.15
    dependsOn: []
    jobs:
      - template: templates/matrix.yml
        parameters:
          nameFormat: Python {0}
          testFormat: 2.15/units/{0}/1
          targets:
            - test: 3.5
            - test: "3.10"
  - stage: Units_2_14
    displayName: Units 2.14
    dependsOn: []
    jobs:
      - template: templates/matrix.yml
        parameters:
          nameFormat: Python {0}
          testFormat: 2.14/units/{0}/1
          targets:
            - test: 3.9

## Remote
  - stage: Remote_devel_extra_vms
    displayName: Remote devel extra VMs
    dependsOn: []
    jobs:
      - template: templates/matrix.yml
        parameters:
          testFormat: devel/{0}
          targets:
            - name: Alpine 3.18
              test: alpine/3.18
            # - name: Fedora 38
            #   test: fedora/38
            - name: Ubuntu 22.04
              test: ubuntu/22.04
          groups:
            - vm
  - stage: Remote_devel
    displayName: Remote devel
    dependsOn: []
    jobs:
      - template: templates/matrix.yml
        parameters:
          testFormat: devel/{0}
          targets:
            - name: macOS 13.2
              test: macos/13.2
            - name: RHEL 9.2
              test: rhel/9.2
            - name: FreeBSD 13.2
              test: freebsd/13.2
          groups:
            - 1
            - 2
            - 3
  - stage: Remote_2_16
    displayName: Remote 2.16
    dependsOn: []
    jobs:
      - template: templates/matrix.yml
        parameters:
          testFormat: 2.16/{0}
          targets:
            #- name: macOS 13.2
            #  test: macos/13.2
            - name: RHEL 8.8
              test: rhel/8.8
            #- name: FreeBSD 13.2
            #  test: freebsd/13.2
          groups:
            - 1
            - 2
            - 3
  - stage: Remote_2_15
    displayName: Remote 2.15
    dependsOn: []
    jobs:
      - template: templates/matrix.yml
        parameters:
          testFormat: 2.15/{0}
          targets:
            - name: RHEL 9.1
              test: rhel/9.1
            - name: RHEL 8.7
              test: rhel/8.7
            - name: RHEL 7.9
              test: rhel/7.9
            - name: FreeBSD 13.1
              test: freebsd/13.1
            - name: FreeBSD 12.4
              test: freebsd/12.4
          groups:
            - 1
            - 2
            - 3
  - stage: Remote_2_14
    displayName: Remote 2.14
    dependsOn: []
    jobs:
      - template: templates/matrix.yml
        parameters:
          testFormat: 2.14/{0}
          targets:
            #- name: macOS 12.0
            #  test: macos/12.0
            - name: RHEL 9.0
              test: rhel/9.0
            #- name: FreeBSD 12.4
            #  test: freebsd/12.4
          groups:
            - 1
            - 2
            - 3

### Docker
  - stage: Docker_devel
    displayName: Docker devel
    dependsOn: []
    jobs:
      - template: templates/matrix.yml
        parameters:
          testFormat: devel/linux/{0}
          targets:
            - name: Fedora 38
              test: fedora38
            - name: Ubuntu 20.04
              test: ubuntu2004
            - name: Ubuntu 22.04
              test: ubuntu2204
            - name: Alpine 3
              test: alpine3
          groups:
            - 1
            - 2
            - 3
  - stage: Docker_2_16
    displayName: Docker 2.16
    dependsOn: []
    jobs:
      - template: templates/matrix.yml
        parameters:
          testFormat: 2.16/linux/{0}
          targets:
            - name: openSUSE 15
              test: opensuse15
          groups:
            - 1
            - 2
            - 3
  - stage: Docker_2_15
    displayName: Docker 2.15
    dependsOn: []
    jobs:
      - template: templates/matrix.yml
        parameters:
          testFormat: 2.15/linux/{0}
          targets:
            - name: Fedora 37
              test: fedora37
            - name: CentOS 7
              test: centos7
          groups:
            - 1
            - 2
            - 3
  - stage: Docker_2_14
    displayName: Docker 2.14
    dependsOn: []
    jobs:
      - template: templates/matrix.yml
        parameters:
          testFormat: 2.14/linux/{0}
          targets:
            - name: Fedora 36
              test: fedora36
          groups:
            - 1
            - 2
            - 3

### Community Docker
  - stage: Docker_community_devel
    displayName: Docker (community images) devel
    dependsOn: []
    jobs:
      - template: templates/matrix.yml
        parameters:
          testFormat: devel/linux-community/{0}
          targets:
            - name: Debian Bullseye
              test: debian-bullseye/3.9
            - name: Debian Bookworm
              test: debian-bookworm/3.11
            - name: ArchLinux
              test: archlinux/3.11
          groups:
            - 1
            - 2
            - 3

### Generic
  - stage: Generic_devel
    displayName: Generic devel
    dependsOn: []
    jobs:
      - template: templates/matrix.yml
        parameters:
          nameFormat: Python {0}
          testFormat: devel/generic/{0}/1
          targets:
            - test: '3.7'
            - test: '3.12'
  - stage: Generic_2_16
    displayName: Generic 2.16
    dependsOn: []
    jobs:
      - template: templates/matrix.yml
        parameters:
          nameFormat: Python {0}
          testFormat: 2.16/generic/{0}/1
          targets:
            - test: '2.7'
            - test: '3.6'
            - test: '3.11'
  - stage: Generic_2_15
    displayName: Generic 2.15
    dependsOn: []
    jobs:
      - template: templates/matrix.yml
        parameters:
          nameFormat: Python {0}
          testFormat: 2.15/generic/{0}/1
          targets:
            - test: '3.9'
  - stage: Generic_2_14
    displayName: Generic 2.14
    dependsOn: []
    jobs:
      - template: templates/matrix.yml
        parameters:
          nameFormat: Python {0}
          testFormat: 2.14/generic/{0}/1
          targets:
            - test: '3.10'

  - stage: Summary
    condition: succeededOrFailed()
    dependsOn:
      - Sanity_devel
      - Sanity_2_16
      - Sanity_2_15
      - Sanity_2_14
      - Units_devel
      - Units_2_16
      - Units_2_15
      - Units_2_14
      - Remote_devel_extra_vms
      - Remote_devel
      - Remote_2_16
      - Remote_2_15
      - Remote_2_14
      - Docker_devel
      - Docker_2_16
      - Docker_2_15
      - Docker_2_14
      - Docker_community_devel
      # Right now all generic tests are disabled. Uncomment when at least one of them is re-enabled.
      # - Generic_devel
      # - Generic_2_16
      # - Generic_2_15
      # - Generic_2_14
    jobs:
      - template: templates/coverage.yml
24 .azure-pipelines/scripts/aggregate-coverage.sh (new executable file)
@@ -0,0 +1,24 @@
#!/usr/bin/env bash
# Copyright (c) Ansible Project
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
# SPDX-License-Identifier: GPL-3.0-or-later

# Aggregate code coverage results for later processing.

set -o pipefail -eu

agent_temp_directory="$1"

PATH="${PWD}/bin:${PATH}"

mkdir "${agent_temp_directory}/coverage/"

options=(--venv --venv-system-site-packages --color -v)

ansible-test coverage combine --group-by command --export "${agent_temp_directory}/coverage/" "${options[@]}"

if ansible-test coverage analyze targets generate --help >/dev/null 2>&1; then
    # Only analyze coverage if the installed version of ansible-test supports it.
    # Doing so allows this script to work unmodified for multiple Ansible versions.
    ansible-test coverage analyze targets generate "${agent_temp_directory}/coverage/coverage-analyze-targets.json" "${options[@]}"
fi
64 .azure-pipelines/scripts/combine-coverage.py (new executable file)
@@ -0,0 +1,64 @@
#!/usr/bin/env python
# Copyright (c) Ansible Project
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
# SPDX-License-Identifier: GPL-3.0-or-later

"""
Combine coverage data from multiple jobs, keeping the data only from the most recent attempt from each job.
Coverage artifacts must be named using the format: "Coverage $(System.JobAttempt) {StableUniqueNameForEachJob}"
The recommended coverage artifact name format is: Coverage $(System.JobAttempt) $(System.StageDisplayName) $(System.JobDisplayName)
Keep in mind that Azure Pipelines does not enforce unique job display names (only names).
It is up to pipeline authors to avoid name collisions when deviating from the recommended format.
"""

from __future__ import (absolute_import, division, print_function)
__metaclass__ = type

import os
import re
import shutil
import sys


def main():
    """Main program entry point."""
    source_directory = sys.argv[1]

    if '/ansible_collections/' in os.getcwd():
        output_path = "tests/output"
    else:
        output_path = "test/results"

    destination_directory = os.path.join(output_path, 'coverage')

    if not os.path.exists(destination_directory):
        os.makedirs(destination_directory)

    jobs = {}
    count = 0

    for name in os.listdir(source_directory):
        match = re.search('^Coverage (?P<attempt>[0-9]+) (?P<label>.+)$', name)
        label = match.group('label')
        attempt = int(match.group('attempt'))
        jobs[label] = max(attempt, jobs.get(label, 0))

    for label, attempt in jobs.items():
        name = 'Coverage {attempt} {label}'.format(label=label, attempt=attempt)
        source = os.path.join(source_directory, name)
        source_files = os.listdir(source)

        for source_file in source_files:
            source_path = os.path.join(source, source_file)
            destination_path = os.path.join(destination_directory, source_file + '.' + label)
            print('"%s" -> "%s"' % (source_path, destination_path))
            shutil.copyfile(source_path, destination_path)
            count += 1

    print('Coverage file count: %d' % count)
    print('##vso[task.setVariable variable=coverageFileCount]%d' % count)
    print('##vso[task.setVariable variable=outputPath]%s' % output_path)


if __name__ == '__main__':
    main()
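The docstring above fixes the artifact naming contract ("Coverage $(System.JobAttempt) {StableUniqueNameForEachJob}"), and the dictionary of labels is what makes re-run jobs safe to combine. A minimal sketch of that "highest attempt wins" step, using made-up artifact directory names rather than output from a real pipeline run:

    import re

    # Hypothetical artifact directories; attempt 2 of the Units job supersedes attempt 1.
    names = [
        'Coverage 1 Units devel Python 3.11',
        'Coverage 2 Units devel Python 3.11',
        'Coverage 1 Sanity devel Test 1',
    ]

    jobs = {}
    for name in names:
        match = re.search('^Coverage (?P<attempt>[0-9]+) (?P<label>.+)$', name)
        label = match.group('label')
        jobs[label] = max(int(match.group('attempt')), jobs.get(label, 0))

    print(jobs)  # {'Units devel Python 3.11': 2, 'Sanity devel Test 1': 1}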
28 .azure-pipelines/scripts/process-results.sh (new executable file)
@@ -0,0 +1,28 @@
#!/usr/bin/env bash
# Copyright (c) Ansible Project
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
# SPDX-License-Identifier: GPL-3.0-or-later

# Check the test results and set variables for use in later steps.

set -o pipefail -eu

if [[ "$PWD" =~ /ansible_collections/ ]]; then
    output_path="tests/output"
else
    output_path="test/results"
fi

echo "##vso[task.setVariable variable=outputPath]${output_path}"

if compgen -G "${output_path}"'/junit/*.xml' > /dev/null; then
    echo "##vso[task.setVariable variable=haveTestResults]true"
fi

if compgen -G "${output_path}"'/bot/ansible-test-*' > /dev/null; then
    echo "##vso[task.setVariable variable=haveBotResults]true"
fi

if compgen -G "${output_path}"'/coverage/*' > /dev/null; then
    echo "##vso[task.setVariable variable=haveCoverageData]true"
fi
105 .azure-pipelines/scripts/publish-codecov.py (new executable file)
@@ -0,0 +1,105 @@
#!/usr/bin/env python
# Copyright (c) Ansible Project
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
# SPDX-License-Identifier: GPL-3.0-or-later

"""
Upload code coverage reports to codecov.io.
Multiple coverage files from multiple languages are accepted and aggregated after upload.
Python coverage, as well as PowerShell and Python stubs can all be uploaded.
"""

import argparse
import dataclasses
import pathlib
import shutil
import subprocess
import tempfile
import typing as t
import urllib.request


@dataclasses.dataclass(frozen=True)
class CoverageFile:
    name: str
    path: pathlib.Path
    flags: t.List[str]


@dataclasses.dataclass(frozen=True)
class Args:
    dry_run: bool
    path: pathlib.Path


def parse_args() -> Args:
    parser = argparse.ArgumentParser()
    parser.add_argument('-n', '--dry-run', action='store_true')
    parser.add_argument('path', type=pathlib.Path)

    args = parser.parse_args()

    # Store arguments in a typed dataclass
    fields = dataclasses.fields(Args)
    kwargs = {field.name: getattr(args, field.name) for field in fields}

    return Args(**kwargs)


def process_files(directory: pathlib.Path) -> t.Tuple[CoverageFile, ...]:
    processed = []
    for file in directory.joinpath('reports').glob('coverage*.xml'):
        name = file.stem.replace('coverage=', '')

        # Get flags from name
        flags = name.replace('-powershell', '').split('=')  # Drop '-powershell' suffix
        flags = [flag if not flag.startswith('stub') else flag.split('-')[0] for flag in flags]  # Remove "-01" from stub files

        processed.append(CoverageFile(name, file, flags))

    return tuple(processed)


def upload_files(codecov_bin: pathlib.Path, files: t.Tuple[CoverageFile, ...], dry_run: bool = False) -> None:
    for file in files:
        cmd = [
            str(codecov_bin),
            '--name', file.name,
            '--file', str(file.path),
        ]
        for flag in file.flags:
            cmd.extend(['--flags', flag])

        if dry_run:
            print(f'DRY-RUN: Would run command: {cmd}')
            continue

        subprocess.run(cmd, check=True)


def download_file(url: str, dest: pathlib.Path, flags: int, dry_run: bool = False) -> None:
    if dry_run:
        print(f'DRY-RUN: Would download {url} to {dest} and set mode to {flags:o}')
        return

    with urllib.request.urlopen(url) as resp:
        with dest.open('w+b') as f:
            # Read data in chunks rather than all at once
            shutil.copyfileobj(resp, f, 64 * 1024)

    dest.chmod(flags)


def main():
    args = parse_args()
    url = 'https://ansible-ci-files.s3.amazonaws.com/codecov/linux/codecov'
    with tempfile.TemporaryDirectory(prefix='codecov-') as tmpdir:
        codecov_bin = pathlib.Path(tmpdir) / 'codecov'
        download_file(url, codecov_bin, 0o755, args.dry_run)

        files = process_files(args.path)
        upload_files(codecov_bin, files, args.dry_run)


if __name__ == '__main__':
    main()
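As a rough illustration of how process_files() derives codecov flags from a report filename, here is the same logic applied to two made-up filenames (illustrative only, not guaranteed to match real ansible-test output):

    # Mirrors the flag-extraction logic from process_files() above.
    def flags_for(filename: str) -> list:
        name = filename.rsplit('.xml', 1)[0].replace('coverage=', '')   # equivalent of file.stem
        flags = name.replace('-powershell', '').split('=')              # drop '-powershell' suffix
        return [flag.split('-')[0] if flag.startswith('stub') else flag for flag in flags]

    print(flags_for('coverage=sanity=3.11.xml'))               # ['sanity', '3.11']
    print(flags_for('coverage=units=stub-01-powershell.xml'))  # ['units', 'stub']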
19 .azure-pipelines/scripts/report-coverage.sh (new executable file)
@@ -0,0 +1,19 @@
#!/usr/bin/env bash
# Copyright (c) Ansible Project
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
# SPDX-License-Identifier: GPL-3.0-or-later

# Generate code coverage reports for uploading to Azure Pipelines and codecov.io.

set -o pipefail -eu

PATH="${PWD}/bin:${PATH}"

if ! ansible-test --help >/dev/null 2>&1; then
    # Install the devel version of ansible-test for generating code coverage reports.
    # This is only used by Ansible Collections, which are typically tested against multiple Ansible versions (in separate jobs).
    # Since a version of ansible-test is required that can work with the output from multiple older releases, the devel version is used.
    pip install https://github.com/ansible/ansible/archive/devel.tar.gz --disable-pip-version-check
fi

ansible-test coverage xml --group-by command --stub --venv --venv-system-site-packages --color -v
38 .azure-pipelines/scripts/run-tests.sh (new executable file)
@@ -0,0 +1,38 @@
#!/usr/bin/env bash
# Copyright (c) Ansible Project
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
# SPDX-License-Identifier: GPL-3.0-or-later

# Configure the test environment and run the tests.

set -o pipefail -eu

entry_point="$1"
test="$2"
read -r -a coverage_branches <<< "$3"  # space separated list of branches to run code coverage on for scheduled builds

export COMMIT_MESSAGE
export COMPLETE
export COVERAGE
export IS_PULL_REQUEST

if [ "${SYSTEM_PULLREQUEST_TARGETBRANCH:-}" ]; then
    IS_PULL_REQUEST=true
    COMMIT_MESSAGE=$(git log --format=%B -n 1 HEAD^2)
else
    IS_PULL_REQUEST=
    COMMIT_MESSAGE=$(git log --format=%B -n 1 HEAD)
fi

COMPLETE=
COVERAGE=

if [ "${BUILD_REASON}" = "Schedule" ]; then
    COMPLETE=yes

    if printf '%s\n' "${coverage_branches[@]}" | grep -q "^${BUILD_SOURCEBRANCHNAME}$"; then
        COVERAGE=yes
    fi
fi

"${entry_point}" "${test}" 2>&1 | "$(dirname "$0")/time-command.py"
29 .azure-pipelines/scripts/time-command.py (new executable file)
@@ -0,0 +1,29 @@
#!/usr/bin/env python
# Copyright (c) Ansible Project
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
# SPDX-License-Identifier: GPL-3.0-or-later

"""Prepends a relative timestamp to each input line from stdin and writes it to stdout."""

from __future__ import (absolute_import, division, print_function)
__metaclass__ = type

import sys
import time


def main():
    """Main program entry point."""
    start = time.time()

    sys.stdin.reconfigure(errors='surrogateescape')
    sys.stdout.reconfigure(errors='surrogateescape')

    for line in sys.stdin:
        seconds = time.time() - start
        sys.stdout.write('%02d:%02d %s' % (seconds // 60, seconds % 60, line))
        sys.stdout.flush()


if __name__ == '__main__':
    main()
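For reference, the '%02d:%02d' formatting above produces a minutes:seconds prefix measured from when the script started, so test output lines piped through it by run-tests.sh look like "01:23 some output line". A one-line sanity check of that arithmetic:

    seconds = 83.4
    print('%02d:%02d' % (seconds // 60, seconds % 60))  # -> 01:23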
44 .azure-pipelines/templates/coverage.yml (new file)
@@ -0,0 +1,44 @@
---
# Copyright (c) Ansible Project
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
# SPDX-License-Identifier: GPL-3.0-or-later

# This template adds a job for processing code coverage data.
# It will upload results to Azure Pipelines and codecov.io.
# Use it from a job stage that completes after all other jobs have completed.
# This can be done by placing it in a separate summary stage that runs after the test stage(s) have completed.

jobs:
  - job: Coverage
    displayName: Code Coverage
    container: default
    workspace:
      clean: all
    steps:
      - checkout: self
        fetchDepth: $(fetchDepth)
        path: $(checkoutPath)
      - task: DownloadPipelineArtifact@2
        displayName: Download Coverage Data
        inputs:
          path: coverage/
          patterns: "Coverage */*=coverage.combined"
      - bash: .azure-pipelines/scripts/combine-coverage.py coverage/
        displayName: Combine Coverage Data
      - bash: .azure-pipelines/scripts/report-coverage.sh
        displayName: Generate Coverage Report
        condition: gt(variables.coverageFileCount, 0)
      - task: PublishCodeCoverageResults@1
        inputs:
          codeCoverageTool: Cobertura
          # Azure Pipelines only accepts a single coverage data file.
          # That means only Python or PowerShell coverage can be uploaded, but not both.
          # Set the "pipelinesCoverage" variable to determine which type is uploaded.
          # Use "coverage" for Python and "coverage-powershell" for PowerShell.
          summaryFileLocation: "$(outputPath)/reports/$(pipelinesCoverage).xml"
        displayName: Publish to Azure Pipelines
        condition: gt(variables.coverageFileCount, 0)
      - bash: .azure-pipelines/scripts/publish-codecov.py "$(outputPath)"
        displayName: Publish to codecov.io
        condition: gt(variables.coverageFileCount, 0)
        continueOnError: true
60 .azure-pipelines/templates/matrix.yml (new file)
@@ -0,0 +1,60 @@
---
# Copyright (c) Ansible Project
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
# SPDX-License-Identifier: GPL-3.0-or-later

# This template uses the provided targets and optional groups to generate a matrix which is then passed to the test template.
# If this matrix template does not provide the required functionality, consider using the test template directly instead.

parameters:
  # A required list of dictionaries, one per test target.
  # Each item in the list must contain a "test" or "name" key.
  # Both may be provided. If one is omitted, the other will be used.
  - name: targets
    type: object

  # An optional list of values which will be used to multiply the targets list into a matrix.
  # Values can be strings or numbers.
  - name: groups
    type: object
    default: []

  # An optional format string used to generate the job name.
  # - {0} is the name of an item in the targets list.
  - name: nameFormat
    type: string
    default: "{0}"

  # An optional format string used to generate the test name.
  # - {0} is the name of an item in the targets list.
  - name: testFormat
    type: string
    default: "{0}"

  # An optional format string used to add the group to the job name.
  # {0} is the formatted name of an item in the targets list.
  # {{1}} is the group -- be sure to include the double "{{" and "}}".
  - name: nameGroupFormat
    type: string
    default: "{0} - {{1}}"

  # An optional format string used to add the group to the test name.
  # {0} is the formatted test of an item in the targets list.
  # {{1}} is the group -- be sure to include the double "{{" and "}}".
  - name: testGroupFormat
    type: string
    default: "{0}/{{1}}"

jobs:
  - template: test.yml
    parameters:
      jobs:
        - ${{ if eq(length(parameters.groups), 0) }}:
            - ${{ each target in parameters.targets }}:
                - name: ${{ format(parameters.nameFormat, coalesce(target.name, target.test)) }}
                  test: ${{ format(parameters.testFormat, coalesce(target.test, target.name)) }}
        - ${{ if not(eq(length(parameters.groups), 0)) }}:
            - ${{ each group in parameters.groups }}:
                - ${{ each target in parameters.targets }}:
                    - name: ${{ format(format(parameters.nameGroupFormat, parameters.nameFormat), coalesce(target.name, target.test), group) }}
                      test: ${{ format(format(parameters.testGroupFormat, parameters.testFormat), coalesce(target.test, target.name), group) }}
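The double-brace escaping in nameGroupFormat and testGroupFormat is easiest to see as a two-step expansion: the outer format() call injects the per-stage format string and turns {{1}} into a literal {1}, and the second call then fills in the target and the group. A small sketch, using Python's str.format() as a stand-in for the pipeline's format() expression (same brace-escaping idea) and the "devel/{0}" testFormat passed by the Remote devel stage above:

    test_group_format = "{0}/{{1}}"   # template default
    test_format = "devel/{0}"         # per-stage value from azure-pipelines.yml

    combined = test_group_format.format(test_format)
    print(combined)                        # -> "devel/{0}/{1}"
    print(combined.format("rhel/9.2", 1))  # -> "devel/rhel/9.2/1"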
50 .azure-pipelines/templates/test.yml (new file)
@@ -0,0 +1,50 @@
---
# Copyright (c) Ansible Project
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
# SPDX-License-Identifier: GPL-3.0-or-later

# This template uses the provided list of jobs to create test one or more test jobs.
# It can be used directly if needed, or through the matrix template.

parameters:
  # A required list of dictionaries, one per test job.
  # Each item in the list must contain a "job" and "name" key.
  - name: jobs
    type: object

jobs:
  - ${{ each job in parameters.jobs }}:
      - job: test_${{ replace(replace(replace(job.test, '/', '_'), '.', '_'), '-', '_') }}
        displayName: ${{ job.name }}
        container: default
        workspace:
          clean: all
        steps:
          - checkout: self
            fetchDepth: $(fetchDepth)
            path: $(checkoutPath)
          - bash: .azure-pipelines/scripts/run-tests.sh "$(entryPoint)" "${{ job.test }}" "$(coverageBranches)"
            displayName: Run Tests
          - bash: .azure-pipelines/scripts/process-results.sh
            condition: succeededOrFailed()
            displayName: Process Results
          - bash: .azure-pipelines/scripts/aggregate-coverage.sh "$(Agent.TempDirectory)"
            condition: eq(variables.haveCoverageData, 'true')
            displayName: Aggregate Coverage Data
          - task: PublishTestResults@2
            condition: eq(variables.haveTestResults, 'true')
            inputs:
              testResultsFiles: "$(outputPath)/junit/*.xml"
            displayName: Publish Test Results
          - task: PublishPipelineArtifact@1
            condition: eq(variables.haveBotResults, 'true')
            displayName: Publish Bot Results
            inputs:
              targetPath: "$(outputPath)/bot/"
              artifactName: "Bot $(System.JobAttempt) $(System.StageDisplayName) $(System.JobDisplayName)"
          - task: PublishPipelineArtifact@1
            condition: eq(variables.haveCoverageData, 'true')
            displayName: Publish Coverage Data
            inputs:
              targetPath: "$(Agent.TempDirectory)/coverage/"
              artifactName: "Coverage $(System.JobAttempt) $(System.StageDisplayName) $(System.JobDisplayName)"
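The nested replace() calls in the job id exist because Azure Pipelines job identifiers essentially allow only letters, digits, and underscores, while test identifiers such as "2.16/linux/opensuse15/1" contain slashes, dots, and dashes. A quick sketch of the same mapping, with Python's str.replace() standing in for the pipeline expression:

    def job_id(test: str) -> str:
        # Same three substitutions as the expression above.
        return 'test_' + test.replace('/', '_').replace('.', '_').replace('-', '_')

    print(job_id('devel/sanity/1'))           # -> test_devel_sanity_1
    print(job_id('2.16/linux/opensuse15/1'))  # -> test_2_16_linux_opensuse15_1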
11 .github/BOTMETA.yml (vendored)
@@ -497,6 +497,9 @@ files:
  $modules/facter.py:
    labels: facter
    maintainers: $team_ansible_core gamethis
  $modules/facter_facts.py:
    labels: facter
    maintainers: russoz $team_ansible_core gamethis
  $modules/filesize.py:
    maintainers: quidame
  $modules/filesystem.py:
@@ -642,7 +645,6 @@ files:
    maintainers: bregman-arie
  $modules/ipa_:
    maintainers: $team_ipa
    ignore: fxfitz
  $modules/ipbase_info.py:
    maintainers: dominikkukacka
  $modules/ipa_pwpolicy.py:
@@ -1395,8 +1397,6 @@ files:
    ignore: matze
    labels: zypper
    maintainers: $team_suse
  $plugin_utils/unsafe.py:
    maintainers: felixfontein
  $tests/a_module.py:
    maintainers: felixfontein
#########################
@@ -1416,6 +1416,7 @@ macros:
  becomes: plugins/become
  caches: plugins/cache
  callbacks: plugins/callback
  cliconfs: plugins/cliconf
  connections: plugins/connection
  doc_fragments: plugins/doc_fragments
  filters: plugins/filter
@@ -1423,7 +1424,7 @@ macros:
  lookups: plugins/lookup
  module_utils: plugins/module_utils
  modules: plugins/modules
  plugin_utils: plugins/plugin_utils
  terminals: plugins/terminal
  tests: plugins/test
  team_ansible_core:
  team_aix: MorrisA bcoca d-little flynn1973 gforster kairoaraujo marvin-sinister mator molekuul ramooncamacho wtcross
@@ -1435,7 +1436,7 @@ macros:
  team_gitlab: Lunik Shaps marwatk waheedi zanssa scodeman metanovii sh0shin nejch lgatellier suukit
  team_hpux: bcoca davx8342
  team_huawei: QijunPan TommyLike edisonxiang freesky-edward hwDCN niuzhenguo xuxiaowei0512 yanzhangi zengchen1024 zhongjun2
  team_ipa: Akasurde Nosmoht justchris1
  team_ipa: Akasurde Nosmoht fxfitz justchris1
  team_jboss: Wolfant jairojunior wbrefvem
  team_keycloak: eikef ndclt mattock
  team_linode: InTheCloudDan decentral1se displague rmcintosh Charliekenney23 LBGarber
181 .github/workflows/ansible-test.yml (vendored, new file)
@@ -0,0 +1,181 @@
---
# Copyright (c) Ansible Project
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
# SPDX-License-Identifier: GPL-3.0-or-later

# For the comprehensive list of the inputs supported by the ansible-community/ansible-test-gh-action GitHub Action, see
# https://github.com/marketplace/actions/ansible-test

name: EOL CI
on:
  # Run EOL CI against all pushes (direct commits, also merged PRs), Pull Requests
  push:
    branches:
      - main
      - stable-*
  pull_request:
  # Run EOL CI once per day (at 08:00 UTC)
  schedule:
    - cron: '0 8 * * *'

concurrency:
  # Make sure there is at most one active run per PR, but do not cancel any non-PR runs
  group: ${{ github.workflow }}-${{ (github.head_ref && github.event.number) || github.run_id }}
  cancel-in-progress: true

jobs:
  sanity:
    name: EOL Sanity (Ⓐ${{ matrix.ansible }})
    strategy:
      matrix:
        ansible:
          - '2.13'
    # Ansible-test on various stable branches does not yet work well with cgroups v2.
    # Since ubuntu-latest now uses Ubuntu 22.04, we need to fall back to the ubuntu-20.04
    # image for these stable branches. The list of branches where this is necessary will
    # shrink over time, check out https://github.com/ansible-collections/news-for-maintainers/issues/28
    # for the latest list.
    runs-on: ubuntu-latest
    steps:
      - name: Perform sanity testing
        uses: felixfontein/ansible-test-gh-action@main
        with:
          ansible-core-version: stable-${{ matrix.ansible }}
          coverage: ${{ github.event_name == 'schedule' && 'always' || 'never' }}
          pull-request-change-detection: 'true'
          testing-type: sanity

  units:
    # Ansible-test on various stable branches does not yet work well with cgroups v2.
    # Since ubuntu-latest now uses Ubuntu 22.04, we need to fall back to the ubuntu-20.04
    # image for these stable branches. The list of branches where this is necessary will
    # shrink over time, check out https://github.com/ansible-collections/news-for-maintainers/issues/28
    # for the latest list.
    runs-on: ubuntu-latest
    name: EOL Units (Ⓐ${{ matrix.ansible }}+py${{ matrix.python }})
    strategy:
      # As soon as the first unit test fails, cancel the others to free up the CI queue
      fail-fast: true
      matrix:
        ansible:
          - ''
        python:
          - ''
        exclude:
          - ansible: ''
        include:
          - ansible: '2.13'
            python: '2.7'
          - ansible: '2.13'
            python: '3.8'
          - ansible: '2.13'
            python: '2.7'
          - ansible: '2.13'
            python: '3.8'

    steps:
      - name: >-
          Perform unit testing against
          Ansible version ${{ matrix.ansible }}
        uses: felixfontein/ansible-test-gh-action@main
        with:
          ansible-core-version: stable-${{ matrix.ansible }}
          coverage: ${{ github.event_name == 'schedule' && 'always' || 'never' }}
          pre-test-cmd: >-
            mkdir -p ../../ansible
            ;
            git clone --depth=1 --single-branch https://github.com/ansible-collections/community.internal_test_tools.git ../../community/internal_test_tools
          pull-request-change-detection: 'true'
          target-python-version: ${{ matrix.python }}
          testing-type: units

  integration:
    # Ansible-test on various stable branches does not yet work well with cgroups v2.
    # Since ubuntu-latest now uses Ubuntu 22.04, we need to fall back to the ubuntu-20.04
    # image for these stable branches. The list of branches where this is necessary will
    # shrink over time, check out https://github.com/ansible-collections/news-for-maintainers/issues/28
    # for the latest list.
    runs-on: ubuntu-latest
    name: EOL I (Ⓐ${{ matrix.ansible }}+${{ matrix.docker }}+py${{ matrix.python }}:${{ matrix.target }})
    strategy:
      fail-fast: false
      matrix:
        ansible:
          - ''
        docker:
          - ''
        python:
          - ''
        target:
          - ''
        exclude:
          - ansible: ''
        include:
          # 2.13
          - ansible: '2.13'
            docker: fedora35
            python: ''
            target: azp/posix/1/
          - ansible: '2.13'
            docker: fedora35
            python: ''
            target: azp/posix/2/
          - ansible: '2.13'
            docker: fedora35
            python: ''
            target: azp/posix/3/
          - ansible: '2.13'
            docker: opensuse15py2
            python: ''
            target: azp/posix/1/
          - ansible: '2.13'
            docker: opensuse15py2
            python: ''
            target: azp/posix/2/
          - ansible: '2.13'
            docker: opensuse15py2
            python: ''
            target: azp/posix/3/
          - ansible: '2.13'
            docker: alpine3
            python: ''
            target: azp/posix/1/
          - ansible: '2.13'
            docker: alpine3
            python: ''
            target: azp/posix/2/
          - ansible: '2.13'
            docker: alpine3
            python: ''
            target: azp/posix/3/
          # Right now all generic tests are disabled. Uncomment when at least one of them is re-enabled.
          # - ansible: '2.13'
          #   docker: default
          #   python: '3.9'
          #   target: azp/generic/1/

    steps:
      - name: >-
          Perform integration testing against
          Ansible version ${{ matrix.ansible }}
          under Python ${{ matrix.python }}
        uses: felixfontein/ansible-test-gh-action@main
        with:
          ansible-core-version: stable-${{ matrix.ansible }}
          coverage: ${{ github.event_name == 'schedule' && 'always' || 'never' }}
          docker-image: ${{ matrix.docker }}
          integration-continue-on-error: 'false'
          integration-diff: 'false'
          integration-retry-on-error: 'true'
          pre-test-cmd: >-
            mkdir -p ../../ansible
            ;
            git clone --depth=1 --single-branch https://github.com/ansible-collections/ansible.posix.git ../../ansible/posix
            ;
            git clone --depth=1 --single-branch https://github.com/ansible-collections/community.crypto.git ../../community/crypto
            ;
            git clone --depth=1 --single-branch https://github.com/ansible-collections/community.internal_test_tools.git ../../community/internal_test_tools
          pull-request-change-detection: 'true'
          target: ${{ matrix.target }}
          target-python-version: ${{ matrix.python }}
          testing-type: integration
36 .github/workflows/codeql-analysis.yml (vendored, new file)
@@ -0,0 +1,36 @@
---
# Copyright (c) Ansible Project
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
# SPDX-License-Identifier: GPL-3.0-or-later

name: "Code scanning - action"

on:
  schedule:
    - cron: '26 19 * * 1'
  workflow_dispatch:

permissions:
  contents: read

jobs:
  CodeQL-Build:

    permissions:
      actions: read  # for github/codeql-action/init to get workflow details
      contents: read  # for actions/checkout to fetch code
      security-events: write  # for github/codeql-action/autobuild to send a status report
    runs-on: ubuntu-latest

    steps:
      - name: Checkout repository
        uses: actions/checkout@v4

      # Initializes the CodeQL tools for scanning.
      - name: Initialize CodeQL
        uses: github/codeql-action/init@v2
        with:
          languages: python

      - name: Perform CodeQL Analysis
        uses: github/codeql-action/analyze@v2
35 .github/workflows/reuse.yml (vendored, new file)
@@ -0,0 +1,35 @@
---
# Copyright (c) Ansible Project
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
# SPDX-License-Identifier: GPL-3.0-or-later

name: Verify REUSE

on:
  push:
    branches: [main]
  pull_request_target:
    types: [opened, synchronize, reopened]
    branches: [main]
  # Run CI once per day (at 07:30 UTC)
  schedule:
    - cron: '30 7 * * *'

jobs:
  check:
    permissions:
      contents: read
    runs-on: ubuntu-latest

    steps:
      - uses: actions/checkout@v4
        with:
          ref: ${{ github.event.pull_request.head.sha || '' }}

      - name: Install dependencies
        run: |
          pip install reuse

      - name: Check REUSE compliance
        run: |
          reuse lint
1012 CHANGELOG.md (file diff suppressed because it is too large)
@@ -1,3 +0,0 @@
GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
SPDX-License-Identifier: GPL-3.0-or-later
SPDX-FileCopyrightText: Ansible Project
1027 CHANGELOG.rst (file diff suppressed because it is too large)
@@ -31,9 +31,7 @@ Also, consider taking up a valuable, reviewed, but abandoned pull request which
* Try committing your changes with an informative but short commit message.
* Do not squash your commits and force-push to your branch if not needed. Reviews of your pull request are much easier with individual commits to comprehend the pull request history. All commits of your pull request branch will be squashed into one commit by GitHub upon merge.
* Do not add merge commits to your PR. The bot will complain and you will have to rebase ([instructions for rebasing](https://docs.ansible.com/ansible/latest/dev_guide/developing_rebasing.html)) to remove them before your PR can be merged. To avoid that git automatically does merges during pulls, you can configure it to do rebases instead by running `git config pull.rebase true` inside the repository checkout.
* Make sure your PR includes a [changelog fragment](https://docs.ansible.com/ansible/devel/community/collection_development_process.html#creating-a-changelog-fragment).
  * You must not include a fragment for new modules or new plugins. Also you shouldn't include one for docs-only changes. (If you're not sure, simply don't include one, we'll tell you whether one is needed or not :) )
  * Please always include a link to the pull request itself, and if the PR is about an issue, also a link to the issue. Also make sure the fragment ends with a period, and begins with a lower-case letter after `-`. (Again, if you don't do this, we'll add suggestions to fix it, so don't worry too much :) )
* Make sure your PR includes a [changelog fragment](https://docs.ansible.com/ansible/devel/community/development_process.html#creating-changelog-fragments). (You must not include a fragment for new modules or new plugins. Also you shouldn't include one for docs-only changes. If you're not sure, simply don't include one, we'll tell you whether one is needed or not :) )
* Avoid reformatting unrelated parts of the codebase in your PR. These types of changes will likely be requested for reversion, create additional work for reviewers, and may cause approval to be delayed.

You can also read [our Quick-start development guide](https://github.com/ansible/community-docs/blob/main/create_pr_quick_start_guide.rst).
21
README.md
21
README.md
@@ -6,10 +6,9 @@ SPDX-License-Identifier: GPL-3.0-or-later
|
||||
|
||||
# Community General Collection
|
||||
|
||||
[](https://dev.azure.com/ansible/community.general/_build?definitionId=31)
|
||||
[](https://dev.azure.com/ansible/community.general/_build?definitionId=31)
|
||||
[](https://github.com/ansible-collections/community.general/actions)
|
||||
[](https://codecov.io/gh/ansible-collections/community.general)
|
||||
[](https://api.reuse.software/info/github.com/ansible-collections/community.general)
|
||||
|
||||
This repository contains the `community.general` Ansible Collection. The collection is a part of the Ansible package and includes many modules and plugins supported by Ansible community which are not part of more specialized community collections.
|
||||
|
||||
@@ -25,9 +24,7 @@ If you encounter abusive behavior violating the [Ansible Code of Conduct](https:
|
||||
|
||||
## Tested with Ansible
|
||||
|
||||
Tested with the current ansible-core 2.11, ansible-core 2.12, ansible-core 2.13, ansible-core 2.14, ansible-core 2.15, ansible-core 2.16, and ansible-core 2.17 releases. Ansible-core versions before 2.11.0 are not supported. This includes all ansible-base 2.10 and Ansible 2.9 releases.
|
||||
|
||||
Parts of this collection will not work with ansible-core 2.11 on Python 3.12+.
|
||||
Tested with the current ansible-core 2.13, ansible-core 2.14, ansible-core 2.15, ansible-core 2.16 releases and the current development version of ansible-core. Ansible-core versions before 2.13.0 are not supported. This includes all ansible-base 2.10 and Ansible 2.9 releases.
|
||||
|
||||
## External requirements
|
||||
|
||||
@@ -74,13 +71,13 @@ We are actively accepting new contributors.
|
||||
|
||||
All types of contributions are very welcome.
|
||||
|
||||
You don't know how to start? Refer to our [contribution guide](https://github.com/ansible-collections/community.general/blob/stable-7/CONTRIBUTING.md)!
|
||||
You don't know how to start? Refer to our [contribution guide](https://github.com/ansible-collections/community.general/blob/main/CONTRIBUTING.md)!
|
||||
|
||||
The current maintainers are listed in the [commit-rights.md](https://github.com/ansible-collections/community.general/blob/stable-7/commit-rights.md#people) file. If you have questions or need help, feel free to mention them in the proposals.
|
||||
The current maintainers are listed in the [commit-rights.md](https://github.com/ansible-collections/community.general/blob/main/commit-rights.md#people) file. If you have questions or need help, feel free to mention them in the proposals.
You can find more information in the [developer guide for collections](https://docs.ansible.com/ansible/devel/dev_guide/developing_collections.html#contributing-to-collections), and in the [Ansible Community Guide](https://docs.ansible.com/ansible/latest/community/index.html).

Also for some notes specific to this collection see [our CONTRIBUTING documentation](https://github.com/ansible-collections/community.general/blob/stable-7/CONTRIBUTING.md).
Also for some notes specific to this collection see [our CONTRIBUTING documentation](https://github.com/ansible-collections/community.general/blob/main/CONTRIBUTING.md).

### Running tests

@@ -90,7 +87,7 @@ See [here](https://docs.ansible.com/ansible/devel/dev_guide/developing_collectio

To learn how to maintain / become a maintainer of this collection, refer to:

* [Committer guidelines](https://github.com/ansible-collections/community.general/blob/stable-7/commit-rights.md).
* [Committer guidelines](https://github.com/ansible-collections/community.general/blob/main/commit-rights.md).
* [Maintainer guidelines](https://github.com/ansible/community-docs/blob/main/maintaining.rst).

It is necessary for maintainers of this collection to be subscribed to:

@@ -118,7 +115,7 @@ See the [Releasing guidelines](https://github.com/ansible/community-docs/blob/ma

## Release notes

See the [changelog](https://github.com/ansible-collections/community.general/blob/stable-7/CHANGELOG.md).
See the [changelog](https://github.com/ansible-collections/community.general/blob/stable-8/CHANGELOG.rst).

## Roadmap

@@ -137,8 +134,8 @@ See [this issue](https://github.com/ansible-collections/community.general/issues

This collection is primarily licensed and distributed as a whole under the GNU General Public License v3.0 or later.

See [LICENSES/GPL-3.0-or-later.txt](https://github.com/ansible-collections/community.general/blob/stable-7/COPYING) for the full text.
See [LICENSES/GPL-3.0-or-later.txt](https://github.com/ansible-collections/community.general/blob/main/COPYING) for the full text.

Parts of the collection are licensed under the [BSD 2-Clause license](https://github.com/ansible-collections/community.general/blob/stable-7/LICENSES/BSD-2-Clause.txt), the [MIT license](https://github.com/ansible-collections/community.general/blob/stable-7/LICENSES/MIT.txt), and the [PSF 2.0 license](https://github.com/ansible-collections/community.general/blob/stable-7/LICENSES/PSF-2.0.txt).
Parts of the collection are licensed under the [BSD 2-Clause license](https://github.com/ansible-collections/community.general/blob/main/LICENSES/BSD-2-Clause.txt), the [MIT license](https://github.com/ansible-collections/community.general/blob/main/LICENSES/MIT.txt), and the [PSF 2.0 license](https://github.com/ansible-collections/community.general/blob/main/LICENSES/PSF-2.0.txt).

All files have a machine readable `SPDX-License-Identifier:` comment denoting their respective license(s) or an equivalent entry in an accompanying `.license` file. Only changelog fragments (which will not be part of a release) are covered by a blanket statement in `.reuse/dep5`. This conforms to the [REUSE specification](https://reuse.software/spec/).
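For illustration only (not part of this diff): a minimal sketch of the header such a file carries, reusing the two license lines that appear later in this comparison in meta/runtime.yml; the attribution comment is a hypothetical placeholder.

```python
# -*- coding: utf-8 -*-
# Copyright (c) Ansible Project  # hypothetical attribution line for this sketch
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
# SPDX-License-Identifier: GPL-3.0-or-later
```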
File diff suppressed because it is too large
@@ -12,31 +12,23 @@ mention_ancestor: true
flatmap: true
new_plugins_after_name: removed_features
notesdir: fragments
output_formats:
- md
- rst
prelude_section_name: release_summary
prelude_section_title: Release Summary
sections:
- - major_changes
- Major Changes
- - minor_changes
- Minor Changes
- - breaking_changes
- Breaking Changes / Porting Guide
- - deprecated_features
- Deprecated Features
- - removed_features
- Removed Features (previously deprecated)
- - security_fixes
- Security Fixes
- - bugfixes
- Bugfixes
- - known_issues
- Known Issues
title: Community General
trivial_section_name: trivial
use_fqcn: true
add_plugin_period: true
changelog_nice_yaml: true
changelog_sort: version
@@ -22,7 +22,6 @@ communication:
- topic: General usage and support questions
network: Libera
channel: '#ansible'
forums:
- topic: Ansible Forum
# The following URL directly points to the "Get Help" section
url: https://forum.ansible.com/c/help/6/none
mailing_lists:
- topic: Ansible Project List
url: https://groups.google.com/g/ansible-project
@@ -5,7 +5,7 @@
namespace: community
name: general
version: 7.5.9
version: 8.0.2
readme: README.md
authors:
- Ansible (https://github.com/ansible)
@@ -3,7 +3,7 @@
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
# SPDX-License-Identifier: GPL-3.0-or-later

requires_ansible: '>=2.11.0'
requires_ansible: '>=2.13.0'
plugin_routing:
connection:
docker:
@@ -84,7 +84,6 @@ import time
|
||||
import uuid
|
||||
|
||||
from collections import OrderedDict
|
||||
from contextlib import closing
|
||||
from os.path import basename
|
||||
|
||||
from ansible.errors import AnsibleError, AnsibleRuntimeError
|
||||
@@ -202,25 +201,24 @@ class ElasticSource(object):
|
||||
|
||||
apm_cli = self.init_apm_client(apm_server_url, apm_service_name, apm_verify_server_cert, apm_secret_token, apm_api_key)
|
||||
if apm_cli:
|
||||
with closing(apm_cli):
|
||||
instrument() # Only call this once, as early as possible.
|
||||
if traceparent:
|
||||
parent = trace_parent_from_string(traceparent)
|
||||
apm_cli.begin_transaction("Session", trace_parent=parent, start=parent_start_time)
|
||||
else:
|
||||
apm_cli.begin_transaction("Session", start=parent_start_time)
|
||||
# Populate trace metadata attributes
|
||||
if self.ansible_version is not None:
|
||||
label(ansible_version=self.ansible_version)
|
||||
label(ansible_session=self.session, ansible_host_name=self.host, ansible_host_user=self.user)
|
||||
if self.ip_address is not None:
|
||||
label(ansible_host_ip=self.ip_address)
|
||||
instrument() # Only call this once, as early as possible.
|
||||
if traceparent:
|
||||
parent = trace_parent_from_string(traceparent)
|
||||
apm_cli.begin_transaction("Session", trace_parent=parent, start=parent_start_time)
|
||||
else:
|
||||
apm_cli.begin_transaction("Session", start=parent_start_time)
|
||||
# Populate trace metadata attributes
|
||||
if self.ansible_version is not None:
|
||||
label(ansible_version=self.ansible_version)
|
||||
label(ansible_session=self.session, ansible_host_name=self.host, ansible_host_user=self.user)
|
||||
if self.ip_address is not None:
|
||||
label(ansible_host_ip=self.ip_address)
|
||||
|
||||
for task_data in tasks:
|
||||
for host_uuid, host_data in task_data.host_data.items():
|
||||
self.create_span_data(apm_cli, task_data, host_data)
|
||||
for task_data in tasks:
|
||||
for host_uuid, host_data in task_data.host_data.items():
|
||||
self.create_span_data(apm_cli, task_data, host_data)
|
||||
|
||||
apm_cli.end_transaction(name=__name__, result=status, duration=end_time - parent_start_time)
|
||||
apm_cli.end_transaction(name=__name__, result=status, duration=end_time - parent_start_time)
|
||||
|
||||
def create_span_data(self, apm_cli, task_data, host_data):
|
||||
""" create the span with the given TaskData and HostData """
|
||||
|
||||
@@ -18,7 +18,7 @@ DOCUMENTATION = '''
|
||||
requirements:
|
||||
- whitelisting in configuration
|
||||
- certifi (Python library)
|
||||
- flatdict (Python library), if you want to use the O(flatten) option
|
||||
- flatdict (Python library), if you want to use the 'flatten' option
|
||||
options:
|
||||
api:
|
||||
description: URI to the Logentries API.
|
||||
@@ -90,9 +90,9 @@ examples: >
|
||||
api = data.logentries.com
|
||||
port = 10000
|
||||
tls_port = 20000
|
||||
use_tls = true
|
||||
use_tls = no
|
||||
token = dd21fc88-f00a-43ff-b977-e3a4233c53af
|
||||
flatten = false
|
||||
flatten = False
|
||||
'''
|
||||
|
||||
import os
|
||||
@@ -196,11 +196,15 @@ else:
class TLSSocketAppender(PlainTextSocketAppender):
def open_connection(self):
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
context = ssl.create_default_context(
purpose=ssl.Purpose.SERVER_AUTH,
cafile=certifi.where(), )
sock = context.wrap_socket(
sock = ssl.wrap_socket(
sock=sock,
keyfile=None,
certfile=None,
server_side=False,
cert_reqs=ssl.CERT_REQUIRED,
ssl_version=getattr(
ssl, 'PROTOCOL_TLSv1_2', ssl.PROTOCOL_TLSv1),
ca_certs=certifi.where(),
do_handshake_on_connect=True,
suppress_ragged_eofs=True, )
sock.connect((self.LE_API, self.LE_TLS_PORT))
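The hunk above trades `ssl.wrap_socket()` (deprecated for years and removed in Python 3.12) for an `SSLContext` built with `ssl.create_default_context()`, verified against the certifi CA bundle listed in the plugin's requirements. A minimal standalone sketch of the same pattern, with a hypothetical host and port rather than the collection's exact code:

```python
import socket
import ssl

import certifi


def open_tls_connection(host, port):
    # Client-side context that verifies the server against the certifi bundle;
    # this is the modern replacement for the removed ssl.wrap_socket() helper.
    context = ssl.create_default_context(purpose=ssl.Purpose.SERVER_AUTH, cafile=certifi.where())
    sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    tls_sock = context.wrap_socket(sock, server_hostname=host)
    # Handshake happens on connect (do_handshake_on_connect defaults to True).
    tls_sock.connect((host, port))
    return tls_sock
```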
|
||||
|
||||
@@ -350,8 +350,7 @@ class OpenTelemetrySource(object):
|
||||
if not disable_logs:
|
||||
# This will avoid populating span attributes to the logs
|
||||
span.add_event(task_data.dump, attributes={} if disable_attributes_in_logs else attributes)
|
||||
# Close span always
|
||||
span.end(end_time=host_data.finish)
|
||||
span.end(end_time=host_data.finish)
|
||||
|
||||
def set_span_attributes(self, span, attributes):
|
||||
""" update the span attributes with the given attributes if not None """
|
||||
@@ -498,12 +497,6 @@ class CallbackModule(CallbackBase):
|
||||
# See https://github.com/open-telemetry/opentelemetry-specification/issues/740
|
||||
self.traceparent = self.get_option('traceparent')
|
||||
|
||||
def dump_results(self, result):
|
||||
""" dump the results if disable_logs is not enabled """
|
||||
if self.disable_logs:
|
||||
return ""
|
||||
return self._dump_results(result._result)
|
||||
|
||||
def v2_playbook_on_start(self, playbook):
|
||||
self.ansible_playbook = basename(playbook._file_name)
|
||||
|
||||
@@ -553,7 +546,7 @@ class CallbackModule(CallbackBase):
|
||||
self.tasks_data,
|
||||
status,
|
||||
result,
|
||||
self.dump_results(result)
|
||||
self._dump_results(result._result)
|
||||
)
|
||||
|
||||
def v2_runner_on_ok(self, result):
|
||||
@@ -561,7 +554,7 @@ class CallbackModule(CallbackBase):
|
||||
self.tasks_data,
|
||||
'ok',
|
||||
result,
|
||||
self.dump_results(result)
|
||||
self._dump_results(result._result)
|
||||
)
|
||||
|
||||
def v2_runner_on_skipped(self, result):
|
||||
@@ -569,7 +562,7 @@ class CallbackModule(CallbackBase):
|
||||
self.tasks_data,
|
||||
'skipped',
|
||||
result,
|
||||
self.dump_results(result)
|
||||
self._dump_results(result._result)
|
||||
)
|
||||
|
||||
def v2_playbook_on_include(self, included_file):
|
||||
|
||||
@@ -44,26 +44,17 @@ from ansible import constants as C
|
||||
from ansible.plugins.callback import CallbackBase
|
||||
from ansible.module_utils.common.text.converters import to_text
|
||||
|
||||
try:
|
||||
codeCodes = C.COLOR_CODES
|
||||
except AttributeError:
|
||||
# This constant was moved to ansible.constants in
|
||||
# https://github.com/ansible/ansible/commit/1202dd000f10b0e8959019484f1c3b3f9628fc67
|
||||
# (will be included in ansible-core 2.11.0). For older Ansible/ansible-base versions,
|
||||
# we include from the original location.
|
||||
from ansible.utils.color import codeCodes
|
||||
|
||||
|
||||
DONT_COLORIZE = False
|
||||
COLORS = {
|
||||
'normal': '\033[0m',
|
||||
'ok': '\033[{0}m'.format(codeCodes[C.COLOR_OK]),
|
||||
'ok': '\033[{0}m'.format(C.COLOR_CODES[C.COLOR_OK]),
|
||||
'bold': '\033[1m',
|
||||
'not_so_bold': '\033[1m\033[34m',
|
||||
'changed': '\033[{0}m'.format(codeCodes[C.COLOR_CHANGED]),
|
||||
'failed': '\033[{0}m'.format(codeCodes[C.COLOR_ERROR]),
|
||||
'changed': '\033[{0}m'.format(C.COLOR_CODES[C.COLOR_CHANGED]),
|
||||
'failed': '\033[{0}m'.format(C.COLOR_CODES[C.COLOR_ERROR]),
|
||||
'endc': '\033[0m',
|
||||
'skipped': '\033[{0}m'.format(codeCodes[C.COLOR_SKIP]),
|
||||
'skipped': '\033[{0}m'.format(C.COLOR_CODES[C.COLOR_SKIP]),
|
||||
}
|
||||
|
||||
|
||||
|
||||
@@ -19,16 +19,6 @@ DOCUMENTATION = '''
|
||||
- default_callback
|
||||
requirements:
|
||||
- set as stdout in configuration
|
||||
seealso:
|
||||
- plugin: ansible.builtin.default
|
||||
plugin_type: callback
|
||||
description: >
|
||||
There is a parameter O(ansible.builtin.default#callback:result_format) in P(ansible.builtin.default#callback)
|
||||
that allows you to change the output format to YAML.
|
||||
notes:
|
||||
- >
|
||||
With ansible-core 2.13 or newer, you can instead specify V(yaml) for the parameter O(ansible.builtin.default#callback:result_format)
|
||||
in P(ansible.builtin.default#callback).
|
||||
'''
|
||||
|
||||
import yaml
|
||||
|
||||
@@ -71,10 +71,11 @@ class Connection(ConnectionBase):
|
||||
msg = "lxc python bindings are not installed"
|
||||
raise errors.AnsibleError(msg)
|
||||
|
||||
if self.container:
|
||||
container_name = self.get_option('remote_addr')
|
||||
if self.container and self.container_name == container_name:
|
||||
return
|
||||
|
||||
self.container_name = self.get_option('remote_addr')
|
||||
self.container_name = container_name
|
||||
|
||||
self._display.vvv("THIS IS A LOCAL LXC DIR", host=self.container_name)
|
||||
self.container = _lxc.Container(self.container_name)
|
||||
|
||||
@@ -10,13 +10,15 @@ __metaclass__ = type
|
||||
DOCUMENTATION = '''
|
||||
author: Matt Clay (@mattclay) <matt@mystile.com>
|
||||
name: lxd
|
||||
short_description: Run tasks in LXD instances via C(lxc) CLI
|
||||
short_description: Run tasks in lxc containers via lxc CLI
|
||||
description:
|
||||
- Run commands or put/fetch files to an existing instance using C(lxc) CLI.
|
||||
- Run commands or put/fetch files to an existing lxc container using lxc CLI
|
||||
options:
|
||||
remote_addr:
|
||||
description:
|
||||
- Container identifier.
|
||||
- Instance (container/VM) identifier.
|
||||
- Since community.general 8.0.0, a FQDN can be provided; in that case, the first component (the part before C(.))
|
||||
is used as the instance identifier.
|
||||
default: inventory_hostname
|
||||
vars:
|
||||
- name: inventory_hostname
|
||||
@@ -24,7 +26,7 @@ DOCUMENTATION = '''
|
||||
- name: ansible_lxd_host
|
||||
executable:
|
||||
description:
|
||||
- Shell to use for execution inside instance.
|
||||
- shell to use for execution inside container
|
||||
default: /bin/sh
|
||||
vars:
|
||||
- name: ansible_executable
|
||||
@@ -69,28 +71,32 @@ class Connection(ConnectionBase):
|
||||
raise AnsibleError("lxc command not found in PATH")
|
||||
|
||||
if self._play_context.remote_user is not None and self._play_context.remote_user != 'root':
|
||||
self._display.warning('lxd does not support remote_user, using default: root')
|
||||
self._display.warning('lxd does not support remote_user, using container default: root')
|
||||
|
||||
def _host(self):
|
||||
""" translate remote_addr to lxd (short) hostname """
|
||||
return self.get_option("remote_addr").split(".", 1)[0]
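The `_host()` helper above truncates an FQDN-style `remote_addr` to its first label, matching the documented behaviour that the part before the first `.` is used as the instance identifier. A standalone illustration with a hypothetical value:

```python
# Same truncation as _host(), outside the plugin:
remote_addr = "container01.example.com"  # hypothetical inventory hostname
print(remote_addr.split(".", 1)[0])  # -> container01
```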
|
||||
|
||||
def _connect(self):
|
||||
"""connect to lxd (nothing to do here) """
|
||||
super(Connection, self)._connect()
|
||||
|
||||
if not self._connected:
|
||||
self._display.vvv(u"ESTABLISH LXD CONNECTION FOR USER: root", host=self.get_option('remote_addr'))
|
||||
self._display.vvv(u"ESTABLISH LXD CONNECTION FOR USER: root", host=self._host())
|
||||
self._connected = True
|
||||
|
||||
def exec_command(self, cmd, in_data=None, sudoable=True):
|
||||
""" execute a command on the lxd host """
|
||||
super(Connection, self).exec_command(cmd, in_data=in_data, sudoable=sudoable)
|
||||
|
||||
self._display.vvv(u"EXEC {0}".format(cmd), host=self.get_option('remote_addr'))
|
||||
self._display.vvv(u"EXEC {0}".format(cmd), host=self._host())
|
||||
|
||||
local_cmd = [self._lxc_cmd]
|
||||
if self.get_option("project"):
|
||||
local_cmd.extend(["--project", self.get_option("project")])
|
||||
local_cmd.extend([
|
||||
"exec",
|
||||
"%s:%s" % (self.get_option("remote"), self.get_option("remote_addr")),
|
||||
"%s:%s" % (self.get_option("remote"), self._host()),
|
||||
"--",
|
||||
self.get_option("executable"), "-c", cmd
|
||||
])
|
||||
@@ -104,11 +110,11 @@ class Connection(ConnectionBase):
|
||||
stdout = to_text(stdout)
|
||||
stderr = to_text(stderr)
|
||||
|
||||
if stderr == "error: Container is not running.\n":
|
||||
raise AnsibleConnectionFailure("container not running: %s" % self.get_option('remote_addr'))
|
||||
if "is not running" in stderr:
|
||||
raise AnsibleConnectionFailure("instance not running: %s" % self._host())
|
||||
|
||||
if stderr == "error: not found\n":
|
||||
raise AnsibleConnectionFailure("container not found: %s" % self.get_option('remote_addr'))
|
||||
if "not found" in stderr:
|
||||
raise AnsibleConnectionFailure("instance not found: %s" % self._host())
|
||||
|
||||
return process.returncode, stdout, stderr
|
||||
|
||||
@@ -116,7 +122,7 @@ class Connection(ConnectionBase):
|
||||
""" put a file from local to lxd """
|
||||
super(Connection, self).put_file(in_path, out_path)
|
||||
|
||||
self._display.vvv(u"PUT {0} TO {1}".format(in_path, out_path), host=self.get_option('remote_addr'))
|
||||
self._display.vvv(u"PUT {0} TO {1}".format(in_path, out_path), host=self._host())
|
||||
|
||||
if not os.path.isfile(to_bytes(in_path, errors='surrogate_or_strict')):
|
||||
raise AnsibleFileNotFound("input path is not a file: %s" % in_path)
|
||||
@@ -127,7 +133,7 @@ class Connection(ConnectionBase):
|
||||
local_cmd.extend([
|
||||
"file", "push",
|
||||
in_path,
|
||||
"%s:%s/%s" % (self.get_option("remote"), self.get_option("remote_addr"), out_path)
|
||||
"%s:%s/%s" % (self.get_option("remote"), self._host(), out_path)
|
||||
])
|
||||
|
||||
local_cmd = [to_bytes(i, errors='surrogate_or_strict') for i in local_cmd]
|
||||
@@ -139,14 +145,14 @@ class Connection(ConnectionBase):
|
||||
""" fetch a file from lxd to local """
|
||||
super(Connection, self).fetch_file(in_path, out_path)
|
||||
|
||||
self._display.vvv(u"FETCH {0} TO {1}".format(in_path, out_path), host=self.get_option('remote_addr'))
|
||||
self._display.vvv(u"FETCH {0} TO {1}".format(in_path, out_path), host=self._host())
|
||||
|
||||
local_cmd = [self._lxc_cmd]
|
||||
if self.get_option("project"):
|
||||
local_cmd.extend(["--project", self.get_option("project")])
|
||||
local_cmd.extend([
|
||||
"file", "pull",
|
||||
"%s:%s/%s" % (self.get_option("remote"), self.get_option("remote_addr"), in_path),
|
||||
"%s:%s/%s" % (self.get_option("remote"), self._host(), in_path),
|
||||
out_path
|
||||
])
|
||||
|
||||
|
||||
@@ -18,6 +18,12 @@ options:
|
||||
- FQDN of Nomad server.
|
||||
required: true
|
||||
type: str
|
||||
port:
|
||||
description:
|
||||
- Port of Nomad server.
|
||||
type: int
|
||||
default: 4646
|
||||
version_added: 8.0.0
|
||||
use_ssl:
|
||||
description:
|
||||
- Use TLS/SSL connection.
|
||||
|
||||
@@ -118,8 +118,6 @@ from ansible.module_utils.common.text.converters import to_text
|
||||
from ansible.plugins.inventory import BaseInventoryPlugin, Cacheable, to_safe_group_name
|
||||
from ansible.module_utils.six import text_type
|
||||
|
||||
from ansible_collections.community.general.plugins.plugin_utils.unsafe import make_unsafe
|
||||
|
||||
# xmlrpc
|
||||
try:
|
||||
import xmlrpclib as xmlrpc_client
|
||||
@@ -276,9 +274,9 @@ class InventoryModule(BaseInventoryPlugin, Cacheable):
|
||||
for host in self._get_systems():
|
||||
# Get the FQDN for the host and add it to the right groups
|
||||
if self.inventory_hostname == 'system':
|
||||
hostname = make_unsafe(host['name']) # None
|
||||
hostname = host['name'] # None
|
||||
else:
|
||||
hostname = make_unsafe(host['hostname']) # None
|
||||
hostname = host['hostname'] # None
|
||||
interfaces = host['interfaces']
|
||||
|
||||
if set(host['mgmt_classes']) & set(self.include_mgmt_classes):
|
||||
@@ -298,7 +296,7 @@ class InventoryModule(BaseInventoryPlugin, Cacheable):
|
||||
if ivalue['management'] or not ivalue['static']:
|
||||
this_dns_name = ivalue.get('dns_name', None)
|
||||
if this_dns_name is not None and this_dns_name != "":
|
||||
hostname = make_unsafe(this_dns_name)
|
||||
hostname = this_dns_name
|
||||
self.display.vvvv('Set hostname to %s from %s\n' % (hostname, iname))
|
||||
|
||||
if hostname == '':
|
||||
@@ -363,18 +361,18 @@ class InventoryModule(BaseInventoryPlugin, Cacheable):
|
||||
if ip_address is None and ip_address_first is not None:
|
||||
ip_address = ip_address_first
|
||||
if ip_address is not None:
|
||||
self.inventory.set_variable(hostname, 'cobbler_ipv4_address', make_unsafe(ip_address))
|
||||
self.inventory.set_variable(hostname, 'cobbler_ipv4_address', ip_address)
|
||||
if ipv6_address is None and ipv6_address_first is not None:
|
||||
ipv6_address = ipv6_address_first
|
||||
if ipv6_address is not None:
|
||||
self.inventory.set_variable(hostname, 'cobbler_ipv6_address', make_unsafe(ipv6_address))
|
||||
self.inventory.set_variable(hostname, 'cobbler_ipv6_address', ipv6_address)
|
||||
|
||||
if self.get_option('want_facts'):
|
||||
try:
|
||||
self.inventory.set_variable(hostname, 'cobbler', make_unsafe(host))
|
||||
self.inventory.set_variable(hostname, 'cobbler', host)
|
||||
except ValueError as e:
|
||||
self.display.warning("Could not set host info for %s: %s" % (hostname, to_text(e)))
|
||||
|
||||
if self.get_option('want_ip_addresses'):
|
||||
self.inventory.set_variable(self.group, 'cobbler_ipv4_addresses', make_unsafe(ip_addresses))
|
||||
self.inventory.set_variable(self.group, 'cobbler_ipv6_addresses', make_unsafe(ipv6_addresses))
|
||||
self.inventory.set_variable(self.group, 'cobbler_ipv4_addresses', ip_addresses)
|
||||
self.inventory.set_variable(self.group, 'cobbler_ipv6_addresses', ipv6_addresses)
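The paired lines in this hunk show the same `set_variable()` calls with and without `make_unsafe()`, which is imported from the collection's `plugins/plugin_utils/unsafe` as shown above. A hedged sketch of the wrapping pattern, with hypothetical argument names:

```python
from ansible_collections.community.general.plugins.plugin_utils.unsafe import make_unsafe


def set_remote_value(inventory, hostname, key, value):
    # Wrap data fetched from an external service before exposing it as a host
    # variable, so Ansible does not evaluate it as a Jinja2 template later.
    inventory.set_variable(hostname, key, make_unsafe(value))
```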
|
||||
|
||||
@@ -85,8 +85,6 @@ from ansible.errors import AnsibleError, AnsibleParserError
|
||||
from ansible.module_utils.common.text.converters import to_native
|
||||
from ansible.plugins.inventory import BaseInventoryPlugin, Constructable
|
||||
|
||||
from ansible_collections.community.general.plugins.plugin_utils.unsafe import make_unsafe
|
||||
|
||||
try:
|
||||
import gitlab
|
||||
HAS_GITLAB = True
|
||||
@@ -108,11 +106,11 @@ class InventoryModule(BaseInventoryPlugin, Constructable):
|
||||
else:
|
||||
runners = gl.runners.all()
|
||||
for runner in runners:
|
||||
host = make_unsafe(str(runner['id']))
|
||||
host = str(runner['id'])
|
||||
ip_address = runner['ip_address']
|
||||
host_attrs = make_unsafe(vars(gl.runners.get(runner['id']))['_attrs'])
|
||||
host_attrs = vars(gl.runners.get(runner['id']))['_attrs']
|
||||
self.inventory.add_host(host, group='gitlab_runners')
|
||||
self.inventory.set_variable(host, 'ansible_host', make_unsafe(ip_address))
|
||||
self.inventory.set_variable(host, 'ansible_host', ip_address)
|
||||
if self.get_option('verbose_output', True):
|
||||
self.inventory.set_variable(host, 'gitlab_runner_attributes', host_attrs)
|
||||
|
||||
|
||||
@@ -97,8 +97,6 @@ from ansible.plugins.inventory import BaseInventoryPlugin, Constructable
|
||||
from ansible.module_utils.urls import open_url
|
||||
from ansible.module_utils.six.moves.urllib.error import HTTPError
|
||||
|
||||
from ansible_collections.community.general.plugins.plugin_utils.unsafe import make_unsafe
|
||||
|
||||
|
||||
class InventoryModule(BaseInventoryPlugin, Constructable):
|
||||
''' Host inventory parser for ansible using Icinga2 as source. '''
|
||||
@@ -235,15 +233,15 @@ class InventoryModule(BaseInventoryPlugin, Constructable):
|
||||
"""Convert Icinga2 API data to JSON format for Ansible"""
|
||||
groups_dict = {"_meta": {"hostvars": {}}}
|
||||
for entry in json_data:
|
||||
host_attrs = make_unsafe(entry['attrs'])
|
||||
host_attrs = entry['attrs']
|
||||
if self.inventory_attr == "name":
|
||||
host_name = make_unsafe(entry.get('name'))
|
||||
host_name = entry.get('name')
|
||||
if self.inventory_attr == "address":
|
||||
# When looking for address for inventory, if missing fallback to object name
|
||||
if host_attrs.get('address', '') != '':
|
||||
host_name = make_unsafe(host_attrs.get('address'))
|
||||
host_name = host_attrs.get('address')
|
||||
else:
|
||||
host_name = make_unsafe(entry.get('name'))
|
||||
host_name = entry.get('name')
|
||||
if self.inventory_attr == "display_name":
|
||||
host_name = host_attrs.get('display_name')
|
||||
if host_attrs['state'] == 0:
|
||||
@@ -259,7 +257,7 @@ class InventoryModule(BaseInventoryPlugin, Constructable):
|
||||
# If the address attribute is populated, override ansible_host with the value
|
||||
if host_attrs.get('address') != '':
|
||||
self.inventory.set_variable(host_name, 'ansible_host', host_attrs.get('address'))
|
||||
self.inventory.set_variable(host_name, 'hostname', make_unsafe(entry.get('name')))
|
||||
self.inventory.set_variable(host_name, 'hostname', entry.get('name'))
|
||||
self.inventory.set_variable(host_name, 'display_name', host_attrs.get('display_name'))
|
||||
self.inventory.set_variable(host_name, 'state',
|
||||
host_attrs['state'])
|
||||
|
||||
@@ -124,8 +124,6 @@ compose:
|
||||
from ansible.errors import AnsibleError
|
||||
from ansible.plugins.inventory import BaseInventoryPlugin, Constructable, Cacheable
|
||||
|
||||
from ansible_collections.community.general.plugins.plugin_utils.unsafe import make_unsafe
|
||||
|
||||
|
||||
try:
|
||||
from linode_api4 import LinodeClient
|
||||
@@ -201,21 +199,20 @@ class InventoryModule(BaseInventoryPlugin, Constructable, Cacheable):
|
||||
def _add_instances_to_groups(self):
|
||||
"""Add instance names to their dynamic inventory groups."""
|
||||
for instance in self.instances:
|
||||
self.inventory.add_host(make_unsafe(instance.label), group=instance.group)
|
||||
self.inventory.add_host(instance.label, group=instance.group)
|
||||
|
||||
def _add_hostvars_for_instances(self):
|
||||
"""Add hostvars for instances in the dynamic inventory."""
|
||||
ip_style = self.get_option('ip_style')
|
||||
for instance in self.instances:
|
||||
hostvars = instance._raw_json
|
||||
hostname = make_unsafe(instance.label)
|
||||
for hostvar_key in hostvars:
|
||||
if ip_style == 'api' and hostvar_key in ['ipv4', 'ipv6']:
|
||||
continue
|
||||
self.inventory.set_variable(
|
||||
hostname,
|
||||
instance.label,
|
||||
hostvar_key,
|
||||
make_unsafe(hostvars[hostvar_key])
|
||||
hostvars[hostvar_key]
|
||||
)
|
||||
if ip_style == 'api':
|
||||
ips = instance.ips.ipv4.public + instance.ips.ipv4.private
|
||||
@@ -224,9 +221,9 @@ class InventoryModule(BaseInventoryPlugin, Constructable, Cacheable):
|
||||
|
||||
for ip_type in set(ip.type for ip in ips):
|
||||
self.inventory.set_variable(
|
||||
hostname,
|
||||
instance.label,
|
||||
ip_type,
|
||||
make_unsafe(self._ip_data([ip for ip in ips if ip.type == ip_type]))
|
||||
self._ip_data([ip for ip in ips if ip.type == ip_type])
|
||||
)
|
||||
|
||||
def _ip_data(self, ip_list):
|
||||
@@ -257,44 +254,30 @@ class InventoryModule(BaseInventoryPlugin, Constructable, Cacheable):
|
||||
self._add_instances_to_groups()
|
||||
self._add_hostvars_for_instances()
|
||||
for instance in self.instances:
|
||||
hostname = make_unsafe(instance.label)
|
||||
variables = self.inventory.get_host(hostname).get_vars()
|
||||
variables = self.inventory.get_host(instance.label).get_vars()
|
||||
self._add_host_to_composed_groups(
|
||||
self.get_option('groups'),
|
||||
variables,
|
||||
hostname,
|
||||
instance.label,
|
||||
strict=strict)
|
||||
self._add_host_to_keyed_groups(
|
||||
self.get_option('keyed_groups'),
|
||||
variables,
|
||||
hostname,
|
||||
instance.label,
|
||||
strict=strict)
|
||||
self._set_composite_vars(
|
||||
self.get_option('compose'),
|
||||
variables,
|
||||
hostname,
|
||||
instance.label,
|
||||
strict=strict)
|
||||
|
||||
def verify_file(self, path):
|
||||
"""Verify the Linode configuration file.
|
||||
|
||||
Return true/false if the config-file is valid for this plugin
|
||||
|
||||
Args:
|
||||
str(path): path to the config
|
||||
Kwargs:
|
||||
None
|
||||
Raises:
|
||||
None
|
||||
Returns:
|
||||
bool(valid): is valid config file"""
|
||||
valid = False
|
||||
"""Verify the Linode configuration file."""
|
||||
if super(InventoryModule, self).verify_file(path):
|
||||
if path.endswith(("linode.yaml", "linode.yml")):
|
||||
valid = True
|
||||
else:
|
||||
self.display.vvv('Inventory source not ending in "linode.yaml" or "linode.yml"')
|
||||
return valid
|
||||
endings = ('linode.yaml', 'linode.yml')
|
||||
if any((path.endswith(ending) for ending in endings)):
|
||||
return True
|
||||
return False
|
||||
|
||||
def parse(self, inventory, loader, path, cache=True):
|
||||
"""Dynamically parse Linode the cloud inventory."""
|
||||
|
||||
@@ -41,6 +41,20 @@ DOCUMENTATION = r'''
|
||||
aliases: [ cert_file ]
|
||||
default: $HOME/.config/lxc/client.crt
|
||||
type: path
|
||||
server_cert:
|
||||
description:
|
||||
- The server certificate file path.
|
||||
type: path
|
||||
version_added: 8.0.0
|
||||
server_check_hostname:
|
||||
description:
|
||||
- This option controls if the server's hostname is checked as part of the HTTPS connection verification.
|
||||
This can be useful to disable, if for example, the server certificate provided (see O(server_cert) option)
|
||||
does not cover a name matching the one used to communicate with the server. Such mismatch is common as LXD
|
||||
generates self-signed server certificates by default.
|
||||
type: bool
|
||||
default: true
|
||||
version_added: 8.0.0
|
||||
trust_password:
|
||||
description:
|
||||
- The client trusted password.
|
||||
@@ -161,7 +175,6 @@ from ansible.module_utils.six import raise_from
|
||||
from ansible.errors import AnsibleError, AnsibleParserError
|
||||
from ansible.module_utils.six.moves.urllib.parse import urlencode
|
||||
from ansible_collections.community.general.plugins.module_utils.lxd import LXDClient, LXDClientException
|
||||
from ansible_collections.community.general.plugins.plugin_utils.unsafe import make_unsafe
|
||||
|
||||
try:
|
||||
import ipaddress
|
||||
@@ -287,7 +300,7 @@ class InventoryModule(BaseInventoryPlugin):
|
||||
urls = (url for url in url_list if self.validate_url(url))
|
||||
for url in urls:
|
||||
try:
|
||||
socket_connection = LXDClient(url, self.client_key, self.client_cert, self.debug)
|
||||
socket_connection = LXDClient(url, self.client_key, self.client_cert, self.debug, self.server_cert, self.server_check_hostname)
|
||||
return socket_connection
|
||||
except LXDClientException as err:
|
||||
error_storage[url] = err
|
||||
@@ -457,7 +470,7 @@ class InventoryModule(BaseInventoryPlugin):
|
||||
Helper to get the preferred interface provided by name pattern from 'prefered_instance_network_interface'.
|
||||
|
||||
Args:
|
||||
str(instance_name): name of instance
|
||||
str(containe_name): name of instance
|
||||
Kwargs:
|
||||
None
|
||||
Raises:
|
||||
@@ -482,7 +495,7 @@ class InventoryModule(BaseInventoryPlugin):
|
||||
Helper to get the VLAN_ID from the instance
|
||||
|
||||
Args:
|
||||
str(instance_name): name of instance
|
||||
str(containe_name): name of instance
|
||||
Kwargs:
|
||||
None
|
||||
Raises:
|
||||
@@ -657,7 +670,7 @@ class InventoryModule(BaseInventoryPlugin):
|
||||
|
||||
if self._get_data_entry('inventory/{0}/network_interfaces'.format(instance_name)): # instance have network interfaces
|
||||
self.inventory.set_variable(instance_name, 'ansible_connection', 'ssh')
|
||||
self.inventory.set_variable(instance_name, 'ansible_host', make_unsafe(interface_selection(instance_name)))
|
||||
self.inventory.set_variable(instance_name, 'ansible_host', interface_selection(instance_name))
|
||||
else:
|
||||
self.inventory.set_variable(instance_name, 'ansible_connection', 'local')
|
||||
|
||||
@@ -683,39 +696,31 @@ class InventoryModule(BaseInventoryPlugin):
|
||||
if self.filter.lower() != instance_state:
|
||||
continue
|
||||
# add instance
|
||||
instance_name = make_unsafe(instance_name)
|
||||
self.inventory.add_host(instance_name)
|
||||
# add network information
|
||||
self.build_inventory_network(instance_name)
|
||||
# add os
|
||||
v = self._get_data_entry('inventory/{0}/os'.format(instance_name))
|
||||
if v:
|
||||
self.inventory.set_variable(instance_name, 'ansible_lxd_os', make_unsafe(v.lower()))
|
||||
self.inventory.set_variable(instance_name, 'ansible_lxd_os', v.lower())
|
||||
# add release
|
||||
v = self._get_data_entry('inventory/{0}/release'.format(instance_name))
|
||||
if v:
|
||||
self.inventory.set_variable(
|
||||
instance_name, 'ansible_lxd_release', make_unsafe(v.lower()))
|
||||
self.inventory.set_variable(instance_name, 'ansible_lxd_release', v.lower())
|
||||
# add profile
|
||||
self.inventory.set_variable(
|
||||
instance_name, 'ansible_lxd_profile', make_unsafe(self._get_data_entry('inventory/{0}/profile'.format(instance_name))))
|
||||
self.inventory.set_variable(instance_name, 'ansible_lxd_profile', self._get_data_entry('inventory/{0}/profile'.format(instance_name)))
|
||||
# add state
|
||||
self.inventory.set_variable(
|
||||
instance_name, 'ansible_lxd_state', make_unsafe(instance_state))
|
||||
self.inventory.set_variable(instance_name, 'ansible_lxd_state', instance_state)
|
||||
# add type
|
||||
self.inventory.set_variable(
|
||||
instance_name, 'ansible_lxd_type', make_unsafe(self._get_data_entry('inventory/{0}/type'.format(instance_name))))
|
||||
self.inventory.set_variable(instance_name, 'ansible_lxd_type', self._get_data_entry('inventory/{0}/type'.format(instance_name)))
|
||||
# add location information
|
||||
if self._get_data_entry('inventory/{0}/location'.format(instance_name)) != "none": # wrong type by lxd 'none' != 'None'
|
||||
self.inventory.set_variable(
|
||||
instance_name, 'ansible_lxd_location', make_unsafe(self._get_data_entry('inventory/{0}/location'.format(instance_name))))
|
||||
self.inventory.set_variable(instance_name, 'ansible_lxd_location', self._get_data_entry('inventory/{0}/location'.format(instance_name)))
|
||||
# add VLAN_ID information
|
||||
if self._get_data_entry('inventory/{0}/vlan_ids'.format(instance_name)):
|
||||
self.inventory.set_variable(
|
||||
instance_name, 'ansible_lxd_vlan_ids', make_unsafe(self._get_data_entry('inventory/{0}/vlan_ids'.format(instance_name))))
|
||||
self.inventory.set_variable(instance_name, 'ansible_lxd_vlan_ids', self._get_data_entry('inventory/{0}/vlan_ids'.format(instance_name)))
|
||||
# add project
|
||||
self.inventory.set_variable(
|
||||
instance_name, 'ansible_lxd_project', make_unsafe(self._get_data_entry('inventory/{0}/project'.format(instance_name))))
|
||||
self.inventory.set_variable(instance_name, 'ansible_lxd_project', self._get_data_entry('inventory/{0}/project'.format(instance_name)))
|
||||
|
||||
def build_inventory_groups_location(self, group_name):
|
||||
"""create group by attribute: location
|
||||
@@ -988,7 +993,7 @@ class InventoryModule(BaseInventoryPlugin):
|
||||
for group_name in self.groupby:
|
||||
if not group_name.isalnum():
|
||||
raise AnsibleParserError('Invalid character(s) in groupname: {0}'.format(to_native(group_name)))
|
||||
group_type(make_unsafe(group_name))
|
||||
group_type(group_name)
|
||||
|
||||
def build_inventory(self):
|
||||
"""Build dynamic inventory
|
||||
@@ -1087,6 +1092,8 @@ class InventoryModule(BaseInventoryPlugin):
|
||||
try:
|
||||
self.client_key = self.get_option('client_key')
|
||||
self.client_cert = self.get_option('client_cert')
|
||||
self.server_cert = self.get_option('server_cert')
|
||||
self.server_check_hostname = self.get_option('server_check_hostname')
|
||||
self.project = self.get_option('project')
|
||||
self.debug = self.DEBUG
|
||||
self.data = {} # store for inventory-data
|
||||
|
||||
@@ -127,8 +127,6 @@ from ansible.module_utils.common.text.converters import to_native, to_text
|
||||
from ansible.plugins.inventory import BaseInventoryPlugin, Constructable, Cacheable
|
||||
from ansible.module_utils.common.process import get_bin_path
|
||||
|
||||
from ansible_collections.community.general.plugins.plugin_utils.unsafe import make_unsafe
|
||||
|
||||
|
||||
class InventoryModule(BaseInventoryPlugin, Constructable, Cacheable):
|
||||
|
||||
@@ -145,7 +143,6 @@ class InventoryModule(BaseInventoryPlugin, Constructable, Cacheable):
|
||||
strict = self.get_option('strict')
|
||||
|
||||
for host in hosts:
|
||||
host = make_unsafe(host)
|
||||
hostname = host['name']
|
||||
self.inventory.add_host(hostname)
|
||||
for var, value in host.items():
|
||||
|
||||
@@ -69,8 +69,6 @@ from ansible.module_utils.common.text.converters import to_text
|
||||
from ansible.module_utils.ansible_release import __version__ as ansible_version
|
||||
from ansible.module_utils.six.moves.urllib.parse import urljoin
|
||||
|
||||
from ansible_collections.community.general.plugins.plugin_utils.unsafe import make_unsafe
|
||||
|
||||
|
||||
class InventoryModule(BaseInventoryPlugin):
|
||||
NAME = 'community.general.online'
|
||||
@@ -171,20 +169,20 @@ class InventoryModule(BaseInventoryPlugin):
|
||||
"support"
|
||||
)
|
||||
for attribute in targeted_attributes:
|
||||
self.inventory.set_variable(hostname, attribute, make_unsafe(host_infos[attribute]))
|
||||
self.inventory.set_variable(hostname, attribute, host_infos[attribute])
|
||||
|
||||
if self.extract_public_ipv4(host_infos=host_infos):
|
||||
self.inventory.set_variable(hostname, "public_ipv4", make_unsafe(self.extract_public_ipv4(host_infos=host_infos)))
|
||||
self.inventory.set_variable(hostname, "ansible_host", make_unsafe(self.extract_public_ipv4(host_infos=host_infos)))
|
||||
self.inventory.set_variable(hostname, "public_ipv4", self.extract_public_ipv4(host_infos=host_infos))
|
||||
self.inventory.set_variable(hostname, "ansible_host", self.extract_public_ipv4(host_infos=host_infos))
|
||||
|
||||
if self.extract_private_ipv4(host_infos=host_infos):
|
||||
self.inventory.set_variable(hostname, "public_ipv4", make_unsafe(self.extract_private_ipv4(host_infos=host_infos)))
|
||||
self.inventory.set_variable(hostname, "public_ipv4", self.extract_private_ipv4(host_infos=host_infos))
|
||||
|
||||
if self.extract_os_name(host_infos=host_infos):
|
||||
self.inventory.set_variable(hostname, "os_name", make_unsafe(self.extract_os_name(host_infos=host_infos)))
|
||||
self.inventory.set_variable(hostname, "os_name", self.extract_os_name(host_infos=host_infos))
|
||||
|
||||
if self.extract_os_version(host_infos=host_infos):
|
||||
self.inventory.set_variable(hostname, "os_version", make_unsafe(self.extract_os_name(host_infos=host_infos)))
|
||||
self.inventory.set_variable(hostname, "os_version", self.extract_os_name(host_infos=host_infos))
|
||||
|
||||
def _filter_host(self, host_infos, hostname_preferences):
|
||||
|
||||
@@ -203,8 +201,6 @@ class InventoryModule(BaseInventoryPlugin):
|
||||
if not hostname:
|
||||
return
|
||||
|
||||
hostname = make_unsafe(hostname)
|
||||
|
||||
self.inventory.add_host(host=hostname)
|
||||
self._fill_host_variables(hostname=hostname, host_infos=host_infos)
|
||||
|
||||
@@ -214,8 +210,6 @@ class InventoryModule(BaseInventoryPlugin):
|
||||
if not group:
|
||||
return
|
||||
|
||||
group = make_unsafe(group)
|
||||
|
||||
self.inventory.add_group(group=group)
|
||||
self.inventory.add_host(group=group, host=hostname)
|
||||
|
||||
|
||||
@@ -98,8 +98,6 @@ from ansible.errors import AnsibleError
|
||||
from ansible.plugins.inventory import BaseInventoryPlugin, Constructable
|
||||
from ansible.module_utils.common.text.converters import to_native
|
||||
|
||||
from ansible_collections.community.general.plugins.plugin_utils.unsafe import make_unsafe
|
||||
|
||||
from collections import namedtuple
|
||||
import os
|
||||
|
||||
@@ -217,7 +215,6 @@ class InventoryModule(BaseInventoryPlugin, Constructable):
|
||||
filter_by_label = self.get_option('filter_by_label')
|
||||
servers = self._retrieve_servers(filter_by_label)
|
||||
for server in servers:
|
||||
server = make_unsafe(server)
|
||||
hostname = server['name']
|
||||
# check for labels
|
||||
if group_by_labels and server['LABELS']:
|
||||
|
||||
@@ -224,7 +224,6 @@ from ansible.module_utils.six.moves.urllib.parse import urlencode
|
||||
from ansible.utils.display import Display
|
||||
|
||||
from ansible_collections.community.general.plugins.module_utils.version import LooseVersion
|
||||
from ansible_collections.community.general.plugins.plugin_utils.unsafe import make_unsafe
|
||||
|
||||
# 3rd party imports
|
||||
try:
|
||||
@@ -331,7 +330,7 @@ class InventoryModule(BaseInventoryPlugin, Constructable, Cacheable):
|
||||
|
||||
self._cache[self.cache_key][url] = data
|
||||
|
||||
return make_unsafe(self._cache[self.cache_key][url])
|
||||
return self._cache[self.cache_key][url]
|
||||
|
||||
def _get_nodes(self):
|
||||
return self._get_json("%s/api2/json/nodes" % self.proxmox_url)
|
||||
|
||||
@@ -121,7 +121,6 @@ else:
|
||||
from ansible.errors import AnsibleError
|
||||
from ansible.plugins.inventory import BaseInventoryPlugin, Constructable
|
||||
from ansible_collections.community.general.plugins.module_utils.scaleway import SCALEWAY_LOCATION, parse_pagination_link
|
||||
from ansible_collections.community.general.plugins.plugin_utils.unsafe import make_unsafe
|
||||
from ansible.module_utils.urls import open_url
|
||||
from ansible.module_utils.common.text.converters import to_native, to_text
|
||||
from ansible.module_utils.six import raise_from
|
||||
@@ -280,7 +279,7 @@ class InventoryModule(BaseInventoryPlugin, Constructable):
|
||||
zone_info = SCALEWAY_LOCATION[zone]
|
||||
|
||||
url = _build_server_url(zone_info["api_endpoint"])
|
||||
raw_zone_hosts_infos = make_unsafe(_fetch_information(url=url, token=token))
|
||||
raw_zone_hosts_infos = _fetch_information(url=url, token=token)
|
||||
|
||||
for host_infos in raw_zone_hosts_infos:
|
||||
|
||||
@@ -342,4 +341,4 @@ class InventoryModule(BaseInventoryPlugin, Constructable):
|
||||
hostname_preference = self.get_option("hostnames")
|
||||
|
||||
for zone in self._get_zones(config_zones):
|
||||
self.do_zone_inventory(zone=make_unsafe(zone), token=token, tags=tags, hostname_preferences=hostname_preference)
|
||||
self.do_zone_inventory(zone=zone, token=token, tags=tags, hostname_preferences=hostname_preference)
|
||||
|
||||
@@ -73,8 +73,6 @@ from ansible.plugins.inventory import (
|
||||
)
|
||||
from ansible.utils.display import Display
|
||||
|
||||
from ansible_collections.community.general.plugins.plugin_utils.unsafe import make_unsafe
|
||||
|
||||
|
||||
display = Display()
|
||||
|
||||
@@ -273,7 +271,7 @@ class InventoryModule(BaseInventoryPlugin, Constructable, Cacheable):
|
||||
if not cache or cache_needs_update:
|
||||
results = self._query()
|
||||
|
||||
self._populate(make_unsafe(results))
|
||||
self._populate(results)
|
||||
|
||||
# If the cache has expired/doesn't exist or
|
||||
# if refresh_inventory/flush cache is used
|
||||
|
||||
@@ -63,8 +63,6 @@ from ansible.module_utils.common._collections_compat import MutableMapping
|
||||
from ansible.plugins.inventory import BaseInventoryPlugin, Constructable, Cacheable
|
||||
from ansible.module_utils.common.process import get_bin_path
|
||||
|
||||
from ansible_collections.community.general.plugins.plugin_utils.unsafe import make_unsafe
|
||||
|
||||
|
||||
class InventoryModule(BaseInventoryPlugin, Constructable, Cacheable):
|
||||
''' Host inventory parser for ansible using local virtualbox. '''
|
||||
@@ -118,7 +116,6 @@ class InventoryModule(BaseInventoryPlugin, Constructable, Cacheable):
|
||||
self._add_host_to_keyed_groups(self.get_option('keyed_groups'), hostvars[host], host, strict=strict)
|
||||
|
||||
def _populate_from_cache(self, source_data):
|
||||
source_data = make_unsafe(source_data)
|
||||
hostvars = source_data.pop('_meta', {}).get('hostvars', {})
|
||||
for group in source_data:
|
||||
if group == 'all':
|
||||
@@ -165,7 +162,7 @@ class InventoryModule(BaseInventoryPlugin, Constructable, Cacheable):
|
||||
v = v.strip()
|
||||
# found host
|
||||
if k.startswith('Name') and ',' not in v: # some setting strings appear in Name
|
||||
current_host = make_unsafe(v)
|
||||
current_host = v
|
||||
if current_host not in hostvars:
|
||||
hostvars[current_host] = {}
|
||||
self.inventory.add_host(current_host)
|
||||
@@ -173,13 +170,12 @@ class InventoryModule(BaseInventoryPlugin, Constructable, Cacheable):
|
||||
# try to get network info
|
||||
netdata = self._query_vbox_data(current_host, netinfo)
|
||||
if netdata:
|
||||
self.inventory.set_variable(current_host, 'ansible_host', make_unsafe(netdata))
|
||||
self.inventory.set_variable(current_host, 'ansible_host', netdata)
|
||||
|
||||
# found groups
|
||||
elif k == 'Groups':
|
||||
for group in v.split('/'):
|
||||
if group:
|
||||
group = make_unsafe(group)
|
||||
group = self.inventory.add_group(group)
|
||||
self.inventory.add_child(group, current_host)
|
||||
if group not in cacheable_results:
|
||||
@@ -189,17 +185,17 @@ class InventoryModule(BaseInventoryPlugin, Constructable, Cacheable):
|
||||
|
||||
else:
|
||||
# found vars, accumulate in hostvars for clean inventory set
|
||||
pref_k = make_unsafe('vbox_' + k.strip().replace(' ', '_'))
|
||||
pref_k = 'vbox_' + k.strip().replace(' ', '_')
|
||||
leading_spaces = len(k) - len(k.lstrip(' '))
|
||||
if 0 < leading_spaces <= 2:
|
||||
if prevkey not in hostvars[current_host] or not isinstance(hostvars[current_host][prevkey], dict):
|
||||
hostvars[current_host][prevkey] = {}
|
||||
hostvars[current_host][prevkey][pref_k] = make_unsafe(v)
|
||||
hostvars[current_host][prevkey][pref_k] = v
|
||||
elif leading_spaces > 2:
|
||||
continue
|
||||
else:
|
||||
if v != '':
|
||||
hostvars[current_host][pref_k] = make_unsafe(v)
|
||||
hostvars[current_host][pref_k] = v
|
||||
if self._ungrouped_host(current_host, cacheable_results):
|
||||
if 'ungrouped' not in cacheable_results:
|
||||
cacheable_results['ungrouped'] = {'hosts': []}
|
||||
|
||||
@@ -84,7 +84,6 @@ from ansible.errors import AnsibleError
|
||||
from ansible.plugins.inventory import BaseInventoryPlugin, Constructable, Cacheable
|
||||
|
||||
from ansible_collections.community.general.plugins.module_utils.version import LooseVersion
|
||||
from ansible_collections.community.general.plugins.plugin_utils.unsafe import make_unsafe
|
||||
|
||||
# 3rd party imports
|
||||
try:
|
||||
@@ -348,4 +347,4 @@ class InventoryModule(BaseInventoryPlugin, Constructable, Cacheable):
|
||||
self.protocol = 'ws'
|
||||
|
||||
objects = self._get_objects()
|
||||
self._populate(make_unsafe(objects))
|
||||
self._populate(objects)
|
||||
|
||||
@@ -25,10 +25,7 @@ DOCUMENTATION = """
|
||||
type: list
|
||||
elements: str
|
||||
search:
|
||||
description:
|
||||
- Field to retrieve, for example V(name) or V(id).
|
||||
- If set to V(id), only zero or one element can be returned.
|
||||
Use the Jinja C(first) filter to get the only list element.
|
||||
description: Field to retrieve, for example V(name) or V(id).
|
||||
type: str
|
||||
default: name
|
||||
version_added: 5.7.0
|
||||
@@ -42,27 +39,27 @@ DOCUMENTATION = """
|
||||
"""
|
||||
|
||||
EXAMPLES = """
|
||||
- name: "Get 'password' from all Bitwarden records named 'a_test'"
|
||||
- name: "Get 'password' from Bitwarden record named 'a_test'"
|
||||
ansible.builtin.debug:
|
||||
msg: >-
|
||||
{{ lookup('community.general.bitwarden', 'a_test', field='password') }}
|
||||
|
||||
- name: "Get 'password' from Bitwarden record with ID 'bafba515-af11-47e6-abe3-af1200cd18b2'"
|
||||
- name: "Get 'password' from Bitwarden record with id 'bafba515-af11-47e6-abe3-af1200cd18b2'"
|
||||
ansible.builtin.debug:
|
||||
msg: >-
|
||||
{{ lookup('community.general.bitwarden', 'bafba515-af11-47e6-abe3-af1200cd18b2', search='id', field='password') | first }}
|
||||
{{ lookup('community.general.bitwarden', 'bafba515-af11-47e6-abe3-af1200cd18b2', search='id', field='password') }}
|
||||
|
||||
- name: "Get 'password' from all Bitwarden records named 'a_test' from collection"
|
||||
- name: "Get 'password' from Bitwarden record named 'a_test' from collection"
|
||||
ansible.builtin.debug:
|
||||
msg: >-
|
||||
{{ lookup('community.general.bitwarden', 'a_test', field='password', collection_id='bafba515-af11-47e6-abe3-af1200cd18b2') }}
|
||||
|
||||
- name: "Get list of all full Bitwarden records named 'a_test'"
|
||||
- name: "Get full Bitwarden record named 'a_test'"
|
||||
ansible.builtin.debug:
|
||||
msg: >-
|
||||
{{ lookup('community.general.bitwarden', 'a_test') }}
|
||||
|
||||
- name: "Get custom field 'api_key' from all Bitwarden records named 'a_test'"
|
||||
- name: "Get custom field 'api_key' from Bitwarden record named 'a_test'"
|
||||
ansible.builtin.debug:
|
||||
msg: >-
|
||||
{{ lookup('community.general.bitwarden', 'a_test', field='api_key') }}
|
||||
@@ -70,12 +67,9 @@ EXAMPLES = """
|
||||
|
||||
RETURN = """
|
||||
_raw:
|
||||
description:
|
||||
- A one-element list that contains a list of requested fields or JSON objects of matches.
|
||||
- If you use C(query), you get a list of lists. If you use C(lookup) without C(wantlist=true),
|
||||
this always gets reduced to a list of field values or JSON objects.
|
||||
description: List of requested field or JSON object of list of matches.
|
||||
type: list
|
||||
elements: list
|
||||
elements: raw
|
||||
"""
|
||||
|
||||
from subprocess import Popen, PIPE
|
||||
|
||||
@@ -70,7 +70,6 @@ RETURN = """
|
||||
"""
|
||||
|
||||
from subprocess import Popen, PIPE
|
||||
from time import sleep
|
||||
|
||||
from ansible.errors import AnsibleLookupError
|
||||
from ansible.module_utils.common.text.converters import to_text
|
||||
@@ -85,29 +84,11 @@ class BitwardenSecretsManagerException(AnsibleLookupError):
|
||||
class BitwardenSecretsManager(object):
|
||||
def __init__(self, path='bws'):
|
||||
self._cli_path = path
|
||||
self._max_retries = 3
|
||||
self._retry_delay = 1
|
||||
|
||||
@property
|
||||
def cli_path(self):
|
||||
return self._cli_path
|
||||
|
||||
def _run_with_retry(self, args, stdin=None, retries=0):
|
||||
out, err, rc = self._run(args, stdin)
|
||||
|
||||
if rc != 0:
|
||||
if retries >= self._max_retries:
|
||||
raise BitwardenSecretsManagerException("Max retries exceeded. Unable to retrieve secret.")
|
||||
|
||||
if "Too many requests" in err:
|
||||
delay = self._retry_delay * (2 ** retries)
|
||||
sleep(delay)
|
||||
return self._run_with_retry(args, stdin, retries + 1)
|
||||
else:
|
||||
raise BitwardenSecretsManagerException("Command failed with return code {rc}: {err}".format(rc=rc, err=err))
|
||||
|
||||
return out, err, rc
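The `_run_with_retry` method above retries with exponential backoff when `bws` answers "Too many requests"; with the defaults shown (`_max_retries = 3`, `_retry_delay = 1`) the waits before each retry are 1, 2 and 4 seconds. A standalone sketch of the same backoff arithmetic:

```python
def backoff_delays(base_delay=1, max_retries=3):
    # Exponential backoff: base_delay * 2**attempt seconds before each retry.
    return [base_delay * (2 ** attempt) for attempt in range(max_retries)]


print(backoff_delays())  # [1, 2, 4]
```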
|
||||
|
||||
def _run(self, args, stdin=None):
|
||||
p = Popen([self.cli_path] + args, stdout=PIPE, stderr=PIPE, stdin=PIPE)
|
||||
out, err = p.communicate(stdin)
|
||||
@@ -126,7 +107,7 @@ class BitwardenSecretsManager(object):
|
||||
'get', 'secret', secret_id
|
||||
]
|
||||
|
||||
out, err, rc = self._run_with_retry(params)
|
||||
out, err, rc = self._run(params)
|
||||
if rc != 0:
|
||||
raise BitwardenSecretsManagerException(to_text(err))
|
||||
|
||||
|
||||
@@ -98,15 +98,10 @@ def load_collection_meta(collection_pkg, no_version='*'):
|
||||
if os.path.exists(manifest_path):
|
||||
return load_collection_meta_manifest(manifest_path)
|
||||
|
||||
# Try to load galaxy.y(a)ml
|
||||
# Try to load galaxy.yml
|
||||
galaxy_path = os.path.join(path, 'galaxy.yml')
|
||||
galaxy_alt_path = os.path.join(path, 'galaxy.yaml')
|
||||
# galaxy.yaml was only supported in ansible-base 2.10 and ansible-core 2.11. Support was removed
|
||||
# in https://github.com/ansible/ansible/commit/595413d11346b6f26bb3d9df2d8e05f2747508a3 for
|
||||
# ansible-core 2.12.
|
||||
for path in (galaxy_path, galaxy_alt_path):
|
||||
if os.path.exists(path):
|
||||
return load_collection_meta_galaxy(path, no_version=no_version)
|
||||
if os.path.exists(galaxy_path):
|
||||
return load_collection_meta_galaxy(galaxy_path, no_version=no_version)
|
||||
|
||||
return {}
|
||||
|
||||
|
||||
@@ -11,7 +11,7 @@ DOCUMENTATION = """
|
||||
- Roy Lenferink (@rlenferink)
|
||||
- Mark Ettema (@m-a-r-k-e)
|
||||
name: merge_variables
|
||||
short_description: merge variables whose names match a given pattern
|
||||
short_description: merge variables with a certain suffix
|
||||
description:
|
||||
- This lookup returns the merged result of all variables in scope that match the given prefixes, suffixes, or
|
||||
regular expressions, optionally.
|
||||
|
||||
@@ -127,14 +127,6 @@ from ansible.module_utils.six import with_metaclass
|
||||
from ansible_collections.community.general.plugins.module_utils.onepassword import OnePasswordConfig
|
||||
|
||||
|
||||
def _lower_if_possible(value):
|
||||
"""Return the lower case version value, otherwise return the value"""
|
||||
try:
|
||||
return value.lower()
|
||||
except AttributeError:
|
||||
return value
|
||||
|
||||
|
||||
class OnePassCLIBase(with_metaclass(abc.ABCMeta, object)):
|
||||
bin = "op"
|
||||
|
||||
@@ -488,7 +480,6 @@ class OnePassCLIv2(OnePassCLIBase):
|
||||
}
|
||||
"""
|
||||
data = json.loads(data_json)
|
||||
field_name = _lower_if_possible(field_name)
|
||||
for field in data.get("fields", []):
|
||||
if section_title is None:
|
||||
# If the field name exists in the section, return that value
|
||||
@@ -497,25 +488,23 @@ class OnePassCLIv2(OnePassCLIBase):
|
||||
|
||||
# If the field name doesn't exist in the section, match on the value of "label"
|
||||
# then "id" and return "value"
|
||||
if field.get("label", "").lower() == field_name:
|
||||
if field.get("label") == field_name:
|
||||
return field.get("value", "")
|
||||
|
||||
if field.get("id", "").lower() == field_name:
|
||||
if field.get("id") == field_name:
|
||||
return field.get("value", "")
|
||||
|
||||
# Look at the section data and get an identifier. The value of 'id' is either a unique ID
|
||||
# or a human-readable string. If a 'label' field exists, prefer that since
|
||||
# it is the value visible in the 1Password UI when both 'id' and 'label' exist.
|
||||
section = field.get("section", {})
|
||||
section_title = _lower_if_possible(section_title)
|
||||
|
||||
current_section_title = section.get("label", section.get("id", "")).lower()
|
||||
current_section_title = section.get("label", section.get("id"))
|
||||
if section_title == current_section_title:
|
||||
# In the correct section. Check "label" then "id" for the desired field_name
|
||||
if field.get("label", "").lower() == field_name:
|
||||
if field.get("label") == field_name:
|
||||
return field.get("value", "")
|
||||
|
||||
if field.get("id", "").lower() == field_name:
|
||||
if field.get("id") == field_name:
|
||||
return field.get("value", "")
|
||||
|
||||
return ""
|
||||
|
||||
@@ -21,30 +21,15 @@ except ImportError:
|
||||
|
||||
import traceback
|
||||
|
||||
|
||||
def _determine_list_all_kwargs(version):
|
||||
gitlab_version = LooseVersion(version)
|
||||
if gitlab_version >= LooseVersion('4.0.0'):
|
||||
# 4.0.0 removed 'as_list'
|
||||
return {'iterator': True, 'per_page': 100}
|
||||
elif gitlab_version >= LooseVersion('3.7.0'):
|
||||
# 3.7.0 added 'get_all'
|
||||
return {'as_list': False, 'get_all': True, 'per_page': 100}
|
||||
else:
|
||||
return {'as_list': False, 'all': True, 'per_page': 100}
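`_determine_list_all_kwargs` picks the pagination keywords that match the installed python-gitlab version, and the module-level `list_all_kwargs` is then splatted into list calls. A hedged usage sketch, assuming the helper lives in the collection's gitlab module_utils as this hunk suggests and using a hypothetical server and token:

```python
import gitlab

from ansible_collections.community.general.plugins.module_utils.gitlab import list_all_kwargs

gl = gitlab.Gitlab("https://gitlab.example.com", private_token="glpat-...")  # hypothetical
# {'iterator': True, 'per_page': 100} on python-gitlab >= 4.0.0, older keyword
# spellings ('as_list'/'get_all'/'all') on earlier releases.
for project in gl.projects.list(**list_all_kwargs):
    print(project.path_with_namespace)
```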
|
||||
|
||||
|
||||
GITLAB_IMP_ERR = None
|
||||
try:
|
||||
import gitlab
|
||||
import requests
|
||||
HAS_GITLAB_PACKAGE = True
|
||||
list_all_kwargs = _determine_list_all_kwargs(gitlab.__version__)
|
||||
except Exception:
|
||||
gitlab = None
|
||||
GITLAB_IMP_ERR = traceback.format_exc()
|
||||
HAS_GITLAB_PACKAGE = False
|
||||
list_all_kwargs = {}
|
||||
|
||||
|
||||
def auth_argument_spec(spec=None):
|
||||
|
||||
@@ -1679,7 +1679,7 @@ class KeycloakAPI(object):
|
||||
:param name: Name of the role to fetch.
|
||||
:param realm: Realm in which the role resides; default 'master'.
|
||||
"""
|
||||
role_url = URL_REALM_ROLE.format(url=self.baseurl, realm=realm, name=quote(name, safe=''))
|
||||
role_url = URL_REALM_ROLE.format(url=self.baseurl, realm=realm, name=quote(name))
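The paired lines above differ only in whether `quote()` is called with `safe=''`: by default `urllib.parse.quote` leaves `/` unescaped, so a role name or flow alias containing a slash would otherwise leak into the URL path. A small illustration with a hypothetical name:

```python
from urllib.parse import quote

name = "parent/child"        # hypothetical role name containing a slash
print(quote(name))           # parent/child   ('/' is in the default safe set)
print(quote(name, safe=''))  # parent%2Fchild ('/' is percent-encoded as well)
```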
|
||||
try:
|
||||
return json.loads(to_native(open_url(role_url, method="GET", http_agent=self.http_agent, headers=self.restheaders, timeout=self.connection_timeout,
|
||||
validate_certs=self.validate_certs).read()))
|
||||
@@ -1716,7 +1716,7 @@ class KeycloakAPI(object):
|
||||
:param rolerep: A RoleRepresentation of the updated role.
|
||||
:return HTTPResponse object on success
|
||||
"""
|
||||
role_url = URL_REALM_ROLE.format(url=self.baseurl, realm=realm, name=quote(rolerep['name']), safe='')
|
||||
role_url = URL_REALM_ROLE.format(url=self.baseurl, realm=realm, name=quote(rolerep['name']))
|
||||
try:
|
||||
composites = None
|
||||
if "composites" in rolerep:
|
||||
@@ -1737,9 +1737,9 @@ class KeycloakAPI(object):
|
||||
if clientid is not None:
|
||||
client = self.get_client_by_clientid(client_id=clientid, realm=realm)
|
||||
cid = client['id']
|
||||
composite_url = URL_CLIENT_ROLE_COMPOSITES.format(url=self.baseurl, realm=realm, id=cid, name=quote(rolerep["name"], safe=''))
|
||||
composite_url = URL_CLIENT_ROLE_COMPOSITES.format(url=self.baseurl, realm=realm, id=cid, name=quote(rolerep["name"]))
|
||||
else:
|
||||
composite_url = URL_REALM_ROLE_COMPOSITES.format(url=self.baseurl, realm=realm, name=quote(rolerep["name"], safe=''))
|
||||
composite_url = URL_REALM_ROLE_COMPOSITES.format(url=self.baseurl, realm=realm, name=quote(rolerep["name"]))
|
||||
# Get existing composites
|
||||
return json.loads(to_native(open_url(
|
||||
composite_url,
|
||||
@@ -1758,9 +1758,9 @@ class KeycloakAPI(object):
|
||||
if clientid is not None:
|
||||
client = self.get_client_by_clientid(client_id=clientid, realm=realm)
|
||||
cid = client['id']
|
||||
composite_url = URL_CLIENT_ROLE_COMPOSITES.format(url=self.baseurl, realm=realm, id=cid, name=quote(rolerep["name"], safe=''))
|
||||
composite_url = URL_CLIENT_ROLE_COMPOSITES.format(url=self.baseurl, realm=realm, id=cid, name=quote(rolerep["name"]))
|
||||
else:
|
||||
composite_url = URL_REALM_ROLE_COMPOSITES.format(url=self.baseurl, realm=realm, name=quote(rolerep["name"], safe=''))
|
||||
composite_url = URL_REALM_ROLE_COMPOSITES.format(url=self.baseurl, realm=realm, name=quote(rolerep["name"]))
|
||||
# Get existing composites
|
||||
# create new composites
|
||||
return open_url(composite_url, method='POST', http_agent=self.http_agent, headers=self.restheaders, timeout=self.connection_timeout,
|
||||
@@ -1775,9 +1775,9 @@ class KeycloakAPI(object):
|
||||
if clientid is not None:
|
||||
client = self.get_client_by_clientid(client_id=clientid, realm=realm)
|
||||
cid = client['id']
|
||||
composite_url = URL_CLIENT_ROLE_COMPOSITES.format(url=self.baseurl, realm=realm, id=cid, name=quote(rolerep["name"], safe=''))
|
||||
composite_url = URL_CLIENT_ROLE_COMPOSITES.format(url=self.baseurl, realm=realm, id=cid, name=quote(rolerep["name"]))
|
||||
else:
|
||||
composite_url = URL_REALM_ROLE_COMPOSITES.format(url=self.baseurl, realm=realm, name=quote(rolerep["name"], safe=''))
|
||||
composite_url = URL_REALM_ROLE_COMPOSITES.format(url=self.baseurl, realm=realm, name=quote(rolerep["name"]))
|
||||
# Get existing composites
|
||||
# create new composites
|
||||
return open_url(composite_url, method='DELETE', http_agent=self.http_agent, headers=self.restheaders, timeout=self.connection_timeout,
|
||||
@@ -1842,7 +1842,7 @@ class KeycloakAPI(object):
|
||||
:param name: The name of the role.
|
||||
:param realm: The realm in which this role resides, default "master".
|
||||
"""
|
||||
role_url = URL_REALM_ROLE.format(url=self.baseurl, realm=realm, name=quote(name, safe=''))
|
||||
role_url = URL_REALM_ROLE.format(url=self.baseurl, realm=realm, name=quote(name))
|
||||
try:
|
||||
return open_url(role_url, method='DELETE', http_agent=self.http_agent, headers=self.restheaders, timeout=self.connection_timeout,
|
||||
validate_certs=self.validate_certs)
|
||||
@@ -1886,7 +1886,7 @@ class KeycloakAPI(object):
|
||||
if cid is None:
|
||||
self.module.fail_json(msg='Could not find client %s in realm %s'
|
||||
% (clientid, realm))
|
||||
role_url = URL_CLIENT_ROLE.format(url=self.baseurl, realm=realm, id=cid, name=quote(name, safe=''))
|
||||
role_url = URL_CLIENT_ROLE.format(url=self.baseurl, realm=realm, id=cid, name=quote(name))
|
||||
try:
|
||||
return json.loads(to_native(open_url(role_url, method="GET", http_agent=self.http_agent, headers=self.restheaders, timeout=self.connection_timeout,
|
||||
validate_certs=self.validate_certs).read()))
|
||||
@@ -1950,7 +1950,7 @@ class KeycloakAPI(object):
|
||||
if cid is None:
|
||||
self.module.fail_json(msg='Could not find client %s in realm %s'
|
||||
% (clientid, realm))
|
||||
role_url = URL_CLIENT_ROLE.format(url=self.baseurl, realm=realm, id=cid, name=quote(rolerep['name'], safe=''))
|
||||
role_url = URL_CLIENT_ROLE.format(url=self.baseurl, realm=realm, id=cid, name=quote(rolerep['name']))
|
||||
try:
|
||||
composites = None
|
||||
if "composites" in rolerep:
|
||||
@@ -1976,7 +1976,7 @@ class KeycloakAPI(object):
|
||||
if cid is None:
|
||||
self.module.fail_json(msg='Could not find client %s in realm %s'
|
||||
% (clientid, realm))
|
||||
role_url = URL_CLIENT_ROLE.format(url=self.baseurl, realm=realm, id=cid, name=quote(name, safe=''))
|
||||
role_url = URL_CLIENT_ROLE.format(url=self.baseurl, realm=realm, id=cid, name=quote(name))
|
||||
try:
|
||||
return open_url(role_url, method='DELETE', http_agent=self.http_agent, headers=self.restheaders, timeout=self.connection_timeout,
|
||||
validate_certs=self.validate_certs)
|
||||
@@ -2036,7 +2036,7 @@ class KeycloakAPI(object):
|
||||
URL_AUTHENTICATION_FLOW_COPY.format(
|
||||
url=self.baseurl,
|
||||
realm=realm,
|
||||
copyfrom=quote(config["copyFrom"], safe='')),
|
||||
copyfrom=quote(config["copyFrom"])),
|
||||
method='POST',
|
||||
http_agent=self.http_agent, headers=self.restheaders,
|
||||
data=json.dumps(new_name),
|
||||
@@ -2110,7 +2110,7 @@ class KeycloakAPI(object):
|
||||
URL_AUTHENTICATION_FLOW_EXECUTIONS.format(
|
||||
url=self.baseurl,
|
||||
realm=realm,
|
||||
flowalias=quote(flowAlias, safe='')),
|
||||
flowalias=quote(flowAlias)),
|
||||
method='PUT',
|
||||
http_agent=self.http_agent, headers=self.restheaders,
|
||||
data=json.dumps(updatedExec),
|
||||
@@ -2159,7 +2159,7 @@ class KeycloakAPI(object):
|
||||
URL_AUTHENTICATION_FLOW_EXECUTIONS_FLOW.format(
|
||||
url=self.baseurl,
|
||||
realm=realm,
|
||||
flowalias=quote(flowAlias, safe='')),
|
||||
flowalias=quote(flowAlias)),
|
||||
method='POST',
|
||||
http_agent=self.http_agent, headers=self.restheaders,
|
||||
data=json.dumps(newSubFlow),
|
||||
@@ -2183,7 +2183,7 @@ class KeycloakAPI(object):
|
||||
URL_AUTHENTICATION_FLOW_EXECUTIONS_EXECUTION.format(
|
||||
url=self.baseurl,
|
||||
realm=realm,
|
||||
flowalias=quote(flowAlias, safe='')),
|
||||
flowalias=quote(flowAlias)),
|
||||
method='POST',
|
||||
http_agent=self.http_agent, headers=self.restheaders,
|
||||
data=json.dumps(newExec),
|
||||
@@ -2243,7 +2243,7 @@ class KeycloakAPI(object):
|
||||
URL_AUTHENTICATION_FLOW_EXECUTIONS.format(
|
||||
url=self.baseurl,
|
||||
realm=realm,
|
||||
flowalias=quote(config["alias"], safe='')),
|
||||
flowalias=quote(config["alias"])),
|
||||
method='GET',
|
||||
http_agent=self.http_agent, headers=self.restheaders,
|
||||
timeout=self.connection_timeout,
|
||||
@@ -2336,7 +2336,7 @@ class KeycloakAPI(object):
|
||||
return open_url(
|
||||
URL_AUTHENTICATION_REQUIRED_ACTIONS_ALIAS.format(
|
||||
url=self.baseurl,
|
||||
alias=quote(alias, safe=''),
|
||||
alias=quote(alias),
|
||||
realm=realm
|
||||
),
|
||||
method='PUT',
|
||||
@@ -2363,7 +2363,7 @@ class KeycloakAPI(object):
|
||||
return open_url(
|
||||
URL_AUTHENTICATION_REQUIRED_ACTIONS_ALIAS.format(
|
||||
url=self.baseurl,
|
||||
alias=quote(alias, safe=''),
|
||||
alias=quote(alias),
|
||||
realm=realm
|
||||
),
|
||||
method='DELETE',
|
||||
@@ -2630,7 +2630,7 @@ class KeycloakAPI(object):
|
||||
|
||||
def get_authz_authorization_scope_by_name(self, name, client_id, realm):
|
||||
url = URL_AUTHZ_AUTHORIZATION_SCOPES.format(url=self.baseurl, client_id=client_id, realm=realm)
|
||||
search_url = "%s/search?name=%s" % (url, quote(name, safe=''))
|
||||
search_url = "%s/search?name=%s" % (url, quote(name))
|
||||
|
||||
try:
|
||||
return json.loads(to_native(open_url(search_url, method='GET', http_agent=self.http_agent, headers=self.restheaders,
|
||||
|
||||
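The keycloak.py hunks above all toggle the second argument of urllib's quote() when building role and flow URLs. A minimal sketch of the behavioural difference, using the standard-library quote and a made-up role name that contains a slash:

    from urllib.parse import quote  # stdlib equivalent of the six.moves import used by the module_utils

    name = "parent/child"            # hypothetical role name containing a reserved character
    print(quote(name))               # 'parent/child'  - '/' is in the default safe set and is kept
    print(quote(name, safe=''))      # 'parent%2Fchild' - everything is percent-encoded

With safe='' the value is fully percent-encoded before being interpolated into URL_REALM_ROLE and friends, so a '/' in a name cannot be misread as a path separator by the Keycloak API.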
@@ -104,7 +104,7 @@ class IPAClient(object):

    def get_ipa_version(self):
        response = self.ping()['summary']
        ipa_ver_regex = re.compile(r'IPA server version (\d+\.\d+\.\d+).*')
        ipa_ver_regex = re.compile(r'IPA server version (\d\.\d\.\d).*')
        version_match = ipa_ver_regex.match(response)
        ipa_version = None
        if version_match:
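The two compiled patterns in this get_ipa_version() hunk differ only in whether multi-digit version components are accepted. A quick check with a made-up ping() summary string:

    import re

    summary = "IPA server version 4.10.1. API version 2.251"   # hypothetical summary text
    print(re.match(r'IPA server version (\d\.\d\.\d).*', summary))                  # None - '10' has two digits
    print(re.match(r'IPA server version (\d+\.\d+\.\d+).*', summary).group(1))      # '4.10.1'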
@@ -139,7 +139,5 @@ class LdapGeneric(object):

    def _xorder_dn(self):
        # match X_ORDERed DNs
        regex = r".+\{\d+\}.+"
        explode_dn = ldap.dn.explode_dn(self.module.params['dn'])

        return re.match(regex, explode_dn[0]) is not None
        regex = r"\w+=\{\d+\}.+"
        return re.match(regex, self.module.params['dn']) is not None
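The two _xorder_dn() variants above detect X_ORDERED-style DNs either by exploding the DN with python-ldap and testing its first RDN, or by matching the raw DN string directly. A small illustration of the string-based pattern with made-up DNs:

    import re

    print(re.match(r"\w+=\{\d+\}.+", "olcDatabase={1}mdb,cn=config") is not None)      # True - ordered RDN
    print(re.match(r"\w+=\{\d+\}.+", "cn=admin,dc=example,dc=org") is not None)        # False - plain DN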
@@ -41,7 +41,7 @@ class LXDClientException(Exception):


class LXDClient(object):
    def __init__(self, url, key_file=None, cert_file=None, debug=False):
    def __init__(self, url, key_file=None, cert_file=None, debug=False, server_cert_file=None, server_check_hostname=True):
        """LXD Client.

        :param url: The URL of the LXD server. (e.g. unix:/var/lib/lxd/unix.socket or https://127.0.0.1)
@@ -52,6 +52,10 @@ class LXDClient(object):
        :type cert_file: ``str``
        :param debug: The debug flag. The request and response are stored in logs when debug is true.
        :type debug: ``bool``
        :param server_cert_file: The path of the server certificate file.
        :type server_cert_file: ``str``
        :param server_check_hostname: Whether to check the server's hostname as part of TLS verification.
        :type debug: ``bool``
        """
        self.url = url
        self.debug = debug
@@ -61,6 +65,10 @@ class LXDClient(object):
            self.key_file = key_file
            parts = generic_urlparse(urlparse(self.url))
            ctx = ssl.create_default_context(ssl.Purpose.SERVER_AUTH)
            if server_cert_file:
                # Check that the received cert is signed by the provided server_cert_file
                ctx.load_verify_locations(cafile=server_cert_file)
            ctx.check_hostname = server_check_hostname
            ctx.load_cert_chain(cert_file, keyfile=key_file)
            self.connection = HTTPSConnection(parts.get('netloc'), context=ctx)
        elif url.startswith('unix:'):
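The enlarged LXDClient.__init__() above wires an optional pinned server certificate and a hostname-check toggle into the TLS context. A self-contained sketch of the same idea using only the standard library; the helper name and file paths are illustrative, not part of the module:

    import ssl
    from http.client import HTTPSConnection   # the module itself uses the six.moves equivalent

    def https_connection(netloc, cert_file, key_file,
                         server_cert_file=None, server_check_hostname=True):
        ctx = ssl.create_default_context(ssl.Purpose.SERVER_AUTH)
        if server_cert_file:
            # Trust only certificates issued by (or identical to) the provided server certificate
            ctx.load_verify_locations(cafile=server_cert_file)
        ctx.check_hostname = server_check_hostname
        ctx.load_cert_chain(cert_file, keyfile=key_file)
        return HTTPSConnection(netloc, context=ctx)

    # conn = https_connection("127.0.0.1:8443", "client.crt", "client.key", server_cert_file="server.crt")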
@@ -1,205 +0,0 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
# (c) 2020, Alexei Znamensky <russoz@gmail.com>
|
||||
# Copyright (c) 2020, Ansible Project
|
||||
# Simplified BSD License (see LICENSES/BSD-2-Clause.txt or https://opensource.org/licenses/BSD-2-Clause)
|
||||
# SPDX-License-Identifier: BSD-2-Clause
|
||||
|
||||
from __future__ import absolute_import, division, print_function
|
||||
__metaclass__ = type
|
||||
|
||||
from functools import partial
|
||||
|
||||
|
||||
class ArgFormat(object):
|
||||
"""
|
||||
Argument formatter for use as a command line parameter. Used in CmdMixin.
|
||||
"""
|
||||
BOOLEAN = 0
|
||||
PRINTF = 1
|
||||
FORMAT = 2
|
||||
BOOLEAN_NOT = 3
|
||||
|
||||
@staticmethod
|
||||
def stars_deco(num):
|
||||
if num == 1:
|
||||
def deco(f):
|
||||
return lambda v: f(*v)
|
||||
return deco
|
||||
elif num == 2:
|
||||
def deco(f):
|
||||
return lambda v: f(**v)
|
||||
return deco
|
||||
|
||||
return lambda f: f
|
||||
|
||||
def __init__(self, name, fmt=None, style=FORMAT, stars=0):
|
||||
"""
|
||||
THIS CLASS IS BEING DEPRECATED.
|
||||
It was never meant to be used outside the scope of CmdMixin, and CmdMixin is being deprecated.
|
||||
See the deprecation notice in ``CmdMixin.__init__()`` below.
|
||||
|
||||
Creates a CLI-formatter for one specific argument. The argument may be a module parameter or just a named parameter for
|
||||
the CLI command execution.
|
||||
:param name: Name of the argument to be formatted
|
||||
:param fmt: Either a str to be formatted (using or not printf-style) or a callable that does that
|
||||
:param style: Whether arg_format (as str) should use printf-style formatting.
|
||||
Ignored if arg_format is None or not a str (should be callable).
|
||||
:param stars: A int with 0, 1 or 2 value, indicating to formatting the value as: value, *value or **value
|
||||
"""
|
||||
def printf_fmt(_fmt, v):
|
||||
try:
|
||||
return [_fmt % v]
|
||||
except TypeError as e:
|
||||
if e.args[0] != 'not all arguments converted during string formatting':
|
||||
raise
|
||||
return [_fmt]
|
||||
|
||||
_fmts = {
|
||||
ArgFormat.BOOLEAN: lambda _fmt, v: ([_fmt] if bool(v) else []),
|
||||
ArgFormat.BOOLEAN_NOT: lambda _fmt, v: ([] if bool(v) else [_fmt]),
|
||||
ArgFormat.PRINTF: printf_fmt,
|
||||
ArgFormat.FORMAT: lambda _fmt, v: [_fmt.format(v)],
|
||||
}
|
||||
|
||||
self.name = name
|
||||
self.stars = stars
|
||||
self.style = style
|
||||
|
||||
if fmt is None:
|
||||
fmt = "{0}"
|
||||
style = ArgFormat.FORMAT
|
||||
|
||||
if isinstance(fmt, str):
|
||||
func = _fmts[style]
|
||||
self.arg_format = partial(func, fmt)
|
||||
elif isinstance(fmt, list) or isinstance(fmt, tuple):
|
||||
self.arg_format = lambda v: [_fmts[style](f, v)[0] for f in fmt]
|
||||
elif hasattr(fmt, '__call__'):
|
||||
self.arg_format = fmt
|
||||
else:
|
||||
raise TypeError('Parameter fmt must be either: a string, a list/tuple of '
|
||||
'strings or a function: type={0}, value={1}'.format(type(fmt), fmt))
|
||||
|
||||
if stars:
|
||||
self.arg_format = (self.stars_deco(stars))(self.arg_format)
|
||||
|
||||
def to_text(self, value):
|
||||
if value is None and self.style != ArgFormat.BOOLEAN_NOT:
|
||||
return []
|
||||
func = self.arg_format
|
||||
return [str(p) for p in func(value)]
|
||||
|
||||
|
||||
class CmdMixin(object):
|
||||
"""
|
||||
THIS CLASS IS BEING DEPRECATED.
|
||||
See the deprecation notice in ``CmdMixin.__init__()`` below.
|
||||
|
||||
Mixin for mapping module options to running a CLI command with its arguments.
|
||||
"""
|
||||
command = None
|
||||
command_args_formats = {}
|
||||
run_command_fixed_options = {}
|
||||
check_rc = False
|
||||
force_lang = "C"
|
||||
|
||||
@property
|
||||
def module_formats(self):
|
||||
result = {}
|
||||
for param in self.module.params.keys():
|
||||
result[param] = ArgFormat(param)
|
||||
return result
|
||||
|
||||
@property
|
||||
def custom_formats(self):
|
||||
result = {}
|
||||
for param, fmt_spec in self.command_args_formats.items():
|
||||
result[param] = ArgFormat(param, **fmt_spec)
|
||||
return result
|
||||
|
||||
def __init__(self, *args, **kwargs):
|
||||
super(CmdMixin, self).__init__(*args, **kwargs)
|
||||
self.module.deprecate(
|
||||
'The CmdMixin used in classes CmdModuleHelper and CmdStateModuleHelper is being deprecated. '
|
||||
'Modules should use community.general.plugins.module_utils.cmd_runner.CmdRunner instead.',
|
||||
version='8.0.0',
|
||||
collection_name='community.general',
|
||||
)
|
||||
|
||||
def _calculate_args(self, extra_params=None, params=None):
|
||||
def add_arg_formatted_param(_cmd_args, arg_format, _value):
|
||||
args = list(arg_format.to_text(_value))
|
||||
return _cmd_args + args
|
||||
|
||||
def find_format(_param):
|
||||
return self.custom_formats.get(_param, self.module_formats.get(_param))
|
||||
|
||||
extra_params = extra_params or dict()
|
||||
cmd_args = list([self.command]) if isinstance(self.command, str) else list(self.command)
|
||||
try:
|
||||
cmd_args[0] = self.module.get_bin_path(cmd_args[0], required=True)
|
||||
except ValueError:
|
||||
pass
|
||||
param_list = params if params else self.vars.keys()
|
||||
|
||||
for param in param_list:
|
||||
if isinstance(param, dict):
|
||||
if len(param) != 1:
|
||||
self.do_raise("run_command parameter as a dict must contain only one key: {0}".format(param))
|
||||
_param = list(param.keys())[0]
|
||||
fmt = find_format(_param)
|
||||
value = param[_param]
|
||||
elif isinstance(param, str):
|
||||
if param in self.vars.keys():
|
||||
fmt = find_format(param)
|
||||
value = self.vars[param]
|
||||
elif param in extra_params:
|
||||
fmt = find_format(param)
|
||||
value = extra_params[param]
|
||||
else:
|
||||
self.do_raise('Cannot determine value for parameter: {0}'.format(param))
|
||||
else:
|
||||
self.do_raise("run_command parameter must be either a str or a dict: {0}".format(param))
|
||||
cmd_args = add_arg_formatted_param(cmd_args, fmt, value)
|
||||
|
||||
return cmd_args
|
||||
|
||||
def process_command_output(self, rc, out, err):
|
||||
return rc, out, err
|
||||
|
||||
def run_command(self,
|
||||
extra_params=None,
|
||||
params=None,
|
||||
process_output=None,
|
||||
publish_rc=True,
|
||||
publish_out=True,
|
||||
publish_err=True,
|
||||
publish_cmd=True,
|
||||
*args, **kwargs):
|
||||
cmd_args = self._calculate_args(extra_params, params)
|
||||
options = dict(self.run_command_fixed_options)
|
||||
options['check_rc'] = options.get('check_rc', self.check_rc)
|
||||
options.update(kwargs)
|
||||
env_update = dict(options.get('environ_update', {}))
|
||||
if self.force_lang:
|
||||
env_update.update({
|
||||
'LANGUAGE': self.force_lang,
|
||||
'LC_ALL': self.force_lang,
|
||||
})
|
||||
self.update_output(force_lang=self.force_lang)
|
||||
options['environ_update'] = env_update
|
||||
rc, out, err = self.module.run_command(cmd_args, *args, **options)
|
||||
if publish_rc:
|
||||
self.update_output(rc=rc)
|
||||
if publish_out:
|
||||
self.update_output(stdout=out)
|
||||
if publish_err:
|
||||
self.update_output(stderr=err)
|
||||
if publish_cmd:
|
||||
self.update_output(cmd_args=cmd_args)
|
||||
if process_output is None:
|
||||
_process = self.process_command_output
|
||||
else:
|
||||
_process = process_output
|
||||
|
||||
return _process(rc, out, err)
|
||||
@@ -12,7 +12,6 @@ from ansible.module_utils.common.dict_transformations import dict_merge
|
||||
|
||||
# (TODO: remove AnsibleModule!) pylint: disable-next=unused-import
|
||||
from ansible_collections.community.general.plugins.module_utils.mh.base import ModuleHelperBase, AnsibleModule # noqa: F401
|
||||
from ansible_collections.community.general.plugins.module_utils.mh.mixins.cmd import CmdMixin
|
||||
from ansible_collections.community.general.plugins.module_utils.mh.mixins.state import StateMixin
|
||||
from ansible_collections.community.general.plugins.module_utils.mh.mixins.deps import DependencyMixin
|
||||
from ansible_collections.community.general.plugins.module_utils.mh.mixins.vars import VarsMixin
|
||||
@@ -66,19 +65,3 @@ class ModuleHelper(DeprecateAttrsMixin, VarsMixin, DependencyMixin, ModuleHelper
|
||||
|
||||
class StateModuleHelper(StateMixin, ModuleHelper):
|
||||
pass
|
||||
|
||||
|
||||
class CmdModuleHelper(CmdMixin, ModuleHelper):
|
||||
"""
|
||||
THIS CLASS IS BEING DEPRECATED.
|
||||
See the deprecation notice in ``CmdMixin.__init__()``.
|
||||
"""
|
||||
pass
|
||||
|
||||
|
||||
class CmdStateModuleHelper(CmdMixin, StateMixin, ModuleHelper):
|
||||
"""
|
||||
THIS CLASS IS BEING DEPRECATED.
|
||||
See the deprecation notice in ``CmdMixin.__init__()``.
|
||||
"""
|
||||
pass
|
||||
|
||||
@@ -11,9 +11,8 @@ __metaclass__ = type
|
||||
|
||||
|
||||
from ansible_collections.community.general.plugins.module_utils.mh.module_helper import (
|
||||
ModuleHelper, StateModuleHelper, CmdModuleHelper, CmdStateModuleHelper, AnsibleModule
|
||||
ModuleHelper, StateModuleHelper, AnsibleModule
|
||||
)
|
||||
from ansible_collections.community.general.plugins.module_utils.mh.mixins.cmd import CmdMixin, ArgFormat # noqa: F401
|
||||
from ansible_collections.community.general.plugins.module_utils.mh.mixins.state import StateMixin # noqa: F401
|
||||
from ansible_collections.community.general.plugins.module_utils.mh.mixins.deps import DependencyCtxMgr, DependencyMixin # noqa: F401
|
||||
from ansible_collections.community.general.plugins.module_utils.mh.exceptions import ModuleHelperException # noqa: F401
|
||||
|
||||
@@ -7,12 +7,6 @@
from __future__ import absolute_import, division, print_function
__metaclass__ = type

# (TODO: remove next line!)
import atexit  # noqa: F401, pylint: disable=unused-import
# (TODO: remove next line!)
import time  # noqa: F401, pylint: disable=unused-import
# (TODO: remove next line!)
import re  # noqa: F401, pylint: disable=unused-import
import traceback

PROXMOXER_IMP_ERR = None
@@ -26,8 +20,6 @@ except ImportError:


from ansible.module_utils.basic import env_fallback, missing_required_lib
# (TODO: remove next line!)
from ansible.module_utils.common.text.converters import to_native  # noqa: F401, pylint: disable=unused-import
from ansible_collections.community.general.plugins.module_utils.version import LooseVersion
@@ -107,6 +107,5 @@ def puppet_runner(module):
            verbose=cmd_runner_fmt.as_bool("--verbose"),
        ),
        check_rc=False,
        force_lang=module.params["environment_lang"],
    )
    return runner
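The puppet_runner() hunk above uses the CmdRunner/cmd_runner_fmt pair that replaces the CmdMixin machinery removed later in this compare. A hedged, minimal sketch of the calling pattern visible in these diffs; the command name and argument names below are invented for illustration:

    from ansible.module_utils.basic import AnsibleModule
    from ansible_collections.community.general.plugins.module_utils.cmd_runner import CmdRunner, cmd_runner_fmt

    def build_runner(module):
        return CmdRunner(
            module,
            command="mytool",                                   # illustrative binary name
            arg_formats=dict(
                verbose=cmd_runner_fmt.as_bool("--verbose"),    # renders --verbose only when the value is truthy
                name=cmd_runner_fmt.as_list(),                  # passes the value through as positional argument(s)
            ),
        )

    # with build_runner(module)("verbose name") as ctx:
    #     rc, out, err = ctx.run()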
@@ -20,8 +20,6 @@ from ansible.module_utils.six import text_type
from ansible.module_utils.six.moves import http_client
from ansible.module_utils.six.moves.urllib.error import URLError, HTTPError
from ansible.module_utils.six.moves.urllib.parse import urlparse
from ansible.module_utils.ansible_release import __version__ as ansible_version
from ansible_collections.community.general.plugins.module_utils.version import LooseVersion

GET_HEADERS = {'accept': 'application/json', 'OData-Version': '4.0'}
POST_HEADERS = {'content-type': 'application/json', 'accept': 'application/json',
@@ -132,7 +130,7 @@ class RedfishUtils(object):
        return resp

    # The following functions are to send GET/POST/PATCH/DELETE requests
    def get_request(self, uri, override_headers=None, allow_no_resp=False):
    def get_request(self, uri, override_headers=None):
        req_headers = dict(GET_HEADERS)
        if override_headers:
            req_headers.update(override_headers)
@@ -147,19 +145,13 @@ class RedfishUtils(object):
                            force_basic_auth=basic_auth, validate_certs=False,
                            follow_redirects='all',
                            use_proxy=True, timeout=self.timeout)
            headers = dict((k.lower(), v) for (k, v) in resp.info().items())
            try:
                if headers.get('content-encoding') == 'gzip' and LooseVersion(ansible_version) < LooseVersion('2.14'):
                    # Older versions of Ansible do not automatically decompress the data
                    # Starting in 2.14, open_url will decompress the response data by default
                    data = json.loads(to_native(gzip.open(BytesIO(resp.read()), 'rt', encoding='utf-8').read()))
                else:
                    data = json.loads(to_native(resp.read()))
            except Exception as e:
                # No response data; this is okay in certain cases
                data = None
                if not allow_no_resp:
                    raise
            if override_headers:
                resp = gzip.open(BytesIO(resp.read()), 'rt', encoding='utf-8')
                data = json.loads(to_native(resp.read()))
                headers = req_headers
            else:
                data = json.loads(to_native(resp.read()))
                headers = dict((k.lower(), v) for (k, v) in resp.info().items())
        except HTTPError as e:
            msg = self._get_extended_message(e)
            return {'ret': False,
@@ -1821,7 +1813,7 @@ class RedfishUtils(object):
            return {'ret': False, 'msg': 'Must provide a handle tracking the update.'}

        # Get the task or job tracking the update
        response = self.get_request(self.root_uri + update_handle, allow_no_resp=True)
        response = self.get_request(self.root_uri + update_handle)
        if response['ret'] is False:
            return response
@@ -2915,7 +2907,8 @@ class RedfishUtils(object):

        # Get a list of all Chassis and build URIs, then get all PowerSupplies
        # from each Power entry in the Chassis
        for chassis_uri in self.chassis_uris:
        chassis_uri_list = self.chassis_uris
        for chassis_uri in chassis_uri_list:
            response = self.get_request(self.root_uri + chassis_uri)
            if response['ret'] is False:
                return response
@@ -3477,30 +3470,33 @@ class RedfishUtils(object):
        result = {}
        key = "Thermal"
        # Go through list
        for chassis_uri in self.chassis_uris:
        for chassis_uri in self.chassis_uri_list:
            response = self.get_request(self.root_uri + chassis_uri)
            if response['ret'] is False:
                return response
            result['ret'] = True
            data = response['data']
            val = data.get('Oem', {}).get('Hpe', {}).get('ThermalConfiguration')
            if val is not None:
                return {"ret": True, "current_thermal_config": val}
        return {"ret": False}
            oem = data.get['Oem']
            hpe = oem.get['Hpe']
            thermal_config = hpe.get('ThermalConfiguration')
            result["current_thermal_config"] = thermal_config
        return result

    def get_hpe_fan_percent_min(self):
        result = {}
        key = "Thermal"
        # Go through list
        for chassis_uri in self.chassis_uris:
        for chassis_uri in self.chassis_uri_list:
            response = self.get_request(self.root_uri + chassis_uri)
            if response['ret'] is False:
                return response
            result['ret'] = True
            data = response['data']
            val = data.get('Oem', {}).get('Hpe', {}).get('FanPercentMinimum')
            if val is not None:
                return {"ret": True, "fan_percent_min": val}
        return {"ret": False}
            oem = data.get['Oem']
            hpe = oem.get['Hpe']
            fan_percent_min_config = hpe.get('FanPercentMinimum')
            result["fan_percent_min"] = fan_percent_min_config
        return result

    def delete_volumes(self, storage_subsystem_id, volume_ids):
        # Find the Storage resource from the requested ComputerSystem resource
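One side of the HPE hunks above subscripts the bound method (data.get['Oem'], which raises a TypeError at run time); the other chains dict.get() calls with dict defaults so missing OEM sections simply yield None. A tiny illustration with a made-up chassis payload:

    data = {"Oem": {"Hpe": {"ThermalConfiguration": "OptimalCooling"}}}   # hypothetical Redfish data

    print(data.get('Oem', {}).get('Hpe', {}).get('ThermalConfiguration'))   # 'OptimalCooling'
    print({}.get('Oem', {}).get('Hpe', {}).get('ThermalConfiguration'))     # None - no level raises
    # data.get['Oem'] -> TypeError: 'builtin_function_or_method' object is not subscriptable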
@@ -66,6 +66,19 @@ class _Variable(object):
|
||||
if verbosity is not None:
|
||||
self.verbosity = verbosity
|
||||
|
||||
def as_dict(self, meta_only=False):
|
||||
d = {
|
||||
"diff": self.diff,
|
||||
"change": self.change,
|
||||
"output": self.output,
|
||||
"fact": self.fact,
|
||||
"verbosity": self.verbosity,
|
||||
}
|
||||
if not meta_only:
|
||||
d["initial_value"] = copy.deepcopy(self.initial_value)
|
||||
d["value"] = self.value
|
||||
return d
|
||||
|
||||
def set_value(self, value):
|
||||
if not self.init:
|
||||
self.initial_value = copy.deepcopy(value)
|
||||
@@ -93,7 +106,7 @@ class _Variable(object):
|
||||
|
||||
|
||||
class VarDict(object):
|
||||
reserved_names = ('__vars__', 'var', 'set_meta', 'set', 'output', 'diff', 'facts', 'has_changed')
|
||||
reserved_names = ('__vars__', '_var', 'var', 'set_meta', 'get_meta', 'set', 'output', 'diff', 'facts', 'has_changed', 'as_dict')
|
||||
|
||||
def __init__(self):
|
||||
self.__vars__ = dict()
|
||||
@@ -119,6 +132,9 @@ class VarDict(object):
|
||||
def _var(self, name):
|
||||
return self.__vars__[name]
|
||||
|
||||
def var(self, name):
|
||||
return self._var(name).as_dict()
|
||||
|
||||
def set_meta(self, name, **kwargs):
|
||||
"""Set the metadata for the variable
|
||||
|
||||
@@ -133,6 +149,9 @@ class VarDict(object):
|
||||
"""
|
||||
self._var(name).set_meta(**kwargs)
|
||||
|
||||
def get_meta(self, name):
|
||||
return self._var(name).as_dict(meta_only=True)
|
||||
|
||||
def set(self, name, value, **kwargs):
|
||||
"""Set the value and optionally metadata for a variable. The variable is not required to exist prior to calling `set`.
|
||||
|
||||
@@ -172,7 +191,7 @@ class VarDict(object):
|
||||
|
||||
@property
|
||||
def has_changed(self):
|
||||
return any(True for var in self.__vars__.values() if var.has_changed)
|
||||
return any(var.has_changed for var in self.__vars__.values())
|
||||
|
||||
def as_dict(self):
|
||||
return dict((name, var.value) for name, var in self.__vars__.items())
|
||||
|
||||
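The _Variable/VarDict additions above (as_dict with meta_only, get_meta, the simplified has_changed) suggest the following usage; this is a sketch, and the import path is assumed rather than shown in this excerpt:

    from ansible_collections.community.general.plugins.module_utils.vardict import VarDict   # assumed path

    vd = VarDict()
    vd.set("state", "started", output=True, change=True)   # illustrative variable and metadata
    vd.set("state", "stopped")

    print(vd.get_meta("state"))   # metadata only: diff/change/output/fact/verbosity
    print(vd.var("state"))        # metadata plus initial_value and current value
    print(vd.has_changed)         # True - a change-tracked variable moved away from its initial value
    print(vd.as_dict())           # {'state': 'stopped'}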
@@ -10,13 +10,4 @@ from __future__ import absolute_import, division, print_function
__metaclass__ = type


from ansible.module_utils.six import raise_from

try:
    from ansible.module_utils.compat.version import LooseVersion  # noqa: F401, pylint: disable=unused-import
except ImportError:
    try:
        from distutils.version import LooseVersion  # noqa: F401, pylint: disable=unused-import
    except ImportError as exc:
        msg = 'To use this plugin or module with ansible-core 2.11, you need to use Python < 3.12 with distutils.version present'
        raise_from(ImportError(msg), exc)
from ansible.module_utils.compat.version import LooseVersion  # noqa: F401, pylint: disable=unused-import
@@ -38,8 +38,8 @@ options:
|
||||
type: list
|
||||
elements: str
|
||||
default:
|
||||
- agblksize=4096
|
||||
- isnapshot=no
|
||||
- agblksize='4096'
|
||||
- isnapshot='no'
|
||||
auto_mount:
|
||||
description:
|
||||
- File system is automatically mounted at system restart.
|
||||
@@ -242,7 +242,7 @@ def _validate_vg(module, vg):
|
||||
if rc != 0:
|
||||
module.fail_json(msg="Failed executing %s command." % lsvg_cmd)
|
||||
|
||||
rc, current_all_vgs, err = module.run_command([lsvg_cmd])
|
||||
rc, current_all_vgs, err = module.run_command([lsvg_cmd, "%s"])
|
||||
if rc != 0:
|
||||
module.fail_json(msg="Failed executing %s command." % lsvg_cmd)
|
||||
|
||||
@@ -365,53 +365,7 @@ def create_fs(
|
||||
# Creates a LVM file system.
|
||||
crfs_cmd = module.get_bin_path('crfs', True)
|
||||
if not module.check_mode:
|
||||
cmd = [crfs_cmd]
|
||||
|
||||
cmd.append("-v")
|
||||
cmd.append(fs_type)
|
||||
|
||||
if vg:
|
||||
(flag, value) = vg.split()
|
||||
cmd.append(flag)
|
||||
cmd.append(value)
|
||||
|
||||
if device:
|
||||
(flag, value) = device.split()
|
||||
cmd.append(flag)
|
||||
cmd.append(value)
|
||||
|
||||
cmd.append("-m")
|
||||
cmd.append(filesystem)
|
||||
|
||||
if mount_group:
|
||||
(flag, value) = mount_group.split()
|
||||
cmd.append(flag)
|
||||
cmd.append(value)
|
||||
|
||||
if auto_mount:
|
||||
(flag, value) = auto_mount.split()
|
||||
cmd.append(flag)
|
||||
cmd.append(value)
|
||||
|
||||
if account_subsystem:
|
||||
(flag, value) = account_subsystem.split()
|
||||
cmd.append(flag)
|
||||
cmd.append(value)
|
||||
|
||||
cmd.append("-p")
|
||||
cmd.append(permissions)
|
||||
|
||||
if size:
|
||||
(flag, value) = size.split()
|
||||
cmd.append(flag)
|
||||
cmd.append(value)
|
||||
|
||||
if attributes:
|
||||
splitted_attributes = attributes.split()
|
||||
cmd.append("-a")
|
||||
for value in splitted_attributes:
|
||||
cmd.append(value)
|
||||
|
||||
cmd = [crfs_cmd, "-v", fs_type, "-m", filesystem, vg, device, mount_group, auto_mount, account_subsystem, "-p", permissions, size, "-a", attributes]
|
||||
rc, crfs_out, err = module.run_command(cmd)
|
||||
|
||||
if rc == 10:
|
||||
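One variant of the create_fs() hunk above appends each crfs option as a separate flag/value pair and only when it was supplied; the flattened single-list variant passes the raw option strings (and None for unset options) straight to run_command. A small illustration of the pair-splitting step, with invented option values formatted the way create_fs() expects ("<flag> <value>"):

    vg = "-g rootvg"          # hypothetical volume group option
    size = "-a size=1G"       # hypothetical size option

    cmd = ["crfs", "-v", "jfs2", "-m", "/data", "-p", "rw"]
    for opt in (vg, size):
        if opt:
            flag, value = opt.split()
            cmd += [flag, value]
    print(cmd)   # ['crfs', '-v', 'jfs2', '-m', '/data', '-p', 'rw', '-g', 'rootvg', '-a', 'size=1G']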
@@ -507,7 +461,7 @@ def main():
|
||||
module = AnsibleModule(
|
||||
argument_spec=dict(
|
||||
account_subsystem=dict(type='bool', default=False),
|
||||
attributes=dict(type='list', elements='str', default=["agblksize=4096", "isnapshot=no"]),
|
||||
attributes=dict(type='list', elements='str', default=["agblksize='4096'", "isnapshot='no'"]),
|
||||
auto_mount=dict(type='bool', default=True),
|
||||
device=dict(type='str'),
|
||||
filesystem=dict(type='str', required=True),
|
||||
|
||||
@@ -17,15 +17,13 @@ version_added: 3.5.0
|
||||
description:
|
||||
- This module allows the installation of Ansible collections or roles using C(ansible-galaxy).
|
||||
notes:
|
||||
- >
|
||||
B(Ansible 2.9/2.10): The C(ansible-galaxy) command changed significantly between Ansible 2.9 and
|
||||
ansible-base 2.10 (later ansible-core 2.11). See comments in the parameters.
|
||||
- Support for B(Ansible 2.9/2.10) was removed in community.general 8.0.0.
|
||||
- >
|
||||
The module will try and run using the C(C.UTF-8) locale.
|
||||
If that fails, it will try C(en_US.UTF-8).
|
||||
If that one also fails, the module will fail.
|
||||
requirements:
|
||||
- Ansible 2.9, ansible-base 2.10, or ansible-core 2.11 or newer
|
||||
- ansible-core 2.11 or newer
|
||||
extends_documentation_fragment:
|
||||
- community.general.attributes
|
||||
attributes:
|
||||
@@ -75,24 +73,16 @@ options:
|
||||
description:
|
||||
- Force overwriting an existing role or collection.
|
||||
- Using O(force=true) is mandatory when downgrading.
|
||||
- "B(Ansible 2.9 and 2.10): Must be V(true) to upgrade roles and collections."
|
||||
type: bool
|
||||
default: false
|
||||
ack_ansible29:
|
||||
description:
|
||||
- Acknowledge using Ansible 2.9 with its limitations, and prevents the module from generating warnings about them.
|
||||
- This option is completely ignored if using a version of Ansible greater than C(2.9.x).
|
||||
- Note that this option will be removed without any further deprecation warning once support
|
||||
for Ansible 2.9 is removed from this module.
|
||||
- This option has no longer any effect and will be removed in community.general 9.0.0.
|
||||
type: bool
|
||||
default: false
|
||||
ack_min_ansiblecore211:
|
||||
description:
|
||||
- Acknowledge the module is deprecating support for Ansible 2.9 and ansible-base 2.10.
|
||||
- Support for those versions will be removed in community.general 8.0.0.
|
||||
At the same time, this option will be removed without any deprecation warning!
|
||||
- This option is completely ignored if using a version of ansible-core/ansible-base/Ansible greater than C(2.11).
|
||||
- For the sake of conciseness, setting this parameter to V(true) implies O(ack_ansible29=true).
|
||||
- This option has no longer any effect and will be removed in community.general 9.0.0.
|
||||
type: bool
|
||||
default: false
|
||||
"""
|
||||
@@ -147,7 +137,6 @@ RETURN = """
|
||||
description:
|
||||
- If O(requirements_file) is specified instead, returns dictionary with all the roles installed per path.
|
||||
- If O(name) is specified, returns that role name and the version installed per path.
|
||||
- "B(Ansible 2.9): Returns empty because C(ansible-galaxy) has no C(list) subcommand."
|
||||
type: dict
|
||||
returned: always when installing roles
|
||||
contains:
|
||||
@@ -164,7 +153,6 @@ RETURN = """
|
||||
description:
|
||||
- If O(requirements_file) is specified instead, returns dictionary with all the collections installed per path.
|
||||
- If O(name) is specified, returns that collection name and the version installed per path.
|
||||
- "B(Ansible 2.9): Returns empty because C(ansible-galaxy) has no C(list) subcommand."
|
||||
type: dict
|
||||
returned: always when installing collections
|
||||
contains:
|
||||
@@ -206,7 +194,6 @@ class AnsibleGalaxyInstall(ModuleHelper):
|
||||
_RE_LIST_ROLE = re.compile(r'^- (?P<elem>\w+\.\w+),\s+(?P<version>[\d\.]+)\s*$')
|
||||
_RE_INSTALL_OUTPUT = None # Set after determining ansible version, see __init_module__()
|
||||
ansible_version = None
|
||||
is_ansible29 = None
|
||||
|
||||
output_params = ('type', 'name', 'dest', 'requirements_file', 'force', 'no_deps')
|
||||
module = dict(
|
||||
@@ -217,8 +204,18 @@ class AnsibleGalaxyInstall(ModuleHelper):
|
||||
dest=dict(type='path'),
|
||||
force=dict(type='bool', default=False),
|
||||
no_deps=dict(type='bool', default=False),
|
||||
ack_ansible29=dict(type='bool', default=False),
|
||||
ack_min_ansiblecore211=dict(type='bool', default=False),
|
||||
ack_ansible29=dict(
|
||||
type='bool',
|
||||
default=False,
|
||||
removed_in_version='9.0.0',
|
||||
removed_from_collection='community.general',
|
||||
),
|
||||
ack_min_ansiblecore211=dict(
|
||||
type='bool',
|
||||
default=False,
|
||||
removed_in_version='9.0.0',
|
||||
removed_from_collection='community.general',
|
||||
),
|
||||
),
|
||||
mutually_exclusive=[('name', 'requirements_file')],
|
||||
required_one_of=[('name', 'requirements_file')],
|
||||
@@ -268,26 +265,22 @@ class AnsibleGalaxyInstall(ModuleHelper):
|
||||
def __init_module__(self):
|
||||
# self.runner = CmdRunner(self.module, command=self.command, arg_formats=self.command_args_formats, force_lang=self.force_lang)
|
||||
self.runner, self.ansible_version = self._get_ansible_galaxy_version()
|
||||
if self.ansible_version < (2, 11) and not self.vars.ack_min_ansiblecore211:
|
||||
self.module.deprecate(
|
||||
"Support for Ansible 2.9 and ansible-base 2.10 is being deprecated. "
|
||||
"At the same time support for them is ended, also the ack_ansible29 option will be removed. "
|
||||
"Upgrading is strongly recommended, or set 'ack_min_ansiblecore211' to suppress this message.",
|
||||
version="8.0.0",
|
||||
collection_name="community.general",
|
||||
if self.ansible_version < (2, 11):
|
||||
self.module.fail_json(
|
||||
msg="Support for Ansible 2.9 and ansible-base 2.10 has ben removed."
|
||||
)
|
||||
self.is_ansible29 = self.ansible_version < (2, 10)
|
||||
if self.is_ansible29:
|
||||
self._RE_INSTALL_OUTPUT = re.compile(r"^(?:.*Installing '(?P<collection>\w+\.\w+):(?P<cversion>[\d\.]+)'.*"
|
||||
r'|- (?P<role>\w+\.\w+) \((?P<rversion>[\d\.]+)\)'
|
||||
r' was installed successfully)$')
|
||||
else:
|
||||
# Collection install output changed:
|
||||
# ansible-base 2.10: "coll.name (x.y.z)"
|
||||
# ansible-core 2.11+: "coll.name:x.y.z"
|
||||
self._RE_INSTALL_OUTPUT = re.compile(r'^(?:(?P<collection>\w+\.\w+)(?: \(|:)(?P<cversion>[\d\.]+)\)?'
|
||||
r'|- (?P<role>\w+\.\w+) \((?P<rversion>[\d\.]+)\))'
|
||||
r' was installed successfully$')
|
||||
# Collection install output changed:
|
||||
# ansible-base 2.10: "coll.name (x.y.z)"
|
||||
# ansible-core 2.11+: "coll.name:x.y.z"
|
||||
self._RE_INSTALL_OUTPUT = re.compile(r'^(?:(?P<collection>\w+\.\w+)(?: \(|:)(?P<cversion>[\d\.]+)\)?'
|
||||
r'|- (?P<role>\w+\.\w+) \((?P<rversion>[\d\.]+)\))'
|
||||
r' was installed successfully$')
|
||||
self.vars.set("new_collections", {}, change=True)
|
||||
self.vars.set("new_roles", {}, change=True)
|
||||
if self.vars.type != "collection":
|
||||
self.vars.installed_roles = self._list_roles()
|
||||
if self.vars.type != "roles":
|
||||
self.vars.installed_collections = self._list_collections()
|
||||
|
||||
def _list_element(self, _type, path_re, elem_re):
|
||||
def process(rc, out, err):
|
||||
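The single _RE_INSTALL_OUTPUT kept in __init_module__() above accepts both collection output styles mentioned in its comments as well as role output. A quick check against made-up ansible-galaxy output lines:

    import re

    regex = re.compile(r'^(?:(?P<collection>\w+\.\w+)(?: \(|:)(?P<cversion>[\d\.]+)\)?'
                       r'|- (?P<role>\w+\.\w+) \((?P<rversion>[\d\.]+)\))'
                       r' was installed successfully$')

    for line in (
        "community.general:7.0.0 was installed successfully",     # ansible-core 2.11+ collection style
        "community.general (7.0.0) was installed successfully",   # ansible-base 2.10 collection style
        "- example.role (1.2.3) was installed successfully",      # role style (name and version invented)
    ):
        m = regex.match(line)
        print(m.groupdict() if m else None)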
@@ -322,24 +315,8 @@ class AnsibleGalaxyInstall(ModuleHelper):
|
||||
def _list_roles(self):
|
||||
return self._list_element('role', self._RE_LIST_PATH, self._RE_LIST_ROLE)
|
||||
|
||||
def _setup29(self):
|
||||
self.vars.set("new_collections", {})
|
||||
self.vars.set("new_roles", {})
|
||||
self.vars.set("ansible29_change", False, change=True, output=False)
|
||||
if not (self.vars.ack_ansible29 or self.vars.ack_min_ansiblecore211):
|
||||
self.warn("Ansible 2.9 or older: unable to retrieve lists of roles and collections already installed")
|
||||
if self.vars.requirements_file is not None and self.vars.type == 'both':
|
||||
self.warn("Ansible 2.9 or older: will install only roles from requirement files")
|
||||
|
||||
def _setup210plus(self):
|
||||
self.vars.set("new_collections", {}, change=True)
|
||||
self.vars.set("new_roles", {}, change=True)
|
||||
if self.vars.type != "collection":
|
||||
self.vars.installed_roles = self._list_roles()
|
||||
if self.vars.type != "roles":
|
||||
self.vars.installed_collections = self._list_collections()
|
||||
|
||||
def __run__(self):
|
||||
|
||||
def process(rc, out, err):
|
||||
for line in out.splitlines():
|
||||
match = self._RE_INSTALL_OUTPUT.match(line)
|
||||
@@ -347,19 +324,9 @@ class AnsibleGalaxyInstall(ModuleHelper):
|
||||
continue
|
||||
if match.group("collection"):
|
||||
self.vars.new_collections[match.group("collection")] = match.group("cversion")
|
||||
if self.is_ansible29:
|
||||
self.vars.ansible29_change = True
|
||||
elif match.group("role"):
|
||||
self.vars.new_roles[match.group("role")] = match.group("rversion")
|
||||
if self.is_ansible29:
|
||||
self.vars.ansible29_change = True
|
||||
|
||||
if self.is_ansible29:
|
||||
if self.vars.type == 'both':
|
||||
raise ValueError("Type 'both' not supported in Ansible 2.9")
|
||||
self._setup29()
|
||||
else:
|
||||
self._setup210plus()
|
||||
with self.runner("type galaxy_cmd force no_deps dest requirements_file name", output_process=process) as ctx:
|
||||
ctx.run(galaxy_cmd="install")
|
||||
if self.verbosity > 2:
|
||||
|
||||
@@ -35,9 +35,7 @@ options:
    default: false
  name:
    description:
      - A package name, like V(foo), or multiple packages, like V(foo,bar).
      - Do not include additional whitespace when specifying multiple packages as a string.
        Prefer YAML lists over comma-separating multiple package names.
      - A package name, like V(foo), or multiple packages, like V(foo, bar).
    type: list
    elements: str
  no_cache:
@@ -63,7 +61,7 @@ options:
    type: str
  update_cache:
    description:
      - Update repository indexes. Can be run with other steps or on its own.
      - Update repository indexes. Can be run with other steps or on it's own.
    type: bool
    default: false
  upgrade:
@@ -28,6 +28,9 @@ options:
|
||||
package:
|
||||
description:
|
||||
- List of packages to install, upgrade, or remove.
|
||||
- Since community.general 8.0.0, may include paths to local C(.rpm) files
|
||||
if O(state=installed) or O(state=present), requires C(rpm) python
|
||||
module.
|
||||
aliases: [ name, pkg ]
|
||||
type: list
|
||||
elements: str
|
||||
@@ -63,6 +66,9 @@ options:
|
||||
type: bool
|
||||
default: false
|
||||
version_added: 6.5.0
|
||||
requirements:
|
||||
- C(rpm) python package (rpm bindings), optional. Required if O(package)
|
||||
option includes local files.
|
||||
author:
|
||||
- Evgenii Terechkov (@evgkrsk)
|
||||
'''
|
||||
@@ -109,8 +115,22 @@ EXAMPLES = '''
|
||||
'''
|
||||
|
||||
import os
|
||||
import traceback
|
||||
|
||||
from ansible.module_utils.basic import AnsibleModule
|
||||
from ansible.module_utils.basic import (
|
||||
AnsibleModule,
|
||||
missing_required_lib,
|
||||
)
|
||||
from ansible.module_utils.common.text.converters import to_native
|
||||
|
||||
try:
|
||||
import rpm
|
||||
except ImportError:
|
||||
HAS_RPM_PYTHON = False
|
||||
RPM_PYTHON_IMPORT_ERROR = traceback.format_exc()
|
||||
else:
|
||||
HAS_RPM_PYTHON = True
|
||||
RPM_PYTHON_IMPORT_ERROR = None
|
||||
|
||||
APT_PATH = "/usr/bin/apt-get"
|
||||
RPM_PATH = "/usr/bin/rpm"
|
||||
@@ -118,6 +138,23 @@ APT_GET_ZERO = "\n0 upgraded, 0 newly installed"
|
||||
UPDATE_KERNEL_ZERO = "\nTry to install new kernel "
|
||||
|
||||
|
||||
def local_rpm_package_name(path):
|
||||
"""return package name of a local rpm passed in.
|
||||
Inspired by ansible.builtin.yum"""
|
||||
|
||||
ts = rpm.TransactionSet()
|
||||
ts.setVSFlags(rpm._RPMVSF_NOSIGNATURES)
|
||||
fd = os.open(path, os.O_RDONLY)
|
||||
try:
|
||||
header = ts.hdrFromFdno(fd)
|
||||
except rpm.error as e:
|
||||
return None
|
||||
finally:
|
||||
os.close(fd)
|
||||
|
||||
return to_native(header[rpm.RPMTAG_NAME])
|
||||
|
||||
|
||||
def query_package(module, name):
|
||||
# rpm -q returns 0 if the package is installed,
|
||||
# 1 if it is not installed
|
||||
@@ -131,6 +168,16 @@ def query_package(module, name):
|
||||
def query_package_provides(module, name):
|
||||
# rpm -q returns 0 if the package is installed,
|
||||
# 1 if it is not installed
|
||||
if name.endswith('.rpm'):
|
||||
# Likely a local RPM file
|
||||
if not HAS_RPM_PYTHON:
|
||||
module.fail_json(
|
||||
msg=missing_required_lib('rpm'),
|
||||
exception=RPM_PYTHON_IMPORT_ERROR,
|
||||
)
|
||||
|
||||
name = local_rpm_package_name(name)
|
||||
|
||||
rc, out, err = module.run_command("%s -q --provides %s" % (RPM_PATH, name))
|
||||
return rc == 0
|
||||
|
||||
@@ -204,7 +251,7 @@ def install_packages(module, pkgspec):
|
||||
rc, out, err = module.run_command("%s -y install %s" % (APT_PATH, packages), environ_update={"LANG": "C"})
|
||||
|
||||
installed = True
|
||||
for package in pkgspec:
|
||||
for packages in pkgspec:
|
||||
if not query_package_provides(module, package):
|
||||
installed = False
|
||||
|
||||
|
||||
@@ -102,40 +102,40 @@ EXAMPLES = r'''
|
||||
- name: Create a @home subvolume under the root subvolume
|
||||
community.general.btrfs_subvolume:
|
||||
name: /@home
|
||||
filesystem_device: /dev/vda2
|
||||
device: /dev/vda2
|
||||
|
||||
- name: Remove the @home subvolume if it exists
|
||||
community.general.btrfs_subvolume:
|
||||
name: /@home
|
||||
state: absent
|
||||
filesystem_device: /dev/vda2
|
||||
device: /dev/vda2
|
||||
|
||||
- name: Create a snapshot of the root subvolume named @
|
||||
community.general.btrfs_subvolume:
|
||||
name: /@
|
||||
snapshot_source: /
|
||||
filesystem_device: /dev/vda2
|
||||
device: /dev/vda2
|
||||
|
||||
- name: Create a snapshot of the root subvolume and make it the new default subvolume
|
||||
community.general.btrfs_subvolume:
|
||||
name: /@
|
||||
snapshot_source: /
|
||||
default: Yes
|
||||
filesystem_device: /dev/vda2
|
||||
device: /dev/vda2
|
||||
|
||||
- name: Create a snapshot of the /@ subvolume and recursively creating intermediate subvolumes as required
|
||||
community.general.btrfs_subvolume:
|
||||
name: /@snapshots/@2022_06_09
|
||||
snapshot_source: /@
|
||||
recursive: True
|
||||
filesystem_device: /dev/vda2
|
||||
device: /dev/vda2
|
||||
|
||||
- name: Remove the /@ subvolume and recursively delete child subvolumes as required
|
||||
community.general.btrfs_subvolume:
|
||||
name: /@snapshots/@2022_06_09
|
||||
snapshot_source: /@
|
||||
recursive: True
|
||||
filesystem_device: /dev/vda2
|
||||
device: /dev/vda2
|
||||
|
||||
'''
|
||||
|
||||
|
||||
@@ -137,10 +137,6 @@ class Cargo(object):

    def get_installed(self):
        cmd = ["install", "--list"]
        if self.path:
            cmd.append("--root")
            cmd.append(self.path)

        data, dummy = self._exec(cmd, True, False, False)

        package_regex = re.compile(r"^([\w\-]+) v(.+):$")
@@ -59,6 +59,20 @@ options:
|
||||
- Required for O(type=TLSA) when O(state=present).
|
||||
type: int
|
||||
choices: [ 0, 1, 2, 3 ]
|
||||
flag:
|
||||
description:
|
||||
- Issuer Critical Flag.
|
||||
- Required for O(type=CAA) when O(state=present).
|
||||
type: int
|
||||
choices: [ 0, 1 ]
|
||||
version_added: 8.0.0
|
||||
tag:
|
||||
description:
|
||||
- CAA issue restriction.
|
||||
- Required for O(type=CAA) when O(state=present).
|
||||
type: str
|
||||
choices: [ issue, issuewild, iodef ]
|
||||
version_added: 8.0.0
|
||||
hash_type:
|
||||
description:
|
||||
- Hash type number.
|
||||
@@ -138,9 +152,8 @@ options:
|
||||
description:
|
||||
- The type of DNS record to create. Required if O(state=present).
|
||||
- O(type=DS), O(type=SSHFP), and O(type=TLSA) were added in Ansible 2.7.
|
||||
- Note that V(SPF) is no longer supported by CloudFlare. Support for it will be removed from community.general 9.0.0.
|
||||
type: str
|
||||
choices: [ A, AAAA, CNAME, DS, MX, NS, SPF, SRV, SSHFP, TLSA, TXT ]
|
||||
choices: [ A, AAAA, CNAME, DS, MX, NS, SPF, SRV, SSHFP, TLSA, CAA, TXT ]
|
||||
value:
|
||||
description:
|
||||
- The record value.
|
||||
@@ -263,6 +276,15 @@ EXAMPLES = r'''
|
||||
hash_type: 1
|
||||
value: 6b76d034492b493e15a7376fccd08e63befdad0edab8e442562f532338364bf3
|
||||
|
||||
- name: Create a CAA record subdomain.example.com
|
||||
community.general.cloudflare_dns:
|
||||
zone: example.com
|
||||
record: subdomain
|
||||
type: CAA
|
||||
flag: 0
|
||||
tag: issue
|
||||
value: ca.example.com
|
||||
|
||||
- name: Create a DS record for subdomain.example.com
|
||||
community.general.cloudflare_dns:
|
||||
zone: example.com
|
||||
@@ -292,7 +314,7 @@ record:
|
||||
sample: "2016-03-25T19:09:42.516553Z"
|
||||
data:
|
||||
description: Additional record data.
|
||||
returned: success, if type is SRV, DS, SSHFP or TLSA
|
||||
returned: success, if type is SRV, DS, SSHFP TLSA or CAA
|
||||
type: dict
|
||||
sample: {
|
||||
name: "jabber",
|
||||
@@ -392,6 +414,8 @@ class CloudflareAPI(object):
|
||||
self.algorithm = module.params['algorithm']
|
||||
self.cert_usage = module.params['cert_usage']
|
||||
self.hash_type = module.params['hash_type']
|
||||
self.flag = module.params['flag']
|
||||
self.tag = module.params['tag']
|
||||
self.key_tag = module.params['key_tag']
|
||||
self.port = module.params['port']
|
||||
self.priority = module.params['priority']
|
||||
@@ -596,7 +620,7 @@ class CloudflareAPI(object):
|
||||
def delete_dns_records(self, **kwargs):
|
||||
params = {}
|
||||
for param in ['port', 'proto', 'service', 'solo', 'type', 'record', 'value', 'weight', 'zone',
|
||||
'algorithm', 'cert_usage', 'hash_type', 'selector', 'key_tag']:
|
||||
'algorithm', 'cert_usage', 'hash_type', 'selector', 'key_tag', 'flag', 'tag']:
|
||||
if param in kwargs:
|
||||
params[param] = kwargs[param]
|
||||
else:
|
||||
@@ -614,7 +638,7 @@ class CloudflareAPI(object):
|
||||
content = str(params['key_tag']) + '\t' + str(params['algorithm']) + '\t' + str(params['hash_type']) + '\t' + params['value']
|
||||
elif params['type'] == 'SSHFP':
|
||||
if not (params['value'] is None or params['value'] == ''):
|
||||
content = str(params['algorithm']) + ' ' + str(params['hash_type']) + ' ' + params['value'].upper()
|
||||
content = str(params['algorithm']) + '\t' + str(params['hash_type']) + '\t' + params['value']
|
||||
elif params['type'] == 'TLSA':
|
||||
if not (params['value'] is None or params['value'] == ''):
|
||||
content = str(params['cert_usage']) + '\t' + str(params['selector']) + '\t' + str(params['hash_type']) + '\t' + params['value']
|
||||
@@ -641,7 +665,7 @@ class CloudflareAPI(object):
|
||||
def ensure_dns_record(self, **kwargs):
|
||||
params = {}
|
||||
for param in ['port', 'priority', 'proto', 'proxied', 'service', 'ttl', 'type', 'record', 'value', 'weight', 'zone',
|
||||
'algorithm', 'cert_usage', 'hash_type', 'selector', 'key_tag']:
|
||||
'algorithm', 'cert_usage', 'hash_type', 'selector', 'key_tag', 'flag', 'tag']:
|
||||
if param in kwargs:
|
||||
params[param] = kwargs[param]
|
||||
else:
|
||||
@@ -727,7 +751,7 @@ class CloudflareAPI(object):
|
||||
if (attr is None) or (attr == ''):
|
||||
self.module.fail_json(msg="You must provide algorithm, hash_type and a value to create this record type")
|
||||
sshfp_data = {
|
||||
"fingerprint": params['value'].upper(),
|
||||
"fingerprint": params['value'],
|
||||
"type": params['hash_type'],
|
||||
"algorithm": params['algorithm'],
|
||||
}
|
||||
@@ -737,7 +761,7 @@ class CloudflareAPI(object):
|
||||
'data': sshfp_data,
|
||||
"ttl": params['ttl'],
|
||||
}
|
||||
search_value = str(params['algorithm']) + ' ' + str(params['hash_type']) + ' ' + params['value']
|
||||
search_value = str(params['algorithm']) + '\t' + str(params['hash_type']) + '\t' + params['value']
|
||||
|
||||
if params['type'] == 'TLSA':
|
||||
for attr in [params['port'], params['proto'], params['cert_usage'], params['selector'], params['hash_type'], params['value']]:
|
||||
@@ -758,12 +782,36 @@ class CloudflareAPI(object):
|
||||
}
|
||||
search_value = str(params['cert_usage']) + '\t' + str(params['selector']) + '\t' + str(params['hash_type']) + '\t' + params['value']
|
||||
|
||||
if params['type'] == 'CAA':
|
||||
for attr in [params['flag'], params['tag'], params['value']]:
|
||||
if (attr is None) or (attr == ''):
|
||||
self.module.fail_json(msg="You must provide flag, tag and a value to create this record type")
|
||||
caa_data = {
|
||||
"flags": params['flag'],
|
||||
"tag": params['tag'],
|
||||
"value": params['value'],
|
||||
}
|
||||
new_record = {
|
||||
"type": params['type'],
|
||||
"name": params['record'],
|
||||
'data': caa_data,
|
||||
"ttl": params['ttl'],
|
||||
}
|
||||
search_value = None
|
||||
|
||||
zone_id = self._get_zone_id(params['zone'])
|
||||
records = self.get_dns_records(params['zone'], params['type'], search_record, search_value)
|
||||
# in theory this should be impossible as cloudflare does not allow
|
||||
# the creation of duplicate records but lets cover it anyways
|
||||
if len(records) > 1:
|
||||
self.module.fail_json(msg="More than one record already exists for the given attributes. That should be impossible, please open an issue!")
|
||||
# As Cloudflare API cannot filter record containing quotes
|
||||
# CAA records must be compared locally
|
||||
if params['type'] == 'CAA':
|
||||
for rr in records:
|
||||
if rr['data']['flags'] == caa_data['flags'] and rr['data']['tag'] == caa_data['tag'] and rr['data']['value'] == caa_data['value']:
|
||||
return rr, self.changed
|
||||
else:
|
||||
self.module.fail_json(msg="More than one record already exists for the given attributes. That should be impossible, please open an issue!")
|
||||
# record already exists, check if it must be updated
|
||||
if len(records) == 1:
|
||||
cur_record = records[0]
|
||||
@@ -812,6 +860,8 @@ def main():
|
||||
hash_type=dict(type='int', choices=[1, 2]),
|
||||
key_tag=dict(type='int', no_log=False),
|
||||
port=dict(type='int'),
|
||||
flag=dict(type='int', choices=[0, 1]),
|
||||
tag=dict(type='str', choices=['issue', 'issuewild', 'iodef']),
|
||||
priority=dict(type='int', default=1),
|
||||
proto=dict(type='str'),
|
||||
proxied=dict(type='bool', default=False),
|
||||
@@ -822,7 +872,7 @@ def main():
|
||||
state=dict(type='str', default='present', choices=['absent', 'present']),
|
||||
timeout=dict(type='int', default=30),
|
||||
ttl=dict(type='int', default=1),
|
||||
type=dict(type='str', choices=['A', 'AAAA', 'CNAME', 'DS', 'MX', 'NS', 'SPF', 'SRV', 'SSHFP', 'TLSA', 'TXT']),
|
||||
type=dict(type='str', choices=['A', 'AAAA', 'CNAME', 'DS', 'MX', 'NS', 'SPF', 'SRV', 'SSHFP', 'TLSA', 'CAA', 'TXT']),
|
||||
value=dict(type='str', aliases=['content']),
|
||||
weight=dict(type='int', default=1),
|
||||
zone=dict(type='str', required=True, aliases=['domain']),
|
||||
@@ -833,6 +883,7 @@ def main():
|
||||
('state', 'absent', ['record']),
|
||||
('type', 'SRV', ['proto', 'service']),
|
||||
('type', 'TLSA', ['proto', 'port']),
|
||||
('type', 'CAA', ['flag', 'tag']),
|
||||
],
|
||||
)
|
||||
|
||||
@@ -859,6 +910,13 @@ def main():
|
||||
and (module.params['value'] is None or module.params['value'] == ''))):
|
||||
module.fail_json(msg="For TLSA records the params cert_usage, selector, hash_type and value all need to be defined, or not at all.")
|
||||
|
||||
if module.params['type'] == 'CAA':
|
||||
if not ((module.params['flag'] is not None and module.params['tag'] is not None
|
||||
and not (module.params['value'] is None or module.params['value'] == ''))
|
||||
or (module.params['flag'] is None and module.params['tag'] is None
|
||||
and (module.params['value'] is None or module.params['value'] == ''))):
|
||||
module.fail_json(msg="For CAA records the params flag, tag and value all need to be defined, or not at all.")
|
||||
|
||||
if module.params['type'] == 'DS':
|
||||
if not ((module.params['key_tag'] is not None and module.params['algorithm'] is not None and module.params['hash_type'] is not None
|
||||
and not (module.params['value'] is None or module.params['value'] == ''))
|
||||
|
||||
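The CAA validation added to main() above enforces that flag, tag and value are either all provided or all omitted. The nested boolean is easier to see as a small helper; this function is illustrative only and not part of the module:

    def caa_params_consistent(flag, tag, value):
        provided = [flag is not None, tag is not None, bool(value)]   # '' and None both count as "not provided"
        return all(provided) or not any(provided)

    print(caa_params_consistent(0, "issue", "ca.example.com"))   # True - complete CAA record
    print(caa_params_consistent(None, None, None))               # True - CAA not requested
    print(caa_params_consistent(0, None, "ca.example.com"))      # False - would trigger fail_json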
@@ -104,6 +104,7 @@ options:
|
||||
description:
|
||||
- The script/command that will be run periodically to check the health of the service.
|
||||
- Requires O(interval) to be provided.
|
||||
- Mutually exclusive with O(ttl), O(tcp) and O(http).
|
||||
interval:
|
||||
type: str
|
||||
description:
|
||||
@@ -131,6 +132,7 @@ options:
|
||||
Similar to the interval this is a number with a V(s) or V(m) suffix to
|
||||
signify the units of seconds or minutes, for example V(15s) or V(1m).
|
||||
If no suffix is supplied V(s) will be used by default, for example V(10) will be V(10s).
|
||||
- Mutually exclusive with O(script), O(tcp) and O(http).
|
||||
tcp:
|
||||
type: str
|
||||
description:
|
||||
@@ -138,6 +140,7 @@ options:
|
||||
will check if the connection attempt to that port is successful (that is, the port is currently accepting connections).
|
||||
The format is V(host:port), for example V(localhost:80).
|
||||
- Requires O(interval) to be provided.
|
||||
- Mutually exclusive with O(script), O(ttl) and O(http).
|
||||
version_added: '1.3.0'
|
||||
http:
|
||||
type: str
|
||||
@@ -145,6 +148,7 @@ options:
|
||||
- Checks can be registered with an HTTP endpoint. This means that consul
|
||||
will check that the http endpoint returns a successful HTTP status.
|
||||
- Requires O(interval) to be provided.
|
||||
- Mutually exclusive with O(script), O(ttl) and O(tcp).
|
||||
timeout:
|
||||
type: str
|
||||
description:
|
||||
@@ -159,7 +163,7 @@ options:
|
||||
ack_params_state_absent:
|
||||
type: bool
|
||||
description:
|
||||
- Disable deprecation warning when using parameters incompatible with O(state=absent).
|
||||
- This parameter has no more effect and is deprecated. It will be removed in community.general 10.0.0.
|
||||
'''
|
||||
|
||||
EXAMPLES = '''
|
||||
@@ -377,13 +381,7 @@ def get_service_by_id_or_name(consul_api, service_id_or_name):
|
||||
|
||||
|
||||
def parse_check(module):
|
||||
_checks = [module.params[p] for p in ('script', 'ttl', 'tcp', 'http') if module.params[p]]
|
||||
|
||||
if len(_checks) > 1:
|
||||
module.fail_json(
|
||||
msg='checks are either script, tcp, http or ttl driven, supplying more than one does not make sense')
|
||||
|
||||
if module.params['check_id'] or _checks:
|
||||
if module.params['check_id'] or any(module.params[p] is not None for p in ('script', 'ttl', 'tcp', 'http')):
|
||||
return ConsulCheck(
|
||||
module.params['check_id'],
|
||||
module.params['check_name'],
|
||||
@@ -501,15 +499,9 @@ class ConsulCheck(object):
|
||||
self.check = consul.Check.ttl(self.ttl)
|
||||
|
||||
if http:
|
||||
if interval is None:
|
||||
raise Exception('http check must specify interval')
|
||||
|
||||
self.check = consul.Check.http(http, self.interval, self.timeout)
|
||||
|
||||
if tcp:
|
||||
if interval is None:
|
||||
raise Exception('tcp check must specify interval')
|
||||
|
||||
regex = r"(?P<host>.*):(?P<port>(?:[0-9]+))$"
|
||||
match = re.match(regex, tcp)
|
||||
|
||||
@@ -596,30 +588,33 @@ def main():
|
||||
timeout=dict(type='str'),
|
||||
tags=dict(type='list', elements='str'),
|
||||
token=dict(no_log=True),
|
||||
ack_params_state_absent=dict(type='bool'),
|
||||
ack_params_state_absent=dict(
|
||||
type='bool',
|
||||
removed_in_version='10.0.0',
|
||||
removed_from_collection='community.general',
|
||||
),
|
||||
),
|
||||
mutually_exclusive=[
|
||||
('script', 'ttl', 'tcp', 'http'),
|
||||
],
|
||||
required_if=[
|
||||
('state', 'present', ['service_name']),
|
||||
('state', 'absent', ['service_id', 'service_name', 'check_id', 'check_name'], True),
|
||||
],
|
||||
required_by={
|
||||
'script': 'interval',
|
||||
'http': 'interval',
|
||||
'tcp': 'interval',
|
||||
},
|
||||
supports_check_mode=False,
|
||||
)
|
||||
p = module.params
|
||||
|
||||
test_dependencies(module)
|
||||
if p['state'] == 'absent' and any(p[x] for x in ['script', 'ttl', 'tcp', 'http', 'interval']) and not p['ack_params_state_absent']:
|
||||
module.deprecate(
|
||||
"The use of parameters 'script', 'ttl', 'tcp', 'http', 'interval' along with 'state=absent' is deprecated. "
|
||||
"In community.general 8.0.0 their use will become an error. "
|
||||
"To suppress this deprecation notice, set parameter ack_params_state_absent=true.",
|
||||
version="8.0.0",
|
||||
collection_name="community.general",
|
||||
if p['state'] == 'absent' and any(p[x] for x in ['script', 'ttl', 'tcp', 'http', 'interval']):
|
||||
module.fail_json(
|
||||
msg="The use of parameters 'script', 'ttl', 'tcp', 'http', 'interval' along with 'state=absent' is no longer allowed."
|
||||
)
|
||||
# When reaching c.g 8.0.0:
|
||||
# - Replace the deprecation with a fail_json(), remove the "ack_params_state_absent" condition from the "if"
|
||||
# - Add mutually_exclusive for ('script', 'ttl', 'tcp', 'http'), then remove that validation from parse_check()
|
||||
# - Add required_by {'script': 'interval', 'http': 'interval', 'tcp': 'interval'}, then remove checks for 'interval' in ConsulCheck.__init__()
|
||||
# - Deprecate the parameter ack_params_state_absent
|
||||
|
||||
try:
|
||||
register_with_consul(module)
|
||||
|
||||
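The net effect of the consul hunks above is that community.general.consul now fails hard when check-related options accompany state=absent, and enforces the script/ttl/tcp/http exclusivity and their interval requirement through the argument spec instead of parse_check(). A minimal playbook sketch of the resulting usage rules (service names, address and interval values are illustrative only):

- name: Deregister a service; with state=absent only identifying options are allowed
  community.general.consul:
    service_id: nginx01
    state: absent

- name: Register a service with a TCP check; tcp now pulls in interval via required_by
  community.general.consul:
    service_name: nginx
    tcp: localhost:80
    interval: 30s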
@@ -86,7 +86,7 @@ class EjabberdUser(object):
|
||||
object manages user creation and deletion using ejabberdctl. The following
|
||||
commands are currently supported:
|
||||
* ejabberdctl register
|
||||
* ejabberdctl unregister
|
||||
* ejabberdctl deregister
|
||||
"""
|
||||
|
||||
def __init__(self, module):
|
||||
|
||||
plugins/modules/facter_facts.py (new file, 90 lines)
@@ -0,0 +1,90 @@
|
||||
#!/usr/bin/python
# -*- coding: utf-8 -*-

# Copyright (c) 2023, Alexei Znamensky
# Copyright (c) 2012, Michael DeHaan <michael.dehaan@gmail.com>
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
# SPDX-License-Identifier: GPL-3.0-or-later

from __future__ import absolute_import, division, print_function
__metaclass__ = type

DOCUMENTATION = '''
---
module: facter_facts
short_description: Runs the discovery program C(facter) on the remote system and return Ansible facts
version_added: 8.0.0
description:
  - Runs the C(facter) discovery program
    (U(https://github.com/puppetlabs/facter)) on the remote system, returning Ansible facts from the
    JSON data that can be useful for inventory purposes.
extends_documentation_fragment:
  - community.general.attributes
  - community.general.attributes.facts
  - community.general.attributes.facts_module
options:
  arguments:
    description:
      - Specifies arguments for facter.
    type: list
    elements: str
requirements:
  - facter
  - ruby-json
author:
  - Ansible Core Team
  - Michael DeHaan
'''

EXAMPLES = '''
- name: Execute facter no arguments
  community.general.facter_facts:

- name: Execute facter with arguments
  community.general.facter_facts:
    arguments:
      - -p
      - system_uptime
      - timezone
      - is_virtual
'''

RETURN = r'''
ansible_facts:
  description: Dictionary with one key C(facter).
  returned: always
  type: dict
  contains:
    facter:
      description: Dictionary containing facts discovered in the remote system.
      returned: always
      type: dict
'''

import json

from ansible.module_utils.basic import AnsibleModule


def main():
    module = AnsibleModule(
        argument_spec=dict(
            arguments=dict(type='list', elements='str'),
        ),
        supports_check_mode=True,
    )

    facter_path = module.get_bin_path(
        'facter',
        opt_dirs=['/opt/puppetlabs/bin'])

    cmd = [facter_path, "--json"]
    if module.params['arguments']:
        cmd += module.params['arguments']

    rc, out, err = module.run_command(cmd, check_rc=True)
    module.exit_json(ansible_facts=dict(facter=json.loads(out)))


if __name__ == '__main__':
    main()
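As documented in the RETURN block above, the collected data ends up under ansible_facts.facter, so it can be consumed directly in later tasks. A short sketch (the os key is only an example of a typical facter fact and is not guaranteed by the module):

- name: Gather facter facts
  community.general.facter_facts:

- name: Show what facter reported about the operating system
  ansible.builtin.debug:
    var: ansible_facts.facter.os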
@@ -47,9 +47,8 @@ options:
|
||||
type: str
|
||||
description:
|
||||
- The action to take upon the key/value.
|
||||
- State V(get) is deprecated and will be removed in community.general 8.0.0. Please use the module M(community.general.gconftool2_info) instead.
|
||||
required: true
|
||||
choices: [ absent, get, present ]
|
||||
choices: [ absent, present ]
|
||||
config_source:
|
||||
type: str
|
||||
description:
|
||||
@@ -114,7 +113,7 @@ class GConftool(StateModuleHelper):
|
||||
key=dict(type='str', required=True, no_log=False),
|
||||
value_type=dict(type='str', choices=['bool', 'float', 'int', 'string']),
|
||||
value=dict(type='str'),
|
||||
state=dict(type='str', required=True, choices=['absent', 'get', 'present']),
|
||||
state=dict(type='str', required=True, choices=['absent', 'present']),
|
||||
direct=dict(type='bool', default=False),
|
||||
config_source=dict(type='str'),
|
||||
),
|
||||
@@ -149,12 +148,6 @@ class GConftool(StateModuleHelper):
|
||||
def _get(self):
|
||||
return self.runner("state key", output_process=self._make_process(False)).run(state="get")
|
||||
|
||||
def state_get(self):
|
||||
self.deprecate(
|
||||
msg="State 'get' is deprecated. Please use the module community.general.gconftool2_info instead",
|
||||
version="8.0.0", collection_name="community.general"
|
||||
)
|
||||
|
||||
def state_absent(self):
|
||||
with self.runner("state key", output_process=self._make_process(False)) as ctx:
|
||||
ctx.run()
|
||||
|
||||
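Since state=get and the state_get() handler are removed above, reads should go through community.general.gconftool2_info. A sketch of the replacement pattern (the key is illustrative; the value return field is assumed from that module's documentation):

- name: Read a key with gconftool2_info instead of state=get
  community.general.gconftool2_info:
    key: /desktop/gnome/background/picture_filename
  register: gconf_key

- name: Use the retrieved value
  ansible.builtin.debug:
    msg: "Background image is {{ gconf_key.value }}"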
@@ -121,7 +121,7 @@ from ansible.module_utils.basic import AnsibleModule
|
||||
from ansible.module_utils.common.text.converters import to_native
|
||||
|
||||
from ansible_collections.community.general.plugins.module_utils.gitlab import (
|
||||
auth_argument_spec, find_project, gitlab_authentication, gitlab, ensure_gitlab_package, list_all_kwargs
|
||||
auth_argument_spec, find_project, gitlab_authentication, gitlab, ensure_gitlab_package
|
||||
)
|
||||
|
||||
|
||||
@@ -209,7 +209,8 @@ class GitLabDeployKey(object):
|
||||
@param key_title Title of the key
|
||||
'''
|
||||
def find_deploy_key(self, project, key_title):
|
||||
for deploy_key in project.keys.list(**list_all_kwargs):
|
||||
deploy_keys = project.keys.list(all=True)
|
||||
for deploy_key in deploy_keys:
|
||||
if (deploy_key.title == key_title):
|
||||
return deploy_key
|
||||
|
||||
|
||||
@@ -160,7 +160,7 @@ from ansible.module_utils.api import basic_auth_argument_spec
|
||||
from ansible.module_utils.basic import AnsibleModule
|
||||
|
||||
from ansible_collections.community.general.plugins.module_utils.gitlab import (
|
||||
auth_argument_spec, gitlab_authentication, gitlab, ensure_gitlab_package, list_all_kwargs
|
||||
auth_argument_spec, gitlab_authentication, gitlab, ensure_gitlab_package
|
||||
)
|
||||
|
||||
|
||||
@@ -171,20 +171,16 @@ class GitLabGroup(object):
|
||||
|
||||
# get user id if the user exists
|
||||
def get_user_id(self, gitlab_user):
|
||||
return next(
|
||||
(u.id for u in self._gitlab.users.list(username=gitlab_user, **list_all_kwargs)),
|
||||
None
|
||||
)
|
||||
user_exists = self._gitlab.users.list(username=gitlab_user, all=True)
|
||||
if user_exists:
|
||||
return user_exists[0].id
|
||||
|
||||
# get group id if group exists
|
||||
def get_group_id(self, gitlab_group):
|
||||
return next(
|
||||
(
|
||||
g.id for g in self._gitlab.groups.list(search=gitlab_group, **list_all_kwargs)
|
||||
if g.full_path == gitlab_group
|
||||
),
|
||||
None
|
||||
)
|
||||
groups = self._gitlab.groups.list(search=gitlab_group, all=True)
|
||||
for group in groups:
|
||||
if group.full_path == gitlab_group:
|
||||
return group.id
|
||||
|
||||
# get all members in a group
|
||||
def get_members_in_a_group(self, gitlab_group_id):
|
||||
|
||||
@@ -207,8 +207,7 @@ group_variable:
|
||||
from ansible.module_utils.basic import AnsibleModule
|
||||
from ansible.module_utils.api import basic_auth_argument_spec
|
||||
from ansible_collections.community.general.plugins.module_utils.gitlab import (
|
||||
auth_argument_spec, gitlab_authentication, ensure_gitlab_package, filter_returned_variables, vars_to_variables,
|
||||
list_all_kwargs
|
||||
auth_argument_spec, gitlab_authentication, ensure_gitlab_package, filter_returned_variables, vars_to_variables
|
||||
)
|
||||
|
||||
|
||||
@@ -223,7 +222,14 @@ class GitlabGroupVariables(object):
|
||||
return self.repo.groups.get(group_name)
|
||||
|
||||
def list_all_group_variables(self):
|
||||
return list(self.group.variables.list(**list_all_kwargs))
|
||||
page_nb = 1
|
||||
variables = []
|
||||
vars_page = self.group.variables.list(page=page_nb)
|
||||
while len(vars_page) > 0:
|
||||
variables += vars_page
|
||||
page_nb += 1
|
||||
vars_page = self.group.variables.list(page=page_nb)
|
||||
return variables
|
||||
|
||||
def create_variable(self, var_obj):
|
||||
if self._module.check_mode:
|
||||
|
||||
@@ -171,7 +171,7 @@ from ansible.module_utils.api import basic_auth_argument_spec
|
||||
from ansible.module_utils.basic import AnsibleModule
|
||||
|
||||
from ansible_collections.community.general.plugins.module_utils.gitlab import (
|
||||
auth_argument_spec, find_project, gitlab_authentication, ensure_gitlab_package, list_all_kwargs
|
||||
auth_argument_spec, find_project, gitlab_authentication, ensure_gitlab_package
|
||||
)
|
||||
|
||||
|
||||
@@ -266,7 +266,8 @@ class GitLabHook(object):
|
||||
@param hook_url Url to call on event
|
||||
'''
|
||||
def find_hook(self, project, hook_url):
|
||||
for hook in project.hooks.list(**list_all_kwargs):
|
||||
hooks = project.hooks.list(all=True)
|
||||
for hook in hooks:
|
||||
if (hook.url == hook_url):
|
||||
return hook
|
||||
|
||||
|
||||
@@ -139,7 +139,7 @@ instance_variable:
|
||||
from ansible.module_utils.basic import AnsibleModule
|
||||
from ansible.module_utils.api import basic_auth_argument_spec
|
||||
from ansible_collections.community.general.plugins.module_utils.gitlab import (
|
||||
auth_argument_spec, gitlab_authentication, ensure_gitlab_package, filter_returned_variables, list_all_kwargs
|
||||
auth_argument_spec, gitlab_authentication, ensure_gitlab_package, filter_returned_variables
|
||||
)
|
||||
|
||||
|
||||
@@ -150,7 +150,14 @@ class GitlabInstanceVariables(object):
|
||||
self._module = module
|
||||
|
||||
def list_all_instance_variables(self):
|
||||
return list(self.instance.variables.list(**list_all_kwargs))
|
||||
page_nb = 1
|
||||
variables = []
|
||||
gl_varibales_page = self.instance.variables.list(page=page_nb)
|
||||
while len(gl_varibales_page) > 0:
|
||||
variables += gl_varibales_page
|
||||
page_nb += 1
|
||||
gl_varibales_page = self.instance.variables.list(page=page_nb)
|
||||
return variables
|
||||
|
||||
def create_variable(self, var_obj):
|
||||
if self._module.check_mode:
|
||||
|
||||
@@ -175,8 +175,10 @@ options:
|
||||
version_added: "4.2.0"
|
||||
default_branch:
|
||||
description:
|
||||
- Default branch name for a new project.
|
||||
- This option is only used on creation, not for updates. This is also only used if O(initialize_with_readme=true).
|
||||
- The default branch name for this project.
|
||||
- For project creation, this option requires O(initialize_with_readme=true).
|
||||
- For project update, the branch must exist.
|
||||
- Supports project's default branch update since community.general 8.0.0.
|
||||
type: str
|
||||
version_added: "4.2.0"
|
||||
builds_access_level:
|
||||
@@ -355,7 +357,7 @@ class GitLabProject(object):
|
||||
@param namespace Namespace Object (User or Group)
|
||||
@param options Options of the project
|
||||
'''
|
||||
def create_or_update_project(self, project_name, namespace, options):
|
||||
def create_or_update_project(self, module, project_name, namespace, options):
|
||||
changed = False
|
||||
project_options = {
|
||||
'name': project_name,
|
||||
@@ -395,6 +397,8 @@ class GitLabProject(object):
|
||||
|
||||
# Because we have already call userExists in main()
|
||||
if self.project_object is None:
|
||||
if options['default_branch'] and not options['initialize_with_readme']:
|
||||
module.fail_json(msg="Param default_branch need param initialize_with_readme set to true")
|
||||
project_options.update({
|
||||
'path': options['path'],
|
||||
'import_url': options['import_url'],
|
||||
@@ -416,6 +420,8 @@ class GitLabProject(object):
|
||||
|
||||
changed = True
|
||||
else:
|
||||
if options['default_branch']:
|
||||
project_options['default_branch'] = options['default_branch']
|
||||
changed, project = self.update_project(self.project_object, project_options)
|
||||
|
||||
self.project_object = project
|
||||
@@ -590,9 +596,6 @@ def main():
|
||||
security_and_compliance_access_level = module.params['security_and_compliance_access_level']
|
||||
topics = module.params['topics']
|
||||
|
||||
if default_branch and not initialize_with_readme:
|
||||
module.fail_json(msg="Param default_branch need param initialize_with_readme set to true")
|
||||
|
||||
gitlab_instance = gitlab_authentication(module)
|
||||
|
||||
# Set project_path to project_name if it is empty.
|
||||
@@ -636,7 +639,7 @@ def main():
|
||||
|
||||
if state == 'present':
|
||||
|
||||
if gitlab_project.create_or_update_project(project_name, namespace, {
|
||||
if gitlab_project.create_or_update_project(module, project_name, namespace, {
|
||||
"path": project_path,
|
||||
"description": project_description,
|
||||
"initialize_with_readme": initialize_with_readme,
|
||||
|
||||
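With the gitlab_project changes above, default_branch is honoured on updates as well as on creation (where it still requires initialize_with_readme=true, now validated inside create_or_update_project()). A usage sketch; the URL, token variable and project name are placeholders:

- name: Create a project whose initial branch is not the GitLab default
  community.general.gitlab_project:
    api_url: https://gitlab.example.com
    api_token: "{{ gitlab_token }}"
    name: demo-project
    initialize_with_readme: true
    default_branch: main
    state: present

- name: Later, point the existing project at another default branch (the branch must already exist)
  community.general.gitlab_project:
    api_url: https://gitlab.example.com
    api_token: "{{ gitlab_token }}"
    name: demo-project
    default_branch: stable
    state: present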
@@ -97,7 +97,7 @@ from ansible.module_utils.api import basic_auth_argument_spec
|
||||
from ansible.module_utils.basic import AnsibleModule
|
||||
|
||||
from ansible_collections.community.general.plugins.module_utils.gitlab import (
|
||||
auth_argument_spec, gitlab_authentication, find_project, ensure_gitlab_package, list_all_kwargs
|
||||
auth_argument_spec, gitlab_authentication, find_project, ensure_gitlab_package
|
||||
)
|
||||
|
||||
|
||||
@@ -105,7 +105,7 @@ def present_strategy(module, gl, project, wished_badge):
|
||||
changed = False
|
||||
|
||||
existing_badge = None
|
||||
for badge in project.badges.list(**list_all_kwargs):
|
||||
for badge in project.badges.list(iterator=True):
|
||||
if badge.image_url == wished_badge["image_url"]:
|
||||
existing_badge = badge
|
||||
break
|
||||
@@ -135,7 +135,7 @@ def absent_strategy(module, gl, project, wished_badge):
|
||||
changed = False
|
||||
|
||||
existing_badge = None
|
||||
for badge in project.badges.list(**list_all_kwargs):
|
||||
for badge in project.badges.list(iterator=True):
|
||||
if badge.image_url == wished_badge["image_url"]:
|
||||
existing_badge = badge
|
||||
break
|
||||
|
||||
@@ -227,7 +227,7 @@ from ansible.module_utils.api import basic_auth_argument_spec
|
||||
|
||||
from ansible_collections.community.general.plugins.module_utils.gitlab import (
|
||||
auth_argument_spec, gitlab_authentication, ensure_gitlab_package, filter_returned_variables, vars_to_variables,
|
||||
list_all_kwargs, HAS_GITLAB_PACKAGE, GITLAB_IMP_ERR
|
||||
HAS_GITLAB_PACKAGE, GITLAB_IMP_ERR
|
||||
)
|
||||
|
||||
|
||||
@@ -242,7 +242,14 @@ class GitlabProjectVariables(object):
|
||||
return self.repo.projects.get(project_name)
|
||||
|
||||
def list_all_project_variables(self):
|
||||
return list(self.project.variables.list(**list_all_kwargs))
|
||||
page_nb = 1
|
||||
variables = []
|
||||
vars_page = self.project.variables.list(page=page_nb)
|
||||
while len(vars_page) > 0:
|
||||
variables += vars_page
|
||||
page_nb += 1
|
||||
vars_page = self.project.variables.list(page=page_nb)
|
||||
return variables
|
||||
|
||||
def create_variable(self, var_obj):
|
||||
if self._module.check_mode:
|
||||
|
||||
@@ -103,9 +103,9 @@ options:
|
||||
is only applied on updates.
|
||||
- If set to V(not_protected), runner can pick up jobs from both protected and unprotected branches.
|
||||
- If set to V(ref_protected), runner can pick up jobs only from protected branches.
|
||||
- The current default is V(ref_protected). This will change to no default in community.general 8.0.0.
|
||||
From that version on, if this option is not specified explicitly, GitLab will use V(not_protected)
|
||||
on creation, and the value set will not be changed on any updates.
|
||||
- Before community.general 8.0.0 the default was V(ref_protected). This was changed to no default in community.general 8.0.0.
|
||||
If this option is not specified explicitly, GitLab will use V(not_protected) on creation, and the value set
|
||||
will not be changed on any updates.
|
||||
required: false
|
||||
choices: ["not_protected", "ref_protected"]
|
||||
type: str
|
||||
@@ -206,7 +206,7 @@ from ansible.module_utils.basic import AnsibleModule
|
||||
from ansible.module_utils.common.text.converters import to_native
|
||||
|
||||
from ansible_collections.community.general.plugins.module_utils.gitlab import (
|
||||
auth_argument_spec, gitlab_authentication, gitlab, ensure_gitlab_package, list_all_kwargs
|
||||
auth_argument_spec, gitlab_authentication, gitlab, ensure_gitlab_package
|
||||
)
|
||||
|
||||
|
||||
@@ -309,7 +309,7 @@ class GitLabRunner(object):
|
||||
@param description Description of the runner
|
||||
'''
|
||||
def find_runner(self, description):
|
||||
runners = self._runners_endpoint(**list_all_kwargs)
|
||||
runners = self._runners_endpoint(as_list=False)
|
||||
|
||||
for runner in runners:
|
||||
# python-gitlab 2.2 through at least 2.5 returns a list of dicts for list() instead of a Runner
|
||||
@@ -398,15 +398,6 @@ def main():
|
||||
project = module.params['project']
|
||||
group = module.params['group']
|
||||
|
||||
if access_level is None:
|
||||
message = "The option 'access_level' is unspecified, so 'ref_protected' is assumed. "\
|
||||
"In order to align the module with GitLab's runner API, this option will lose "\
|
||||
"its default value in community.general 8.0.0. From that version on, you must set "\
|
||||
"this option to 'ref_protected' explicitly, if you want to have a protected runner, "\
|
||||
"otherwise GitLab's default access level gets applied, which is 'not_protected'"
|
||||
module.deprecate(message, version='8.0.0', collection_name='community.general')
|
||||
access_level = 'ref_protected'
|
||||
|
||||
gitlab_instance = gitlab_authentication(module)
|
||||
gitlab_project = None
|
||||
gitlab_group = None
|
||||
|
||||
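Because the fallback to ref_protected is removed above, playbooks that relied on the old implicit default must now request a protected runner explicitly; otherwise GitLab applies its own default, not_protected, on creation. A sketch (registration_token is assumed from the module's existing options; URL and tokens are placeholders):

- name: Keep a deploy runner protected after the default change
  community.general.gitlab_runner:
    api_url: https://gitlab.example.com
    api_token: "{{ gitlab_token }}"
    registration_token: "{{ runner_registration_token }}"
    description: protected-deploy-runner
    access_level: ref_protected
    state: present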
@@ -234,7 +234,7 @@ from ansible.module_utils.basic import AnsibleModule
|
||||
from ansible.module_utils.common.text.converters import to_native
|
||||
|
||||
from ansible_collections.community.general.plugins.module_utils.gitlab import (
|
||||
auth_argument_spec, find_group, gitlab_authentication, gitlab, ensure_gitlab_package, list_all_kwargs
|
||||
auth_argument_spec, find_group, gitlab_authentication, gitlab, ensure_gitlab_package
|
||||
)
|
||||
|
||||
|
||||
@@ -349,10 +349,9 @@ class GitLabUser(object):
|
||||
@param sshkey_name Name of the ssh key
|
||||
'''
|
||||
def ssh_key_exists(self, user, sshkey_name):
|
||||
return any(
|
||||
k.title == sshkey_name
|
||||
for k in user.keys.list(**list_all_kwargs)
|
||||
)
|
||||
keyList = map(lambda k: k.title, user.keys.list(all=True))
|
||||
|
||||
return sshkey_name in keyList
|
||||
|
||||
'''
|
||||
@param user User object
|
||||
@@ -520,13 +519,10 @@ class GitLabUser(object):
|
||||
@param username Username of the user
|
||||
'''
|
||||
def find_user(self, username):
|
||||
return next(
|
||||
(
|
||||
user for user in self._gitlab.users.list(search=username, **list_all_kwargs)
|
||||
if user.username == username
|
||||
),
|
||||
None
|
||||
)
|
||||
users = self._gitlab.users.list(search=username, all=True)
|
||||
for user in users:
|
||||
if (user.username == username):
|
||||
return user
|
||||
|
||||
'''
|
||||
@param username Username of the user
|
||||
|
||||
@@ -343,7 +343,7 @@ class HAProxy(object):
|
||||
|
||||
if state is not None:
|
||||
self.execute(Template(cmd).substitute(pxname=backend, svname=svname))
|
||||
if self.wait and not (wait_for_status == "DRAIN" and state == "DOWN"):
|
||||
if self.wait:
|
||||
self.wait_until_status(backend, svname, wait_for_status)
|
||||
|
||||
def get_state_for(self, pxname, svname):
|
||||
|
||||
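The extra condition above means a drain request against a server that reports DOWN no longer blocks waiting for a DRAIN status it can never reach. For reference, a typical drain task looks like the following sketch (socket path, backend and host names are illustrative, and the option names are taken from the module's existing interface):

- name: Drain a backend server before maintenance, waiting for the status change
  community.general.haproxy:
    socket: /var/run/haproxy.sock
    backend: www-backend
    host: web01
    state: drain
    wait: true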
@@ -165,7 +165,6 @@ changed_pkgs:
|
||||
version_added: '0.2.0'
|
||||
'''
|
||||
|
||||
import json
|
||||
import os.path
|
||||
import re
|
||||
|
||||
@@ -185,10 +184,6 @@ def _create_regex_group_complement(s):
|
||||
chars = filter(None, (line.split('#')[0].strip() for line in lines))
|
||||
group = r'[^' + r''.join(chars) + r']'
|
||||
return re.compile(group)
|
||||
|
||||
|
||||
def _check_package_in_json(json_output, package_type):
|
||||
return bool(json_output.get(package_type, []) and json_output[package_type][0].get("installed"))
|
||||
# /utils ------------------------------------------------------------------ }}}
|
||||
|
||||
|
||||
@@ -484,17 +479,17 @@ class Homebrew(object):
|
||||
cmd = [
|
||||
"{brew_path}".format(brew_path=self.brew_path),
|
||||
"info",
|
||||
"--json=v2",
|
||||
self.current_package,
|
||||
]
|
||||
rc, out, err = self.module.run_command(cmd)
|
||||
if err:
|
||||
self.failed = True
|
||||
self.message = err.strip()
|
||||
raise HomebrewException(self.message)
|
||||
data = json.loads(out)
|
||||
for line in out.split('\n'):
|
||||
if (
|
||||
re.search(r'Built from source', line)
|
||||
or re.search(r'Poured from bottle', line)
|
||||
):
|
||||
return True
|
||||
|
||||
return _check_package_in_json(data, "formulae") or _check_package_in_json(data, "casks")
|
||||
return False
|
||||
|
||||
def _current_package_is_outdated(self):
|
||||
if not self.valid_package(self.current_package):
|
||||
|
||||
@@ -70,8 +70,7 @@ options:
|
||||
does not exist it will fail.
|
||||
notes:
|
||||
- "This module depends on the C(passlib) Python library, which needs to be installed on all target systems."
|
||||
- "On Debian < 11, Ubuntu <= 20.04, or Fedora: install C(python-passlib)."
|
||||
- "On Debian, Ubuntu: install C(python3-passlib)."
|
||||
- "On Debian, Ubuntu, or Fedora: install C(python-passlib)."
|
||||
- "On RHEL or CentOS: Enable EPEL, then install C(python-passlib)."
|
||||
requirements: [ passlib>=1.6 ]
|
||||
author: "Ansible Core Team"
|
||||
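The distribution-specific hints above boil down to making passlib (>= 1.6) importable by the target's Python before the module runs. A sketch for Debian/Ubuntu-style systems (the python3-passlib package name is an assumption for other distributions, and paths and credentials are placeholders):

- name: Ensure passlib is available for the htpasswd module
  ansible.builtin.apt:
    name: python3-passlib
    state: present

- name: Add a user to an htpasswd file
  community.general.htpasswd:
    path: /etc/nginx/passwdfile
    name: alice
    password: "{{ vault_alice_password }}"
    mode: '0640'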
@@ -107,22 +106,16 @@ EXAMPLES = """
|
||||
|
||||
import os
|
||||
import tempfile
|
||||
import traceback
|
||||
from ansible.module_utils.basic import AnsibleModule, missing_required_lib
|
||||
|
||||
from ansible.module_utils.basic import AnsibleModule
|
||||
from ansible_collections.community.general.plugins.module_utils import deps
|
||||
from ansible.module_utils.common.text.converters import to_native
|
||||
|
||||
from ansible_collections.community.general.plugins.module_utils.version import LooseVersion
|
||||
|
||||
PASSLIB_IMP_ERR = None
|
||||
try:
|
||||
with deps.declare("passlib"):
|
||||
from passlib.apache import HtpasswdFile, htpasswd_context
|
||||
from passlib.context import CryptContext
|
||||
import passlib
|
||||
except ImportError:
|
||||
PASSLIB_IMP_ERR = traceback.format_exc()
|
||||
passlib_installed = False
|
||||
else:
|
||||
passlib_installed = True
|
||||
|
||||
|
||||
apache_hashes = ["apr_md5_crypt", "des_crypt", "ldap_sha1", "plaintext"]
|
||||
|
||||
@@ -147,36 +140,20 @@ def present(dest, username, password, hash_scheme, create, check_mode):
|
||||
if check_mode:
|
||||
return ("Create %s" % dest, True)
|
||||
create_missing_directories(dest)
|
||||
if LooseVersion(passlib.__version__) >= LooseVersion('1.6'):
|
||||
ht = HtpasswdFile(dest, new=True, default_scheme=hash_scheme, context=context)
|
||||
else:
|
||||
ht = HtpasswdFile(dest, autoload=False, default=hash_scheme, context=context)
|
||||
if getattr(ht, 'set_password', None):
|
||||
ht.set_password(username, password)
|
||||
else:
|
||||
ht.update(username, password)
|
||||
ht = HtpasswdFile(dest, new=True, default_scheme=hash_scheme, context=context)
|
||||
ht.set_password(username, password)
|
||||
ht.save()
|
||||
return ("Created %s and added %s" % (dest, username), True)
|
||||
else:
|
||||
if LooseVersion(passlib.__version__) >= LooseVersion('1.6'):
|
||||
ht = HtpasswdFile(dest, new=False, default_scheme=hash_scheme, context=context)
|
||||
else:
|
||||
ht = HtpasswdFile(dest, default=hash_scheme, context=context)
|
||||
ht = HtpasswdFile(dest, new=False, default_scheme=hash_scheme, context=context)
|
||||
|
||||
found = None
|
||||
if getattr(ht, 'check_password', None):
|
||||
found = ht.check_password(username, password)
|
||||
else:
|
||||
found = ht.verify(username, password)
|
||||
found = ht.check_password(username, password)
|
||||
|
||||
if found:
|
||||
return ("%s already present" % username, False)
|
||||
else:
|
||||
if not check_mode:
|
||||
if getattr(ht, 'set_password', None):
|
||||
ht.set_password(username, password)
|
||||
else:
|
||||
ht.update(username, password)
|
||||
ht.set_password(username, password)
|
||||
ht.save()
|
||||
return ("Add/update %s" % username, True)
|
||||
|
||||
@@ -185,10 +162,7 @@ def absent(dest, username, check_mode):
|
||||
""" Ensures user is absent
|
||||
|
||||
Returns (msg, changed) """
|
||||
if LooseVersion(passlib.__version__) >= LooseVersion('1.6'):
|
||||
ht = HtpasswdFile(dest, new=False)
|
||||
else:
|
||||
ht = HtpasswdFile(dest)
|
||||
ht = HtpasswdFile(dest, new=False)
|
||||
|
||||
if username not in ht.users():
|
||||
return ("%s not present" % username, False)
|
||||
@@ -234,20 +208,13 @@ def main():
|
||||
create = module.params['create']
|
||||
check_mode = module.check_mode
|
||||
|
||||
if not passlib_installed:
|
||||
module.fail_json(msg=missing_required_lib("passlib"), exception=PASSLIB_IMP_ERR)
|
||||
deps.validate(module)
|
||||
|
||||
# TODO double check if this hack below is still needed.
|
||||
# Check file for blank lines in effort to avoid "need more than 1 value to unpack" error.
|
||||
try:
|
||||
f = open(path, "r")
|
||||
except IOError:
|
||||
# No preexisting file to remove blank lines from
|
||||
f = None
|
||||
else:
|
||||
try:
|
||||
with open(path, "r") as f:
|
||||
lines = f.readlines()
|
||||
finally:
|
||||
f.close()
|
||||
|
||||
# If the file gets edited, it returns true, so only edit the file if it has blank lines
|
||||
strip = False
|
||||
@@ -261,11 +228,12 @@ def main():
|
||||
if check_mode:
|
||||
temp = tempfile.NamedTemporaryFile()
|
||||
path = temp.name
|
||||
f = open(path, "w")
|
||||
try:
|
||||
[f.write(line) for line in lines if line.strip()]
|
||||
finally:
|
||||
f.close()
|
||||
with open(path, "w") as f:
|
||||
f.writelines(line for line in lines if line.strip())
|
||||
|
||||
except IOError:
|
||||
# No preexisting file to remove blank lines from
|
||||
pass
|
||||
|
||||
try:
|
||||
if state == 'present':
|
||||
|
||||
@@ -106,7 +106,7 @@ options:
|
||||
type: str
|
||||
description:
|
||||
- The IP address of the host.
|
||||
required: true
|
||||
- This is no longer required since community.general 8.0.0.
|
||||
variables:
|
||||
type: dict
|
||||
description:
|
||||
@@ -243,7 +243,7 @@ def main():
|
||||
template=dict(default=None),
|
||||
check_command=dict(default="hostalive"),
|
||||
display_name=dict(default=None),
|
||||
ip=dict(required=True),
|
||||
ip=dict(),
|
||||
variables=dict(type='dict', default=None),
|
||||
)
|
||||
|
||||
|
||||
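With ip no longer required by icinga2_host, a host can be registered from just its name. A sketch (the API URL and credentials are placeholders; url, url_username and url_password are assumed from the module's existing connection options):

- name: Create an Icinga2 host without specifying an IP address (community.general >= 8.0.0)
  community.general.icinga2_host:
    url: https://icinga.example.com:5665
    url_username: ansible
    url_password: "{{ icinga_api_password }}"
    name: web01.example.com
    state: present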
@@ -43,8 +43,8 @@ options:
|
||||
description:
|
||||
- Section name in INI file. This is added if O(state=present) automatically when
|
||||
a single value is being set.
|
||||
- If being omitted, the O(option) will be placed before the first O(section).
|
||||
- Omitting O(section) is also required if the config format does not support sections.
|
||||
- If left empty, being omitted, or being set to V(null), the O(option) will be placed before the first O(section).
|
||||
- Using V(null) is also required if the config format does not support sections.
|
||||
type: str
|
||||
option:
|
||||
description:
|
||||
@@ -116,6 +116,14 @@ options:
|
||||
- Allow option without value and without '=' symbol.
|
||||
type: bool
|
||||
default: false
|
||||
modify_inactive_option:
|
||||
description:
|
||||
- By default the module replaces a commented line that matches the given option.
|
||||
- Set this option to V(false) to avoid this. This is useful when you want to keep commented example
|
||||
C(key=value) pairs for documentation purposes.
|
||||
type: bool
|
||||
default: true
|
||||
version_added: 8.0.0
|
||||
follow:
|
||||
description:
|
||||
- This flag indicates that filesystem links, if they exist, should be followed.
|
||||
@@ -171,13 +179,6 @@ EXAMPLES = r'''
|
||||
- pepsi
|
||||
mode: '0600'
|
||||
state: present
|
||||
|
||||
- name: Add "beverage=lemon juice" outside a section in specified file
|
||||
community.general.ini_file:
|
||||
path: /etc/conf
|
||||
option: beverage
|
||||
value: lemon juice
|
||||
state: present
|
||||
'''
|
||||
|
||||
import io
|
||||
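The two ini_file documentation changes above, the explicit V(null) section for section-less files and the new modify_inactive_option switch, translate into tasks like the following sketch; paths, keys and values are illustrative:

- name: Add "beverage=lemon juice" outside any section in a file without sections
  community.general.ini_file:
    path: /etc/conf
    section: null
    option: beverage
    value: lemon juice
    state: present

- name: Update only the active port option, leaving commented-out examples untouched
  community.general.ini_file:
    path: /etc/app.ini
    section: server
    option: port
    value: "9090"
    modify_inactive_option: false
    state: present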
@@ -197,7 +198,7 @@ def match_opt(option, line):
|
||||
|
||||
def match_active_opt(option, line):
|
||||
option = re.escape(option)
|
||||
return re.match('( |\t)*(%s)( |\t)*(=|$)( |\t)*(.*)' % option, line)
|
||||
return re.match('()( |\t)*(%s)( |\t)*(=|$)( |\t)*(.*)' % option, line)
|
||||
|
||||
|
||||
def update_section_line(option, changed, section_lines, index, changed_lines, ignore_spaces, newline, msg):
|
||||
@@ -220,7 +221,7 @@ def update_section_line(option, changed, section_lines, index, changed_lines, ig
|
||||
|
||||
def do_ini(module, filename, section=None, option=None, values=None,
|
||||
state='present', exclusive=True, backup=False, no_extra_spaces=False,
|
||||
ignore_spaces=False, create=True, allow_no_value=False, follow=False):
|
||||
ignore_spaces=False, create=True, allow_no_value=False, modify_inactive_option=True, follow=False):
|
||||
|
||||
if section is not None:
|
||||
section = to_text(section)
|
||||
@@ -317,6 +318,12 @@ def do_ini(module, filename, section=None, option=None, values=None,
|
||||
# Keep track of changed section_lines
|
||||
changed_lines = [0] * len(section_lines)
|
||||
|
||||
# Determine whether to consider using commented out/inactive options or only active ones
|
||||
if modify_inactive_option:
|
||||
match_function = match_opt
|
||||
else:
|
||||
match_function = match_active_opt
|
||||
|
||||
# handling multiple instances of option=value when state is 'present' with/without exclusive is a bit complex
|
||||
#
|
||||
# 1. edit all lines where we have a option=value pair with a matching value in values[]
|
||||
@@ -326,8 +333,8 @@ def do_ini(module, filename, section=None, option=None, values=None,
|
||||
|
||||
if state == 'present' and option:
|
||||
for index, line in enumerate(section_lines):
|
||||
if match_opt(option, line):
|
||||
match = match_opt(option, line)
|
||||
if match_function(option, line):
|
||||
match = match_function(option, line)
|
||||
if values and match.group(7) in values:
|
||||
matched_value = match.group(7)
|
||||
if not matched_value and allow_no_value:
|
||||
@@ -350,14 +357,14 @@ def do_ini(module, filename, section=None, option=None, values=None,
|
||||
# override option with no value to option with value if not allow_no_value
|
||||
if len(values) > 0:
|
||||
for index, line in enumerate(section_lines):
|
||||
if not changed_lines[index] and match_opt(option, line):
|
||||
if not changed_lines[index] and match_function(option, line):
|
||||
newline = assignment_format % (option, values.pop(0))
|
||||
(changed, msg) = update_section_line(option, changed, section_lines, index, changed_lines, ignore_spaces, newline, msg)
|
||||
if len(values) == 0:
|
||||
break
|
||||
# remove all remaining option occurrences from the rest of the section
|
||||
for index in range(len(section_lines) - 1, 0, -1):
|
||||
if not changed_lines[index] and match_opt(option, section_lines[index]):
|
||||
if not changed_lines[index] and match_function(option, section_lines[index]):
|
||||
del section_lines[index]
|
||||
del changed_lines[index]
|
||||
changed = True
|
||||
@@ -401,7 +408,7 @@ def do_ini(module, filename, section=None, option=None, values=None,
|
||||
section_lines = new_section_lines
|
||||
elif not exclusive and len(values) > 0:
|
||||
# delete specified option=value line(s)
|
||||
new_section_lines = [i for i in section_lines if not (match_active_opt(option, i) and match_active_opt(option, i).group(6) in values)]
|
||||
new_section_lines = [i for i in section_lines if not (match_active_opt(option, i) and match_active_opt(option, i).group(7) in values)]
|
||||
if section_lines != new_section_lines:
|
||||
changed = True
|
||||
msg = 'option changed'
|
||||
@@ -450,7 +457,7 @@ def do_ini(module, filename, section=None, option=None, values=None,
|
||||
module.fail_json(msg="Unable to create temporary file %s", traceback=traceback.format_exc())
|
||||
|
||||
try:
|
||||
module.atomic_move(tmpfile, os.path.abspath(target_filename))
|
||||
module.atomic_move(tmpfile, target_filename)
|
||||
except IOError:
|
||||
module.ansible.fail_json(msg='Unable to move temporary \
|
||||
file %s to %s, IOError' % (tmpfile, target_filename), traceback=traceback.format_exc())
|
||||
@@ -473,6 +480,7 @@ def main():
|
||||
no_extra_spaces=dict(type='bool', default=False),
|
||||
ignore_spaces=dict(type='bool', default=False),
|
||||
allow_no_value=dict(type='bool', default=False),
|
||||
modify_inactive_option=dict(type='bool', default=True),
|
||||
create=dict(type='bool', default=True),
|
||||
follow=dict(type='bool', default=False)
|
||||
),
|
||||
@@ -494,6 +502,7 @@ def main():
|
||||
no_extra_spaces = module.params['no_extra_spaces']
|
||||
ignore_spaces = module.params['ignore_spaces']
|
||||
allow_no_value = module.params['allow_no_value']
|
||||
modify_inactive_option = module.params['modify_inactive_option']
|
||||
create = module.params['create']
|
||||
follow = module.params['follow']
|
||||
|
||||
@@ -507,7 +516,7 @@ def main():
|
||||
|
||||
(changed, backup_file, diff, msg) = do_ini(
|
||||
module, path, section, option, values, state, exclusive, backup,
|
||||
no_extra_spaces, ignore_spaces, create, allow_no_value, follow)
|
||||
no_extra_spaces, ignore_spaces, create, allow_no_value, modify_inactive_option, follow)
|
||||
|
||||
if not module.check_mode and os.path.exists(path):
|
||||
file_args = module.load_file_common_arguments(module.params)
|
||||
|
||||
@@ -12,14 +12,14 @@ __metaclass__ = type
|
||||
DOCUMENTATION = '''
|
||||
---
|
||||
module: interfaces_file
|
||||
short_description: Tweak settings in C(/etc/network/interfaces) files
|
||||
short_description: Tweak settings in /etc/network/interfaces files
|
||||
extends_documentation_fragment:
|
||||
- ansible.builtin.files
|
||||
- community.general.attributes
|
||||
description:
|
||||
- Manage (add, remove, change) individual interface options in an interfaces-style file without having
|
||||
to manage the file as a whole with, say, M(ansible.builtin.template) or M(ansible.builtin.assemble). Interface has to be presented in a file.
|
||||
- Read information about interfaces from interfaces-styled files.
|
||||
- Read information about interfaces from interfaces-styled files
|
||||
attributes:
|
||||
check_mode:
|
||||
support: full
|
||||
@@ -29,27 +29,27 @@ options:
|
||||
dest:
|
||||
type: path
|
||||
description:
|
||||
- Path to the interfaces file.
|
||||
- Path to the interfaces file
|
||||
default: /etc/network/interfaces
|
||||
iface:
|
||||
type: str
|
||||
description:
|
||||
- Name of the interface, required for value changes or option remove.
|
||||
- Name of the interface, required for value changes or option remove
|
||||
address_family:
|
||||
type: str
|
||||
description:
|
||||
- Address family of the interface, useful if same interface name is used for both V(inet) and V(inet6).
|
||||
- Address family of the interface, useful if same interface name is used for both inet and inet6
|
||||
option:
|
||||
type: str
|
||||
description:
|
||||
- Name of the option, required for value changes or option remove.
|
||||
- Name of the option, required for value changes or option remove
|
||||
value:
|
||||
type: str
|
||||
description:
|
||||
- If O(option) is not presented for the O(iface) and O(state) is V(present) option will be added.
|
||||
If O(option) already exists and is not V(pre-up), V(up), V(post-up) or V(down), it's value will be updated.
|
||||
V(pre-up), V(up), V(post-up) and V(down) options cannot be updated, only adding new options, removing existing
|
||||
ones or cleaning the whole option set are supported.
|
||||
ones or cleaning the whole option set are supported
|
||||
backup:
|
||||
description:
|
||||
- Create a backup file including the timestamp information so you can get
|
||||
@@ -64,76 +64,72 @@ options:
|
||||
choices: [ "present", "absent" ]
|
||||
|
||||
notes:
|
||||
- If option is defined multiple times last one will be updated but all will be deleted in case of an absent state.
|
||||
- If option is defined multiple times last one will be updated but all will be deleted in case of an absent state
|
||||
requirements: []
|
||||
author: "Roman Belyakovsky (@hryamzik)"
|
||||
'''
|
||||
|
||||
RETURN = '''
|
||||
dest:
|
||||
description: Destination file/path.
|
||||
description: destination file/path
|
||||
returned: success
|
||||
type: str
|
||||
sample: "/etc/network/interfaces"
|
||||
ifaces:
|
||||
description: Interfaces dictionary.
|
||||
description: interfaces dictionary
|
||||
returned: success
|
||||
type: dict
|
||||
type: complex
|
||||
contains:
|
||||
ifaces:
|
||||
description: Interface dictionary.
|
||||
description: interface dictionary
|
||||
returned: success
|
||||
type: dict
|
||||
contains:
|
||||
eth0:
|
||||
description: Name of the interface.
|
||||
description: Name of the interface
|
||||
returned: success
|
||||
type: dict
|
||||
contains:
|
||||
address_family:
|
||||
description: Interface address family.
|
||||
description: interface address family
|
||||
returned: success
|
||||
type: str
|
||||
sample: "inet"
|
||||
method:
|
||||
description: Interface method.
|
||||
description: interface method
|
||||
returned: success
|
||||
type: str
|
||||
sample: "manual"
|
||||
mtu:
|
||||
description: Other options, all values returned as strings.
|
||||
description: other options, all values returned as strings
|
||||
returned: success
|
||||
type: str
|
||||
sample: "1500"
|
||||
pre-up:
|
||||
description: List of C(pre-up) scripts.
|
||||
description: list of C(pre-up) scripts
|
||||
returned: success
|
||||
type: list
|
||||
elements: str
|
||||
sample:
|
||||
- "route add -net 10.10.10.0/24 gw 10.10.10.1 dev eth1"
|
||||
- "route add -net 10.10.11.0/24 gw 10.10.11.1 dev eth2"
|
||||
up:
|
||||
description: List of C(up) scripts.
|
||||
description: list of C(up) scripts
|
||||
returned: success
|
||||
type: list
|
||||
elements: str
|
||||
sample:
|
||||
- "route add -net 10.10.10.0/24 gw 10.10.10.1 dev eth1"
|
||||
- "route add -net 10.10.11.0/24 gw 10.10.11.1 dev eth2"
|
||||
post-up:
|
||||
description: List of C(post-up) scripts.
|
||||
description: list of C(post-up) scripts
|
||||
returned: success
|
||||
type: list
|
||||
elements: str
|
||||
sample:
|
||||
- "route add -net 10.10.10.0/24 gw 10.10.10.1 dev eth1"
|
||||
- "route add -net 10.10.11.0/24 gw 10.10.11.1 dev eth2"
|
||||
down:
|
||||
description: List of C(down) scripts.
|
||||
description: list of C(down) scripts
|
||||
returned: success
|
||||
type: list
|
||||
elements: str
|
||||
sample:
|
||||
- "route del -net 10.10.10.0/24 gw 10.10.10.1 dev eth1"
|
||||
- "route del -net 10.10.11.0/24 gw 10.10.11.1 dev eth2"
|
||||
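The ifaces structure documented above can be registered and inspected after a change. A short sketch (interface name and value are illustrative):

- name: Set the MTU of eth0 and capture the parsed interfaces file
  community.general.interfaces_file:
    dest: /etc/network/interfaces
    iface: eth0
    option: mtu
    value: "1500"
    state: present
  register: ifile

- name: Show the parsed interfaces structure
  ansible.builtin.debug:
    var: ifile.ifaces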
@@ -340,8 +336,6 @@ def addOptionAfterLine(option, value, iface, lines, last_line_dict, iface_option
|
||||
changed = False
|
||||
for ln in lines:
|
||||
if ln.get('line_type', '') == 'iface' and ln.get('iface', '') == iface and value != ln.get('params', {}).get('method', ''):
|
||||
if address_family is not None and ln.get('address_family') != address_family:
|
||||
continue
|
||||
changed = True
|
||||
ln['line'] = re.sub(ln.get('params', {}).get('method', '') + '$', value, ln.get('line'))
|
||||
ln['params']['method'] = value
|
||||
|
||||
@@ -161,7 +161,6 @@ import traceback
|
||||
from ansible.module_utils.basic import AnsibleModule
|
||||
from ansible_collections.community.general.plugins.module_utils.ipa import IPAClient, ipa_argument_spec
|
||||
from ansible.module_utils.common.text.converters import to_native
|
||||
from ansible_collections.community.general.plugins.module_utils.version import LooseVersion
|
||||
|
||||
|
||||
class HBACRuleIPAClient(IPAClient):
|
||||
@@ -232,17 +231,10 @@ def ensure(module, client):
|
||||
name = module.params['cn']
|
||||
state = module.params['state']
|
||||
|
||||
ipa_version = client.get_ipa_version()
|
||||
if state in ['present', 'enabled']:
|
||||
if LooseVersion(ipa_version) < LooseVersion('4.9.10'):
|
||||
ipaenabledflag = 'TRUE'
|
||||
else:
|
||||
ipaenabledflag = True
|
||||
ipaenabledflag = 'TRUE'
|
||||
else:
|
||||
if LooseVersion(ipa_version) < LooseVersion('4.9.10'):
|
||||
ipaenabledflag = 'FALSE'
|
||||
else:
|
||||
ipaenabledflag = False
|
||||
ipaenabledflag = 'FALSE'
|
||||
|
||||
host = module.params['host']
|
||||
hostcategory = module.params['hostcategory']
|
||||
|
||||
@@ -57,14 +57,13 @@ options:
|
||||
state:
|
||||
description:
|
||||
- State to ensure.
|
||||
- V("absent") and V("disabled") give the same results.
|
||||
- V("present") and V("enabled") give the same results.
|
||||
default: "present"
|
||||
choices: ["absent", "disabled", "enabled", "present"]
|
||||
type: str
|
||||
extends_documentation_fragment:
|
||||
- community.general.ipa.documentation
|
||||
- community.general.attributes
|
||||
|
||||
'''
|
||||
|
||||
EXAMPLES = r'''
|
||||
@@ -161,7 +160,7 @@ def ensure(module, client):
|
||||
module_hostgroup = get_hostgroup_dict(description=module.params['description'])
|
||||
|
||||
changed = False
|
||||
if state in ['present', 'enabled']:
|
||||
if state == 'present':
|
||||
if not ipa_hostgroup:
|
||||
changed = True
|
||||
if not module.check_mode:
|
||||
|
||||
@@ -237,7 +237,7 @@ def get_otptoken_dict(ansible_to_ipa, uniqueid=None, newuniqueid=None, otptype=N
|
||||
if owner is not None:
|
||||
otptoken[ansible_to_ipa['owner']] = owner
|
||||
if enabled is not None:
|
||||
otptoken[ansible_to_ipa['enabled']] = False if enabled else True
|
||||
otptoken[ansible_to_ipa['enabled']] = 'FALSE' if enabled else 'TRUE'
|
||||
if notbefore is not None:
|
||||
otptoken[ansible_to_ipa['notbefore']] = notbefore + 'Z'
|
||||
if notafter is not None:
|
||||
|
||||
@@ -186,7 +186,6 @@ import traceback
|
||||
from ansible.module_utils.basic import AnsibleModule
|
||||
from ansible_collections.community.general.plugins.module_utils.ipa import IPAClient, ipa_argument_spec
|
||||
from ansible.module_utils.common.text.converters import to_native
|
||||
from ansible_collections.community.general.plugins.module_utils.version import LooseVersion
|
||||
|
||||
|
||||
class SudoRuleIPAClient(IPAClient):
|
||||
@@ -311,17 +310,10 @@ def ensure(module, client):
|
||||
runasgroupcategory = module.params['runasgroupcategory']
|
||||
runasextusers = module.params['runasextusers']
|
||||
|
||||
ipa_version = client.get_ipa_version()
|
||||
if state in ['present', 'enabled']:
|
||||
if LooseVersion(ipa_version) < LooseVersion('4.9.10'):
|
||||
ipaenabledflag = 'TRUE'
|
||||
else:
|
||||
ipaenabledflag = True
|
||||
ipaenabledflag = 'TRUE'
|
||||
else:
|
||||
if LooseVersion(ipa_version) < LooseVersion('4.9.10'):
|
||||
ipaenabledflag = 'FALSE'
|
||||
else:
|
||||
ipaenabledflag = False
|
||||
ipaenabledflag = 'FALSE'
|
||||
|
||||
sudoopt = module.params['sudoopt']
|
||||
user = module.params['user']
|
||||
|
||||
@@ -207,9 +207,7 @@ saved:
|
||||
"# Completed"
|
||||
]
|
||||
tables:
|
||||
description:
|
||||
- The iptables on the system before the module has run, separated by table.
|
||||
- If the option O(table) is used, only this table is included.
|
||||
description: The iptables we have interest for when module starts.
|
||||
type: dict
|
||||
contains:
|
||||
table:
|
||||
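The tables return value described above can be registered and examined per table. A sketch assuming the module's path and state options, with state=saved simply dumping the active rules (path and table are illustrative):

- name: Save the current rules and look at the per-table breakdown
  community.general.iptables_state:
    path: /etc/iptables/rules.v4
    state: saved
    table: filter
  register: ipt

- name: Show the filter table content as it was before the module ran
  ansible.builtin.debug:
    var: ipt.tables.filter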
@@ -348,27 +346,20 @@ def filter_and_format_state(string):
|
||||
return lines
|
||||
|
||||
|
||||
def parse_per_table_state(all_states_dump):
|
||||
def per_table_state(command, state):
|
||||
'''
|
||||
Convert raw iptables-save output into usable datastructure, for reliable
|
||||
comparisons between initial and final states.
|
||||
'''
|
||||
lines = filter_and_format_state(all_states_dump)
|
||||
tables = dict()
|
||||
current_table = ''
|
||||
current_list = list()
|
||||
for line in lines:
|
||||
if re.match(r'^[*](filter|mangle|nat|raw|security)$', line):
|
||||
current_table = line[1:]
|
||||
continue
|
||||
if line == 'COMMIT':
|
||||
tables[current_table] = current_list
|
||||
current_table = ''
|
||||
current_list = list()
|
||||
continue
|
||||
if line.startswith('# '):
|
||||
continue
|
||||
current_list.append(line)
|
||||
for t in TABLES:
|
||||
COMMAND = list(command)
|
||||
if '*%s' % t in state.splitlines():
|
||||
COMMAND.extend(['--table', t])
|
||||
dummy, out, dummy = module.run_command(COMMAND, check_rc=True)
|
||||
out = re.sub(r'(^|\n)(# Generated|# Completed|[*]%s|COMMIT)[^\n]*' % t, r'', out)
|
||||
out = re.sub(r' *\[[0-9]+:[0-9]+\] *', r'', out)
|
||||
tables[t] = [tt for tt in out.splitlines() if tt != '']
|
||||
return tables
|
||||
|
||||
|
||||
@@ -495,7 +486,7 @@ def main():
|
||||
# Depending on the value of 'table', initref_state may differ from
|
||||
# initial_state.
|
||||
(rc, stdout, stderr) = module.run_command(SAVECOMMAND, check_rc=True)
|
||||
tables_before = parse_per_table_state(stdout)
|
||||
tables_before = per_table_state(SAVECOMMAND, stdout)
|
||||
initref_state = filter_and_format_state(stdout)
|
||||
|
||||
if state == 'saved':
|
||||
@@ -592,17 +583,14 @@ def main():
|
||||
|
||||
(rc, stdout, stderr) = module.run_command(SAVECOMMAND, check_rc=True)
|
||||
restored_state = filter_and_format_state(stdout)
|
||||
tables_after = parse_per_table_state('\n'.join(restored_state))
|
||||
|
||||
if restored_state not in (initref_state, initial_state):
|
||||
for table_name, table_content in tables_after.items():
|
||||
if table_name not in tables_before:
|
||||
# Would initialize a table, which doesn't exist yet
|
||||
if module.check_mode:
|
||||
changed = True
|
||||
else:
|
||||
tables_after = per_table_state(SAVECOMMAND, stdout)
|
||||
if tables_after != tables_before:
|
||||
changed = True
|
||||
break
|
||||
if tables_before[table_name] != table_content:
|
||||
# Content of some table changes
|
||||
changed = True
|
||||
break
|
||||
|
||||
if _back is None or module.check_mode:
|
||||
module.exit_json(
|
||||
@@ -645,7 +633,7 @@ def main():
|
||||
os.remove(b_back)
|
||||
|
||||
(rc, stdout, stderr) = module.run_command(SAVECOMMAND, check_rc=True)
|
||||
tables_rollback = parse_per_table_state(stdout)
|
||||
tables_rollback = per_table_state(SAVECOMMAND, stdout)
|
||||
|
||||
msg = (
|
||||
"Failed to confirm state restored from %s after %ss. "
|
||||
|
||||
@@ -195,14 +195,7 @@ def send_msg(msg, server='localhost', port='6667', channel=None, nick_to=None, k
|
||||
|
||||
irc = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
|
||||
if use_ssl:
|
||||
if getattr(ssl, 'PROTOCOL_TLS', None) is not None:
|
||||
# Supported since Python 2.7.13
|
||||
context = ssl.SSLContext(ssl.PROTOCOL_TLS)
|
||||
else:
|
||||
context = ssl.SSLContext()
|
||||
context.verify_mode = ssl.CERT_NONE
|
||||
# TODO: create a secure context with `context = ssl.create_default_context()` instead!
|
||||
irc = context.wrap_socket(irc)
|
||||
irc = ssl.wrap_socket(irc)
|
||||
irc.connect((server, int(port)))
|
||||
|
||||
if passwd:
|
||||
|
||||
@@ -70,7 +70,7 @@ options:
|
||||
type: str
|
||||
required: true
|
||||
notes:
|
||||
- The C(pycdlib) library states it supports Python 2.7 and 3.4+.
|
||||
- The C(pycdlib) library states it supports Python 2.7 and 3.4 only.
|
||||
- >
|
||||
The function C(add_file) in pycdlib will overwrite the existing file in ISO with type ISO9660 / Rock Ridge 1.12 / Joliet / UDF.
|
||||
But it will not overwrite the existing file in ISO with Rock Ridge 1.09 / 1.10.
|
||||
|
||||
@@ -150,11 +150,13 @@ EXAMPLES = '''
|
||||
name: example
|
||||
certificate: |
|
||||
-----BEGIN CERTIFICATE-----
|
||||
h19dUZ2co2f...
|
||||
h19dUZ2co2fI/ibYiwxWk4aeNE6KWvCaTQOMQ8t6Uo2XKhpL/xnjoAgh1uCQN/69
|
||||
MG+34+RhUWzCfdZH7T8/qDxJw2kEPKluaYh7KnMsba+5jHjmtzix5QIDAQABo4IB
|
||||
-----END CERTIFICATE-----
|
||||
private_key: |
|
||||
-----BEGIN RSA PRIVATE KEY-----
|
||||
DBVFTEVDVFJ...
|
||||
DBVFTEVDVFJJQ0lURSBERSBGUkFOQ0UxFzAVBgNVBAsMDjAwMDIgNTUyMDgxMzE3
|
||||
GLlDNMw/uHyME7gHFsqJA7O11VY6O5WQ4IDP3m/s5ZV6s+Nn6Lerz17VZ99
|
||||
-----END RSA PRIVATE KEY-----
|
||||
password: changeit
|
||||
dest: /etc/security/keystore.jks
|
||||
@@ -470,7 +472,7 @@ class JavaKeystore:
|
||||
|
||||
if self.keystore_type == 'pkcs12':
|
||||
# Preserve properties of the destination file, if any.
|
||||
self.module.atomic_move(os.path.abspath(keystore_p12_path), os.path.abspath(self.keystore_path))
|
||||
self.module.atomic_move(keystore_p12_path, self.keystore_path)
|
||||
self.update_permissions()
|
||||
self.result['changed'] = True
|
||||
return self.result
|
||||
|
||||
Some files were not shown because too many files have changed in this diff.