mirror of
https://github.com/ansible-collections/community.general.git
synced 2026-04-29 01:46:53 +00:00
Compare commits
234 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
026a54a634 | ||
|
|
6b8698a402 | ||
|
|
225818b1d4 | ||
|
|
02cd2e9252 | ||
|
|
f6edbe1f98 | ||
|
|
973da9670f | ||
|
|
e5a5e5a3ff | ||
|
|
7279ea33dd | ||
|
|
7b302f1d06 | ||
|
|
46b60dfdca | ||
|
|
9df16f405b | ||
|
|
af5920a4f2 | ||
|
|
2295837edf | ||
|
|
099e17d408 | ||
|
|
9943c564d5 | ||
|
|
8dd0a14666 | ||
|
|
35442f9e28 | ||
|
|
9101fb5046 | ||
|
|
5f968f609b | ||
|
|
3ed0a9c141 | ||
|
|
7ef0d9371b | ||
|
|
d053b6a1dc | ||
|
|
44e6f3bd97 | ||
|
|
fcf9e08dfa | ||
|
|
9b973ce633 | ||
|
|
0bc29c344d | ||
|
|
cc52e194be | ||
|
|
9e112272be | ||
|
|
8361aecf15 | ||
|
|
ebe214074e | ||
|
|
8db1ffeb49 | ||
|
|
4ea6ad08fa | ||
|
|
5fb79182e9 | ||
|
|
01f50d249a | ||
|
|
5d3ea0977a | ||
|
|
0c213d76c5 | ||
|
|
98e0f0b6ff | ||
|
|
421b5253f8 | ||
|
|
5e1ff39c8d | ||
|
|
a87aa9e360 | ||
|
|
744c92ae0d | ||
|
|
5392e0f1cc | ||
|
|
d64e1e8e64 | ||
|
|
7d3f6be2a2 | ||
|
|
624c5f7b68 | ||
|
|
3db3608335 | ||
|
|
af4ac4813e | ||
|
|
5dc60cbc9e | ||
|
|
8f6ea3de8e | ||
|
|
e4765f2e2b | ||
|
|
a91255824e | ||
|
|
e60daa8509 | ||
|
|
fd2528dc3c | ||
|
|
72b282fe85 | ||
|
|
cb49b96b4d | ||
|
|
2aca1a910c | ||
|
|
c44f07a405 | ||
|
|
e113c8cf11 | ||
|
|
552e1e826c | ||
|
|
516107aaf6 | ||
|
|
fb7797702a | ||
|
|
21478e3be0 | ||
|
|
3e6974815f | ||
|
|
737789b1e3 | ||
|
|
3176c08b5b | ||
|
|
40e0379112 | ||
|
|
e3a12f1e54 | ||
|
|
3e740112a0 | ||
|
|
8ce81d24f2 | ||
|
|
ba7f680486 | ||
|
|
637eb8a275 | ||
|
|
7e608db3e5 | ||
|
|
afb16dc0a2 | ||
|
|
6d03887c55 | ||
|
|
f2d980f75a | ||
|
|
d0b27d68e2 | ||
|
|
70747b0db8 | ||
|
|
c9728c2869 | ||
|
|
adced191b0 | ||
|
|
adda78b162 | ||
|
|
5e010994d9 | ||
|
|
51bf8ed298 | ||
|
|
c3b1a3a873 | ||
|
|
5da6dc7d72 | ||
|
|
d96600d4de | ||
|
|
1f5703d0af | ||
|
|
170a099101 | ||
|
|
04df03f365 | ||
|
|
76f9bc5b69 | ||
|
|
f9fdf816eb | ||
|
|
7ff20a5e98 | ||
|
|
865a8e9961 | ||
|
|
f61a6bd991 | ||
|
|
f4bae2d717 | ||
|
|
66e3c19a57 | ||
|
|
24210b32cf | ||
|
|
6932a937c5 | ||
|
|
a467da2f39 | ||
|
|
7c76d92ed0 | ||
|
|
74c7cee446 | ||
|
|
7b532be10d | ||
|
|
38616e43f9 | ||
|
|
dfac632d25 | ||
|
|
270754dc10 | ||
|
|
fef77b3c9c | ||
|
|
b022b87362 | ||
|
|
2c92db98d5 | ||
|
|
b66df6932e | ||
|
|
5060f19a05 | ||
|
|
99652cb06d | ||
|
|
6aeeab18c2 | ||
|
|
2a2bfb6c5b | ||
|
|
773ab9ba25 | ||
|
|
4fde0617c8 | ||
|
|
8cc343110f | ||
|
|
37feac8f68 | ||
|
|
7392a8e52c | ||
|
|
bfebc93f15 | ||
|
|
b6e822aad2 | ||
|
|
2a6f91b4e4 | ||
|
|
8d94d16eec | ||
|
|
f2c08bebd6 | ||
|
|
e7f66d9bc1 | ||
|
|
21e402e2bb | ||
|
|
6d4c5b76a4 | ||
|
|
a1fd642008 | ||
|
|
8298b2c7c1 | ||
|
|
25ff8d4179 | ||
|
|
145ceb693b | ||
|
|
40d094e63a | ||
|
|
6988ea052d | ||
|
|
f26883f45f | ||
|
|
03b312c0ae | ||
|
|
a634cc2928 | ||
|
|
14f23fbebe | ||
|
|
77aabcd8f5 | ||
|
|
3a1f23323c | ||
|
|
6ccb9a9813 | ||
|
|
e6e4260926 | ||
|
|
7111edd631 | ||
|
|
a84a004308 | ||
|
|
ac5bbe666e | ||
|
|
0273420e70 | ||
|
|
ce8d9d56ca | ||
|
|
7f6623657f | ||
|
|
ccdec10c67 | ||
|
|
09abc09b52 | ||
|
|
c87ba3a626 | ||
|
|
cda6248cea | ||
|
|
2cbd8ba71e | ||
|
|
eb2b8f4409 | ||
|
|
a27b1a135d | ||
|
|
db6cb07028 | ||
|
|
6ccffc3de5 | ||
|
|
17cc574b04 | ||
|
|
1f2c352b83 | ||
|
|
b90f87f3d8 | ||
|
|
b23fdc3be3 | ||
|
|
bc83586c15 | ||
|
|
b765938b79 | ||
|
|
4ed5177d60 | ||
|
|
88ac419c0e | ||
|
|
cc63dd884c | ||
|
|
d817fc7215 | ||
|
|
64897d762c | ||
|
|
64cbf1900b | ||
|
|
7e23ef3801 | ||
|
|
27fc80895c | ||
|
|
0754449d60 | ||
|
|
460cd523fe | ||
|
|
ad2d899713 | ||
|
|
3182be1a2f | ||
|
|
b726110f1f | ||
|
|
24a4d6e685 | ||
|
|
1badcffe1c | ||
|
|
b87196348a | ||
|
|
21423ca6a0 | ||
|
|
4926f15d86 | ||
|
|
9466103a4a | ||
|
|
b26df2a008 | ||
|
|
cba4fa2fe8 | ||
|
|
833530ab47 | ||
|
|
1f0361a1c6 | ||
|
|
4df53dbacf | ||
|
|
f25519e308 | ||
|
|
1bb47ad73e | ||
|
|
fd3e84fcd6 | ||
|
|
f75471e7d2 | ||
|
|
aadf1d4f6a | ||
|
|
f5ff54979a | ||
|
|
957a74b525 | ||
|
|
29afed337a | ||
|
|
e748acdd51 | ||
|
|
eed45fe6aa | ||
|
|
cce52e1812 | ||
|
|
0bcce340ed | ||
|
|
da8a11b8d8 | ||
|
|
9ed0603072 | ||
|
|
e2fa11b381 | ||
|
|
36f7ff15e9 | ||
|
|
8eac491057 | ||
|
|
607f3d83a0 | ||
|
|
d6cd90838f | ||
|
|
22e0a6dac7 | ||
|
|
3751e188ca | ||
|
|
cd4f3ca445 | ||
|
|
1d05f81e53 | ||
|
|
4ef80ecd46 | ||
|
|
68e184eba8 | ||
|
|
5dcd2c7df5 | ||
|
|
134a0dc7e2 | ||
|
|
13e3e176fb | ||
|
|
eb98be580d | ||
|
|
49ef8b1900 | ||
|
|
19d22d605a | ||
|
|
f17b10bfa2 | ||
|
|
258eb68022 | ||
|
|
264c98189c | ||
|
|
7aec01190a | ||
|
|
00fd2847e4 | ||
|
|
94ea18f1cb | ||
|
|
0b42aca72f | ||
|
|
2658bf31cd | ||
|
|
869e1a1eab | ||
|
|
d25b6e7681 | ||
|
|
8beb5d70c5 | ||
|
|
f9fecf12e7 | ||
|
|
b165337bbe | ||
|
|
6572f46998 | ||
|
|
b4ae2ce44d | ||
|
|
baec510c40 | ||
|
|
96cda3a48a | ||
|
|
9dc2e2d032 | ||
|
|
86c0af6cbb |
@@ -1,9 +0,0 @@
|
||||
<!--
|
||||
Copyright (c) Ansible Project
|
||||
GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||
SPDX-License-Identifier: GPL-3.0-or-later
|
||||
-->
|
||||
|
||||
## Azure Pipelines Configuration
|
||||
|
||||
Please see the [Documentation](https://github.com/ansible/community/wiki/Testing:-Azure-Pipelines) for more information.
|
||||
@@ -1,417 +0,0 @@
|
||||
---
|
||||
# Copyright (c) Ansible Project
|
||||
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||
# SPDX-License-Identifier: GPL-3.0-or-later
|
||||
|
||||
trigger:
|
||||
batch: true
|
||||
branches:
|
||||
include:
|
||||
- main
|
||||
- stable-*
|
||||
|
||||
pr:
|
||||
autoCancel: true
|
||||
branches:
|
||||
include:
|
||||
- main
|
||||
- stable-*
|
||||
|
||||
schedules:
|
||||
- cron: 0 8 * * *
|
||||
displayName: Nightly (main)
|
||||
always: true
|
||||
branches:
|
||||
include:
|
||||
- main
|
||||
- cron: 0 10 * * *
|
||||
displayName: Nightly (active stable branches)
|
||||
always: true
|
||||
branches:
|
||||
include:
|
||||
- stable-5
|
||||
- stable-4
|
||||
- cron: 0 11 * * 0
|
||||
displayName: Weekly (old stable branches)
|
||||
always: true
|
||||
branches:
|
||||
include:
|
||||
- stable-3
|
||||
|
||||
variables:
|
||||
- name: checkoutPath
|
||||
value: ansible_collections/community/general
|
||||
- name: coverageBranches
|
||||
value: main
|
||||
- name: pipelinesCoverage
|
||||
value: coverage
|
||||
- name: entryPoint
|
||||
value: tests/utils/shippable/shippable.sh
|
||||
- name: fetchDepth
|
||||
value: 0
|
||||
|
||||
resources:
|
||||
containers:
|
||||
- container: default
|
||||
image: quay.io/ansible/azure-pipelines-test-container:3.0.0
|
||||
|
||||
pool: Standard
|
||||
|
||||
stages:
|
||||
### Sanity
|
||||
- stage: Sanity_devel
|
||||
displayName: Sanity devel
|
||||
dependsOn: []
|
||||
jobs:
|
||||
- template: templates/matrix.yml
|
||||
parameters:
|
||||
nameFormat: Test {0}
|
||||
testFormat: devel/sanity/{0}
|
||||
targets:
|
||||
- test: 1
|
||||
- test: 2
|
||||
- test: 3
|
||||
- test: 4
|
||||
- test: extra
|
||||
- stage: Sanity_2_13
|
||||
displayName: Sanity 2.13
|
||||
dependsOn: []
|
||||
jobs:
|
||||
- template: templates/matrix.yml
|
||||
parameters:
|
||||
nameFormat: Test {0}
|
||||
testFormat: 2.13/sanity/{0}
|
||||
targets:
|
||||
- test: 1
|
||||
- test: 2
|
||||
- test: 3
|
||||
- test: 4
|
||||
- stage: Sanity_2_12
|
||||
displayName: Sanity 2.12
|
||||
dependsOn: []
|
||||
jobs:
|
||||
- template: templates/matrix.yml
|
||||
parameters:
|
||||
nameFormat: Test {0}
|
||||
testFormat: 2.12/sanity/{0}
|
||||
targets:
|
||||
- test: 1
|
||||
- test: 2
|
||||
- test: 3
|
||||
- test: 4
|
||||
- stage: Sanity_2_11
|
||||
displayName: Sanity 2.11
|
||||
dependsOn: []
|
||||
jobs:
|
||||
- template: templates/matrix.yml
|
||||
parameters:
|
||||
nameFormat: Test {0}
|
||||
testFormat: 2.11/sanity/{0}
|
||||
targets:
|
||||
- test: 1
|
||||
- test: 2
|
||||
- test: 3
|
||||
- test: 4
|
||||
### Units
|
||||
- stage: Units_devel
|
||||
displayName: Units devel
|
||||
dependsOn: []
|
||||
jobs:
|
||||
- template: templates/matrix.yml
|
||||
parameters:
|
||||
nameFormat: Python {0}
|
||||
testFormat: devel/units/{0}/1
|
||||
targets:
|
||||
- test: 2.7
|
||||
- test: 3.5
|
||||
- test: 3.6
|
||||
- test: 3.7
|
||||
- test: 3.8
|
||||
- test: 3.9
|
||||
- test: '3.10'
|
||||
- stage: Units_2_13
|
||||
displayName: Units 2.13
|
||||
dependsOn: []
|
||||
jobs:
|
||||
- template: templates/matrix.yml
|
||||
parameters:
|
||||
nameFormat: Python {0}
|
||||
testFormat: 2.13/units/{0}/1
|
||||
targets:
|
||||
- test: 2.7
|
||||
- test: 3.6
|
||||
- test: 3.8
|
||||
- test: 3.9
|
||||
- stage: Units_2_12
|
||||
displayName: Units 2.12
|
||||
dependsOn: []
|
||||
jobs:
|
||||
- template: templates/matrix.yml
|
||||
parameters:
|
||||
nameFormat: Python {0}
|
||||
testFormat: 2.12/units/{0}/1
|
||||
targets:
|
||||
- test: 2.6
|
||||
- test: 3.5
|
||||
- test: 3.8
|
||||
- stage: Units_2_11
|
||||
displayName: Units 2.11
|
||||
dependsOn: []
|
||||
jobs:
|
||||
- template: templates/matrix.yml
|
||||
parameters:
|
||||
nameFormat: Python {0}
|
||||
testFormat: 2.11/units/{0}/1
|
||||
targets:
|
||||
- test: 2.6
|
||||
- test: 2.7
|
||||
- test: 3.5
|
||||
- test: 3.6
|
||||
- test: 3.9
|
||||
|
||||
## Remote
|
||||
- stage: Remote_devel
|
||||
displayName: Remote devel
|
||||
dependsOn: []
|
||||
jobs:
|
||||
- template: templates/matrix.yml
|
||||
parameters:
|
||||
testFormat: devel/{0}
|
||||
targets:
|
||||
- name: macOS 12.0
|
||||
test: macos/12.0
|
||||
- name: RHEL 7.9
|
||||
test: rhel/7.9
|
||||
- name: RHEL 9.0
|
||||
test: rhel/9.0
|
||||
- name: FreeBSD 12.3
|
||||
test: freebsd/12.3
|
||||
- name: FreeBSD 13.1
|
||||
test: freebsd/13.1
|
||||
groups:
|
||||
- 1
|
||||
- 2
|
||||
- 3
|
||||
- stage: Remote_2_13
|
||||
displayName: Remote 2.13
|
||||
dependsOn: []
|
||||
jobs:
|
||||
- template: templates/matrix.yml
|
||||
parameters:
|
||||
testFormat: 2.13/{0}
|
||||
targets:
|
||||
- name: macOS 12.0
|
||||
test: macos/12.0
|
||||
- name: RHEL 8.5
|
||||
test: rhel/8.5
|
||||
groups:
|
||||
- 1
|
||||
- 2
|
||||
- 3
|
||||
- stage: Remote_2_12
|
||||
displayName: Remote 2.12
|
||||
dependsOn: []
|
||||
jobs:
|
||||
- template: templates/matrix.yml
|
||||
parameters:
|
||||
testFormat: 2.12/{0}
|
||||
targets:
|
||||
- name: macOS 11.1
|
||||
test: macos/11.1
|
||||
- name: RHEL 8.4
|
||||
test: rhel/8.4
|
||||
- name: FreeBSD 13.0
|
||||
test: freebsd/13.0
|
||||
groups:
|
||||
- 1
|
||||
- 2
|
||||
- 3
|
||||
- stage: Remote_2_11
|
||||
displayName: Remote 2.11
|
||||
dependsOn: []
|
||||
jobs:
|
||||
- template: templates/matrix.yml
|
||||
parameters:
|
||||
testFormat: 2.11/{0}
|
||||
targets:
|
||||
- name: RHEL 7.9
|
||||
test: rhel/7.9
|
||||
- name: RHEL 8.3
|
||||
test: rhel/8.3
|
||||
#- name: FreeBSD 12.2
|
||||
# test: freebsd/12.2
|
||||
groups:
|
||||
- 1
|
||||
- 2
|
||||
- 3
|
||||
|
||||
### Docker
|
||||
- stage: Docker_devel
|
||||
displayName: Docker devel
|
||||
dependsOn: []
|
||||
jobs:
|
||||
- template: templates/matrix.yml
|
||||
parameters:
|
||||
testFormat: devel/linux/{0}
|
||||
targets:
|
||||
- name: CentOS 7
|
||||
test: centos7
|
||||
- name: Fedora 36
|
||||
test: fedora36
|
||||
- name: openSUSE 15
|
||||
test: opensuse15
|
||||
- name: Ubuntu 20.04
|
||||
test: ubuntu2004
|
||||
- name: Ubuntu 22.04
|
||||
test: ubuntu2204
|
||||
- name: Alpine 3
|
||||
test: alpine3
|
||||
groups:
|
||||
- 1
|
||||
- 2
|
||||
- 3
|
||||
- stage: Docker_2_13
|
||||
displayName: Docker 2.13
|
||||
dependsOn: []
|
||||
jobs:
|
||||
- template: templates/matrix.yml
|
||||
parameters:
|
||||
testFormat: 2.13/linux/{0}
|
||||
targets:
|
||||
- name: Fedora 35
|
||||
test: fedora35
|
||||
- name: openSUSE 15 py2
|
||||
test: opensuse15py2
|
||||
- name: Alpine 3
|
||||
test: alpine3
|
||||
groups:
|
||||
- 1
|
||||
- 2
|
||||
- 3
|
||||
- stage: Docker_2_12
|
||||
displayName: Docker 2.12
|
||||
dependsOn: []
|
||||
jobs:
|
||||
- template: templates/matrix.yml
|
||||
parameters:
|
||||
testFormat: 2.12/linux/{0}
|
||||
targets:
|
||||
- name: CentOS 6
|
||||
test: centos6
|
||||
- name: Fedora 34
|
||||
test: fedora34
|
||||
- name: Ubuntu 18.04
|
||||
test: ubuntu1804
|
||||
groups:
|
||||
- 1
|
||||
- 2
|
||||
- 3
|
||||
- stage: Docker_2_11
|
||||
displayName: Docker 2.11
|
||||
dependsOn: []
|
||||
jobs:
|
||||
- template: templates/matrix.yml
|
||||
parameters:
|
||||
testFormat: 2.11/linux/{0}
|
||||
targets:
|
||||
- name: Fedora 32
|
||||
test: fedora32
|
||||
- name: Fedora 33
|
||||
test: fedora33
|
||||
- name: Alpine 3
|
||||
test: alpine3
|
||||
groups:
|
||||
- 1
|
||||
- 2
|
||||
- 3
|
||||
|
||||
### Community Docker
|
||||
- stage: Docker_community_devel
|
||||
displayName: Docker (community images) devel
|
||||
dependsOn: []
|
||||
jobs:
|
||||
- template: templates/matrix.yml
|
||||
parameters:
|
||||
testFormat: devel/linux-community/{0}
|
||||
targets:
|
||||
- name: Debian Bullseye
|
||||
test: debian-bullseye/3.9
|
||||
- name: ArchLinux
|
||||
test: archlinux/3.10
|
||||
- name: CentOS Stream 8
|
||||
test: centos-stream8/3.8
|
||||
groups:
|
||||
- 1
|
||||
- 2
|
||||
- 3
|
||||
|
||||
### Cloud
|
||||
- stage: Cloud_devel
|
||||
displayName: Cloud devel
|
||||
dependsOn: []
|
||||
jobs:
|
||||
- template: templates/matrix.yml
|
||||
parameters:
|
||||
nameFormat: Python {0}
|
||||
testFormat: devel/cloud/{0}/1
|
||||
targets:
|
||||
- test: 2.7
|
||||
- test: '3.10'
|
||||
- stage: Cloud_2_13
|
||||
displayName: Cloud 2.13
|
||||
dependsOn: []
|
||||
jobs:
|
||||
- template: templates/matrix.yml
|
||||
parameters:
|
||||
nameFormat: Python {0}
|
||||
testFormat: 2.13/cloud/{0}/1
|
||||
targets:
|
||||
- test: 3.9
|
||||
- stage: Cloud_2_12
|
||||
displayName: Cloud 2.12
|
||||
dependsOn: []
|
||||
jobs:
|
||||
- template: templates/matrix.yml
|
||||
parameters:
|
||||
nameFormat: Python {0}
|
||||
testFormat: 2.12/cloud/{0}/1
|
||||
targets:
|
||||
- test: 3.8
|
||||
- stage: Cloud_2_11
|
||||
displayName: Cloud 2.11
|
||||
dependsOn: []
|
||||
jobs:
|
||||
- template: templates/matrix.yml
|
||||
parameters:
|
||||
nameFormat: Python {0}
|
||||
testFormat: 2.11/cloud/{0}/1
|
||||
targets:
|
||||
- test: 2.7
|
||||
- test: 3.5
|
||||
|
||||
- stage: Summary
|
||||
condition: succeededOrFailed()
|
||||
dependsOn:
|
||||
- Sanity_devel
|
||||
- Sanity_2_11
|
||||
- Sanity_2_12
|
||||
- Sanity_2_13
|
||||
- Units_devel
|
||||
- Units_2_11
|
||||
- Units_2_12
|
||||
- Units_2_13
|
||||
- Remote_devel
|
||||
- Remote_2_11
|
||||
- Remote_2_12
|
||||
- Remote_2_13
|
||||
- Docker_devel
|
||||
- Docker_2_11
|
||||
- Docker_2_12
|
||||
- Docker_2_13
|
||||
- Docker_community_devel
|
||||
- Cloud_devel
|
||||
- Cloud_2_11
|
||||
- Cloud_2_12
|
||||
- Cloud_2_13
|
||||
jobs:
|
||||
- template: templates/coverage.yml
|
||||
@@ -1,24 +0,0 @@
|
||||
#!/usr/bin/env bash
# Copyright (c) Ansible Project
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
# SPDX-License-Identifier: GPL-3.0-or-later

# Aggregate code coverage results for later processing.

set -o pipefail -eu

# Azure Pipelines agent temp directory; combined coverage is exported here
# so a later pipeline step can publish it as an artifact.
agent_temp_directory="$1"

# Prefer tools placed under ./bin by earlier pipeline steps.
PATH="${PWD}/bin:${PATH}"

mkdir "${agent_temp_directory}/coverage/"

options=(--venv --venv-system-site-packages --color -v)

ansible-test coverage combine --group-by command --export "${agent_temp_directory}/coverage/" "${options[@]}"

if ansible-test coverage analyze targets generate --help >/dev/null 2>&1; then
    # Only analyze coverage if the installed version of ansible-test supports it.
    # Doing so allows this script to work unmodified for multiple Ansible versions.
    ansible-test coverage analyze targets generate "${agent_temp_directory}/coverage/coverage-analyze-targets.json" "${options[@]}"
fi
|
||||
@@ -1,64 +0,0 @@
|
||||
#!/usr/bin/env python
|
||||
# Copyright (c) Ansible Project
|
||||
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||
# SPDX-License-Identifier: GPL-3.0-or-later
|
||||
|
||||
"""
|
||||
Combine coverage data from multiple jobs, keeping the data only from the most recent attempt from each job.
|
||||
Coverage artifacts must be named using the format: "Coverage $(System.JobAttempt) {StableUniqueNameForEachJob}"
|
||||
The recommended coverage artifact name format is: Coverage $(System.JobAttempt) $(System.StageDisplayName) $(System.JobDisplayName)
|
||||
Keep in mind that Azure Pipelines does not enforce unique job display names (only names).
|
||||
It is up to pipeline authors to avoid name collisions when deviating from the recommended format.
|
||||
"""
|
||||
|
||||
from __future__ import (absolute_import, division, print_function)
|
||||
__metaclass__ = type
|
||||
|
||||
import os
|
||||
import re
|
||||
import shutil
|
||||
import sys
|
||||
|
||||
|
||||
def main():
    """Main program entry point.

    Copies coverage files from the highest-numbered attempt of each job
    found in the artifact directory given as ``sys.argv[1]`` into the
    ansible-test output directory, then emits Azure Pipelines logging
    commands reporting the file count and output path.
    """
    source_directory = sys.argv[1]

    # ansible-test writes results to different locations for collection
    # checkouts versus ansible-core checkouts.
    if '/ansible_collections/' in os.getcwd():
        output_path = "tests/output"
    else:
        output_path = "test/results"

    destination_directory = os.path.join(output_path, 'coverage')

    if not os.path.exists(destination_directory):
        os.makedirs(destination_directory)

    jobs = {}
    count = 0

    # First pass: record the highest attempt number seen for each job label.
    for name in os.listdir(source_directory):
        match = re.search('^Coverage (?P<attempt>[0-9]+) (?P<label>.+)$', name)

        if not match:
            # Skip entries that do not follow the expected artifact naming
            # scheme instead of crashing on match.group() below.
            continue

        label = match.group('label')
        attempt = int(match.group('attempt'))
        jobs[label] = max(attempt, jobs.get(label, 0))

    # Second pass: copy files only from the most recent attempt of each job,
    # suffixing each file name with its job label to keep names unique.
    for label, attempt in jobs.items():
        name = 'Coverage {attempt} {label}'.format(label=label, attempt=attempt)
        source = os.path.join(source_directory, name)
        source_files = os.listdir(source)

        for source_file in source_files:
            source_path = os.path.join(source, source_file)
            destination_path = os.path.join(destination_directory, source_file + '.' + label)
            print('"%s" -> "%s"' % (source_path, destination_path))
            shutil.copyfile(source_path, destination_path)
            count += 1

    print('Coverage file count: %d' % count)
    # Azure Pipelines logging commands: set variables for later steps.
    print('##vso[task.setVariable variable=coverageFileCount]%d' % count)
    print('##vso[task.setVariable variable=outputPath]%s' % output_path)
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
main()
|
||||
@@ -1,28 +0,0 @@
|
||||
#!/usr/bin/env bash
# Copyright (c) Ansible Project
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
# SPDX-License-Identifier: GPL-3.0-or-later

# Check the test results and set variables for use in later steps.

set -o pipefail -eu

# ansible-test writes results to different locations for collection
# checkouts versus ansible-core checkouts.
if [[ "$PWD" =~ /ansible_collections/ ]]; then
    output_path="tests/output"
else
    output_path="test/results"
fi

# Azure Pipelines logging command: export the output path to later steps.
echo "##vso[task.setVariable variable=outputPath]${output_path}"

# Set a pipeline variable for each kind of result that exists, so later
# steps (publishing, coverage aggregation) can run conditionally.
if compgen -G "${output_path}"'/junit/*.xml' > /dev/null; then
    echo "##vso[task.setVariable variable=haveTestResults]true"
fi

if compgen -G "${output_path}"'/bot/ansible-test-*' > /dev/null; then
    echo "##vso[task.setVariable variable=haveBotResults]true"
fi

if compgen -G "${output_path}"'/coverage/*' > /dev/null; then
    echo "##vso[task.setVariable variable=haveCoverageData]true"
fi
|
||||
@@ -1,105 +0,0 @@
|
||||
#!/usr/bin/env python
|
||||
# Copyright (c) Ansible Project
|
||||
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||
# SPDX-License-Identifier: GPL-3.0-or-later
|
||||
|
||||
"""
|
||||
Upload code coverage reports to codecov.io.
|
||||
Multiple coverage files from multiple languages are accepted and aggregated after upload.
|
||||
Python coverage, as well as PowerShell and Python stubs can all be uploaded.
|
||||
"""
|
||||
|
||||
import argparse
|
||||
import dataclasses
|
||||
import pathlib
|
||||
import shutil
|
||||
import subprocess
|
||||
import tempfile
|
||||
import typing as t
|
||||
import urllib.request
|
||||
|
||||
|
||||
@dataclasses.dataclass(frozen=True)
class CoverageFile:
    """A single coverage XML report together with its codecov.io flags."""
    # Report name derived from the file stem (without the 'coverage=' prefix).
    name: str
    # Location of the coverage XML file on disk.
    path: pathlib.Path
    # Flags passed to codecov.io to categorize this report.
    flags: t.List[str]
|
||||
|
||||
|
||||
@dataclasses.dataclass(frozen=True)
class Args:
    """Typed view of the parsed command line arguments."""
    # When true, print the commands that would run instead of running them.
    dry_run: bool
    # Directory containing ansible-test output (with a 'reports' subdirectory).
    path: pathlib.Path
|
||||
|
||||
|
||||
def parse_args() -> Args:
    """Parse the command line and return the result as a typed Args instance."""
    arg_parser = argparse.ArgumentParser()
    arg_parser.add_argument('-n', '--dry-run', action='store_true')
    arg_parser.add_argument('path', type=pathlib.Path)

    namespace = arg_parser.parse_args()

    # Copy each declared dataclass field out of the argparse namespace so the
    # rest of the program works with a typed, frozen object.
    field_values = {}
    for field in dataclasses.fields(Args):
        field_values[field.name] = getattr(namespace, field.name)

    return Args(**field_values)
|
||||
|
||||
|
||||
def process_files(directory: pathlib.Path) -> t.Tuple[CoverageFile, ...]:
    """Collect coverage XML reports under directory/reports.

    Each report is wrapped in a CoverageFile whose codecov.io flags are
    derived from the report's file name.
    """
    reports_dir = directory.joinpath('reports')
    collected = []

    for report in reports_dir.glob('coverage*.xml'):
        report_name = report.stem.replace('coverage=', '')

        # Flags are the '='-separated parts of the name, minus the
        # '-powershell' suffix and the '-NN' counter on stub reports.
        raw_flags = report_name.replace('-powershell', '').split('=')
        cleaned_flags = []
        for flag in raw_flags:
            if flag.startswith('stub'):
                flag = flag.split('-')[0]
            cleaned_flags.append(flag)

        collected.append(CoverageFile(report_name, report, cleaned_flags))

    return tuple(collected)
|
||||
|
||||
|
||||
def upload_files(codecov_bin: pathlib.Path, files: t.Tuple[CoverageFile, ...], dry_run: bool = False) -> None:
    """Upload each coverage report to codecov.io with the codecov binary.

    With dry_run=True the commands are printed instead of executed.
    """
    for coverage_file in files:
        cmd = [str(codecov_bin), '--name', coverage_file.name, '--file', str(coverage_file.path)]

        # codecov expects one '--flags' option per flag.
        for flag in coverage_file.flags:
            cmd += ['--flags', flag]

        if dry_run:
            print(f'DRY-RUN: Would run command: {cmd}')
        else:
            subprocess.run(cmd, check=True)
|
||||
|
||||
|
||||
def download_file(url: str, dest: pathlib.Path, flags: int, dry_run: bool = False) -> None:
    """Download url to dest and set its permission bits to flags.

    With dry_run=True the intended action is printed instead of performed.
    """
    if dry_run:
        print(f'DRY-RUN: Would download {url} to {dest} and set mode to {flags:o}')
        return

    # Stream the response to disk in 64 KiB chunks to bound memory use.
    with urllib.request.urlopen(url) as response, dest.open('w+b') as target:
        shutil.copyfileobj(response, target, 64 * 1024)

    dest.chmod(flags)
|
||||
|
||||
|
||||
def main():
    """Main program entry point: fetch the codecov uploader, then upload all reports."""
    args = parse_args()
    uploader_url = 'https://ansible-ci-files.s3.amazonaws.com/codecov/linux/codecov'

    # Keep the downloaded uploader in a temporary directory that is removed
    # automatically once the uploads finish.
    with tempfile.TemporaryDirectory(prefix='codecov-') as scratch_dir:
        codecov_bin = pathlib.Path(scratch_dir) / 'codecov'
        download_file(uploader_url, codecov_bin, 0o755, args.dry_run)

        reports = process_files(args.path)
        upload_files(codecov_bin, reports, args.dry_run)
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
main()
|
||||
@@ -1,19 +0,0 @@
|
||||
#!/usr/bin/env bash
# Copyright (c) Ansible Project
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
# SPDX-License-Identifier: GPL-3.0-or-later

# Generate code coverage reports for uploading to Azure Pipelines and codecov.io.

set -o pipefail -eu

# Prefer tools placed under ./bin by earlier pipeline steps.
PATH="${PWD}/bin:${PATH}"

if ! ansible-test --help >/dev/null 2>&1; then
    # Install the devel version of ansible-test for generating code coverage reports.
    # This is only used by Ansible Collections, which are typically tested against multiple Ansible versions (in separate jobs).
    # Since a version of ansible-test is required that can work the output from multiple older releases, the devel version is used.
    pip install https://github.com/ansible/ansible/archive/devel.tar.gz --disable-pip-version-check
fi

ansible-test coverage xml --group-by command --stub --venv --venv-system-site-packages --color -v
|
||||
@@ -1,38 +0,0 @@
|
||||
#!/usr/bin/env bash
# Copyright (c) Ansible Project
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
# SPDX-License-Identifier: GPL-3.0-or-later

# Configure the test environment and run the tests.

set -o pipefail -eu

entry_point="$1"
test="$2"
read -r -a coverage_branches <<< "$3"  # space separated list of branches to run code coverage on for scheduled builds

# Environment variables consumed by the entry point script.
export COMMIT_MESSAGE
export COMPLETE
export COVERAGE
export IS_PULL_REQUEST

if [ "${SYSTEM_PULLREQUEST_TARGETBRANCH:-}" ]; then
    IS_PULL_REQUEST=true
    # HEAD^2 is the PR branch head of the merge commit checked out by Azure Pipelines.
    COMMIT_MESSAGE=$(git log --format=%B -n 1 HEAD^2)
else
    IS_PULL_REQUEST=
    COMMIT_MESSAGE=$(git log --format=%B -n 1 HEAD)
fi

COMPLETE=
COVERAGE=

# Scheduled (nightly/weekly) builds run the complete test matrix, and collect
# coverage when the source branch is in the configured coverage branch list.
if [ "${BUILD_REASON}" = "Schedule" ]; then
    COMPLETE=yes

    if printf '%s\n' "${coverage_branches[@]}" | grep -q "^${BUILD_SOURCEBRANCHNAME}$"; then
        COVERAGE=yes
    fi
fi

# Run the tests, prefixing each output line with a relative timestamp.
"${entry_point}" "${test}" 2>&1 | "$(dirname "$0")/time-command.py"
|
||||
@@ -1,29 +0,0 @@
|
||||
#!/usr/bin/env python
|
||||
# Copyright (c) Ansible Project
|
||||
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||
# SPDX-License-Identifier: GPL-3.0-or-later
|
||||
|
||||
"""Prepends a relative timestamp to each input line from stdin and writes it to stdout."""
|
||||
|
||||
from __future__ import (absolute_import, division, print_function)
|
||||
__metaclass__ = type
|
||||
|
||||
import sys
|
||||
import time
|
||||
|
||||
|
||||
def main():
    """Main program entry point: copy stdin to stdout, timestamping each line."""
    started_at = time.time()

    # Pass undecodable bytes through rather than crashing on them.
    for stream in (sys.stdin, sys.stdout):
        stream.reconfigure(errors='surrogateescape')

    for line in sys.stdin:
        elapsed = time.time() - started_at
        minutes, remainder = divmod(elapsed, 60)
        sys.stdout.write('%02d:%02d %s' % (minutes, remainder, line))
        sys.stdout.flush()
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
main()
|
||||
@@ -1,44 +0,0 @@
|
||||
---
|
||||
# Copyright (c) Ansible Project
|
||||
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||
# SPDX-License-Identifier: GPL-3.0-or-later
|
||||
|
||||
# This template adds a job for processing code coverage data.
|
||||
# It will upload results to Azure Pipelines and codecov.io.
|
||||
# Use it from a job stage that completes after all other jobs have completed.
|
||||
# This can be done by placing it in a separate summary stage that runs after the test stage(s) have completed.
|
||||
|
||||
jobs:
|
||||
- job: Coverage
|
||||
displayName: Code Coverage
|
||||
container: default
|
||||
workspace:
|
||||
clean: all
|
||||
steps:
|
||||
- checkout: self
|
||||
fetchDepth: $(fetchDepth)
|
||||
path: $(checkoutPath)
|
||||
- task: DownloadPipelineArtifact@2
|
||||
displayName: Download Coverage Data
|
||||
inputs:
|
||||
path: coverage/
|
||||
patterns: "Coverage */*=coverage.combined"
|
||||
- bash: .azure-pipelines/scripts/combine-coverage.py coverage/
|
||||
displayName: Combine Coverage Data
|
||||
- bash: .azure-pipelines/scripts/report-coverage.sh
|
||||
displayName: Generate Coverage Report
|
||||
condition: gt(variables.coverageFileCount, 0)
|
||||
- task: PublishCodeCoverageResults@1
|
||||
inputs:
|
||||
codeCoverageTool: Cobertura
|
||||
# Azure Pipelines only accepts a single coverage data file.
|
||||
# That means only Python or PowerShell coverage can be uploaded, but not both.
|
||||
# Set the "pipelinesCoverage" variable to determine which type is uploaded.
|
||||
# Use "coverage" for Python and "coverage-powershell" for PowerShell.
|
||||
summaryFileLocation: "$(outputPath)/reports/$(pipelinesCoverage).xml"
|
||||
displayName: Publish to Azure Pipelines
|
||||
condition: gt(variables.coverageFileCount, 0)
|
||||
- bash: .azure-pipelines/scripts/publish-codecov.py "$(outputPath)"
|
||||
displayName: Publish to codecov.io
|
||||
condition: gt(variables.coverageFileCount, 0)
|
||||
continueOnError: true
|
||||
@@ -1,60 +0,0 @@
|
||||
---
|
||||
# Copyright (c) Ansible Project
|
||||
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||
# SPDX-License-Identifier: GPL-3.0-or-later
|
||||
|
||||
# This template uses the provided targets and optional groups to generate a matrix which is then passed to the test template.
|
||||
# If this matrix template does not provide the required functionality, consider using the test template directly instead.
|
||||
|
||||
parameters:
|
||||
# A required list of dictionaries, one per test target.
|
||||
# Each item in the list must contain a "test" or "name" key.
|
||||
# Both may be provided. If one is omitted, the other will be used.
|
||||
- name: targets
|
||||
type: object
|
||||
|
||||
# An optional list of values which will be used to multiply the targets list into a matrix.
|
||||
# Values can be strings or numbers.
|
||||
- name: groups
|
||||
type: object
|
||||
default: []
|
||||
|
||||
# An optional format string used to generate the job name.
|
||||
# - {0} is the name of an item in the targets list.
|
||||
- name: nameFormat
|
||||
type: string
|
||||
default: "{0}"
|
||||
|
||||
# An optional format string used to generate the test name.
|
||||
# - {0} is the name of an item in the targets list.
|
||||
- name: testFormat
|
||||
type: string
|
||||
default: "{0}"
|
||||
|
||||
# An optional format string used to add the group to the job name.
|
||||
# {0} is the formatted name of an item in the targets list.
|
||||
# {{1}} is the group -- be sure to include the double "{{" and "}}".
|
||||
- name: nameGroupFormat
|
||||
type: string
|
||||
default: "{0} - {{1}}"
|
||||
|
||||
# An optional format string used to add the group to the test name.
|
||||
# {0} is the formatted test of an item in the targets list.
|
||||
# {{1}} is the group -- be sure to include the double "{{" and "}}".
|
||||
- name: testGroupFormat
|
||||
type: string
|
||||
default: "{0}/{{1}}"
|
||||
|
||||
jobs:
|
||||
- template: test.yml
|
||||
parameters:
|
||||
jobs:
|
||||
- ${{ if eq(length(parameters.groups), 0) }}:
|
||||
- ${{ each target in parameters.targets }}:
|
||||
- name: ${{ format(parameters.nameFormat, coalesce(target.name, target.test)) }}
|
||||
test: ${{ format(parameters.testFormat, coalesce(target.test, target.name)) }}
|
||||
- ${{ if not(eq(length(parameters.groups), 0)) }}:
|
||||
- ${{ each group in parameters.groups }}:
|
||||
- ${{ each target in parameters.targets }}:
|
||||
- name: ${{ format(format(parameters.nameGroupFormat, parameters.nameFormat), coalesce(target.name, target.test), group) }}
|
||||
test: ${{ format(format(parameters.testGroupFormat, parameters.testFormat), coalesce(target.test, target.name), group) }}
|
||||
@@ -1,50 +0,0 @@
|
||||
---
|
||||
# Copyright (c) Ansible Project
|
||||
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||
# SPDX-License-Identifier: GPL-3.0-or-later
|
||||
|
||||
# This template uses the provided list of jobs to create test one or more test jobs.
|
||||
# It can be used directly if needed, or through the matrix template.
|
||||
|
||||
parameters:
|
||||
# A required list of dictionaries, one per test job.
|
||||
# Each item in the list must contain a "job" and "name" key.
|
||||
- name: jobs
|
||||
type: object
|
||||
|
||||
jobs:
|
||||
- ${{ each job in parameters.jobs }}:
|
||||
- job: test_${{ replace(replace(replace(job.test, '/', '_'), '.', '_'), '-', '_') }}
|
||||
displayName: ${{ job.name }}
|
||||
container: default
|
||||
workspace:
|
||||
clean: all
|
||||
steps:
|
||||
- checkout: self
|
||||
fetchDepth: $(fetchDepth)
|
||||
path: $(checkoutPath)
|
||||
- bash: .azure-pipelines/scripts/run-tests.sh "$(entryPoint)" "${{ job.test }}" "$(coverageBranches)"
|
||||
displayName: Run Tests
|
||||
- bash: .azure-pipelines/scripts/process-results.sh
|
||||
condition: succeededOrFailed()
|
||||
displayName: Process Results
|
||||
- bash: .azure-pipelines/scripts/aggregate-coverage.sh "$(Agent.TempDirectory)"
|
||||
condition: eq(variables.haveCoverageData, 'true')
|
||||
displayName: Aggregate Coverage Data
|
||||
- task: PublishTestResults@2
|
||||
condition: eq(variables.haveTestResults, 'true')
|
||||
inputs:
|
||||
testResultsFiles: "$(outputPath)/junit/*.xml"
|
||||
displayName: Publish Test Results
|
||||
- task: PublishPipelineArtifact@1
|
||||
condition: eq(variables.haveBotResults, 'true')
|
||||
displayName: Publish Bot Results
|
||||
inputs:
|
||||
targetPath: "$(outputPath)/bot/"
|
||||
artifactName: "Bot $(System.JobAttempt) $(System.StageDisplayName) $(System.JobDisplayName)"
|
||||
- task: PublishPipelineArtifact@1
|
||||
condition: eq(variables.haveCoverageData, 'true')
|
||||
displayName: Publish Coverage Data
|
||||
inputs:
|
||||
targetPath: "$(Agent.TempDirectory)/coverage/"
|
||||
artifactName: "Coverage $(System.JobAttempt) $(System.StageDisplayName) $(System.JobDisplayName)"
|
||||
141
.github/BOTMETA.yml
vendored
141
.github/BOTMETA.yml
vendored
@@ -327,13 +327,13 @@ files:
|
||||
$module_utils/xfconf.py:
|
||||
maintainers: russoz
|
||||
labels: xfconf
|
||||
$modules/cloud/alicloud/:
|
||||
$modules/cloud/alicloud/ali_:
|
||||
maintainers: xiaozhu36
|
||||
$modules/cloud/atomic/atomic_container.py:
|
||||
maintainers: giuseppe krsacme
|
||||
$modules/cloud/atomic/:
|
||||
$modules/cloud/atomic/atomic_:
|
||||
maintainers: krsacme
|
||||
$modules/cloud/centurylink/:
|
||||
$modules/cloud/centurylink/clc_:
|
||||
maintainers: clc-runner
|
||||
$modules/cloud/dimensiondata/dimensiondata_network.py:
|
||||
maintainers: aimonb tintoy
|
||||
@@ -342,22 +342,22 @@ files:
|
||||
maintainers: tintoy
|
||||
$modules/cloud/heroku/heroku_collaborator.py:
|
||||
maintainers: marns93
|
||||
$modules/cloud/huawei/:
|
||||
$modules/cloud/huawei/hwc_:
|
||||
maintainers: $team_huawei huaweicloud
|
||||
keywords: cloud huawei hwc
|
||||
$modules/cloud/linode/:
|
||||
$modules/cloud/linode/linode:
|
||||
maintainers: $team_linode
|
||||
$modules/cloud/linode/linode.py:
|
||||
maintainers: zbal
|
||||
$modules/cloud/lxc/lxc_container.py:
|
||||
maintainers: cloudnull
|
||||
$modules/cloud/lxd/:
|
||||
$modules/cloud/lxd/lxd_:
|
||||
ignore: hnakamur
|
||||
$modules/cloud/lxd/lxd_profile.py:
|
||||
maintainers: conloos
|
||||
$modules/cloud/lxd/lxd_project.py:
|
||||
maintainers: we10710aa
|
||||
$modules/cloud/memset/:
|
||||
$modules/cloud/memset/memset_:
|
||||
maintainers: glitchcrab
|
||||
$modules/cloud/misc/cloud_init_data_facts.py:
|
||||
maintainers: resmo
|
||||
@@ -378,46 +378,49 @@ files:
|
||||
$modules/cloud/misc/proxmox_template.py:
|
||||
maintainers: UnderGreen
|
||||
ignore: skvidal
|
||||
$modules/cloud/misc/proxmox_disk.py:
|
||||
maintainers: castorsky
|
||||
$modules/cloud/misc/rhevm.py:
|
||||
maintainers: $team_virt TimothyVandenbrande
|
||||
labels: rhevm virt
|
||||
ignore: skvidal
|
||||
keywords: kvm libvirt proxmox qemu
|
||||
$modules/cloud/misc/:
|
||||
$modules/cloud/misc/serverless.py:
|
||||
ignore: ryansb
|
||||
$modules/cloud/misc/terraform.py:
|
||||
maintainers: m-yosefpor rainerleber
|
||||
ignore: ryansb
|
||||
$modules/cloud/misc/xenserver_facts.py:
|
||||
maintainers: caphrim007 cheese
|
||||
labels: xenserver_facts
|
||||
ignore: andyhky
|
||||
$modules/cloud/oneandone/:
|
||||
ignore: andyhky ryansb
|
||||
$modules/cloud/oneandone/oneandone_:
|
||||
maintainers: aajdinov edevenport
|
||||
$modules/cloud/online/:
|
||||
$modules/cloud/online/online_:
|
||||
maintainers: remyleone
|
||||
$modules/cloud/opennebula/:
|
||||
$modules/cloud/opennebula/one_:
|
||||
maintainers: $team_opennebula
|
||||
$modules/cloud/opennebula/one_host.py:
|
||||
maintainers: rvalle
|
||||
$modules/cloud/oracle/oci_vcn.py:
|
||||
maintainers: $team_oracle rohitChaware
|
||||
$modules/cloud/ovh/:
|
||||
$modules/cloud/ovh/ovh_:
|
||||
maintainers: pascalheraud
|
||||
$modules/cloud/ovh/ovh_monthly_billing.py:
|
||||
maintainers: fraff
|
||||
$modules/cloud/packet/packet_device.py:
|
||||
maintainers: baldwinSPC t0mk teebes
|
||||
$modules/cloud/packet/:
|
||||
$modules/cloud/packet/packet_:
|
||||
maintainers: nurfet-becirevic t0mk
|
||||
$modules/cloud/packet/packet_sshkey.py:
|
||||
maintainers: t0mk
|
||||
$modules/cloud/profitbricks/:
|
||||
$modules/cloud/profitbricks/profitbricks:
|
||||
maintainers: baldwinSPC
|
||||
$modules/cloud/pubnub/pubnub_blocks.py:
|
||||
maintainers: parfeon pubnub
|
||||
$modules/cloud/rackspace/rax.py:
|
||||
maintainers: omgjlk sivel
|
||||
$modules/cloud/rackspace/:
|
||||
$modules/cloud/rackspace/rax:
|
||||
ignore: ryansb sivel
|
||||
$modules/cloud/rackspace/rax_cbs.py:
|
||||
maintainers: claco
|
||||
@@ -455,10 +458,14 @@ files:
|
||||
maintainers: smashwilson
|
||||
$modules/cloud/rackspace/rax_queue.py:
|
||||
maintainers: claco
|
||||
$modules/cloud/scaleway/:
|
||||
$modules/cloud/scaleway/scaleway_:
|
||||
maintainers: $team_scaleway
|
||||
$modules/cloud/scaleway/scaleway_compute_private_network.py:
|
||||
maintainers: pastral
|
||||
$modules/cloud/scaleway/scaleway_container_registry.py:
|
||||
maintainers: Lunik
|
||||
$modules/cloud/scaleway/scaleway_container_registry_info.py:
|
||||
maintainers: Lunik
|
||||
$modules/cloud/scaleway/scaleway_database_backup.py:
|
||||
maintainers: guillaume_ro_fr
|
||||
$modules/cloud/scaleway/scaleway_image_info.py:
|
||||
@@ -484,29 +491,39 @@ files:
|
||||
ignore: hekonsek
|
||||
$modules/cloud/scaleway/scaleway_volume_info.py:
|
||||
maintainers: Spredzy
|
||||
$modules/cloud/smartos/:
|
||||
$modules/cloud/smartos/imgadm.py:
|
||||
maintainers: $team_solaris
|
||||
labels: solaris
|
||||
keywords: beadm dladm illumos ipadm nexenta omnios openindiana pfexec smartos solaris sunos zfs zpool
|
||||
$modules/cloud/smartos/nictagadm.py:
|
||||
maintainers: SmithX10
|
||||
maintainers: $team_solaris SmithX10
|
||||
labels: solaris
|
||||
keywords: beadm dladm illumos ipadm nexenta omnios openindiana pfexec smartos solaris sunos zfs zpool
|
||||
$modules/cloud/smartos/smartos_image_info.py:
|
||||
maintainers: $team_solaris
|
||||
labels: solaris
|
||||
keywords: beadm dladm illumos ipadm nexenta omnios openindiana pfexec smartos solaris sunos zfs zpool
|
||||
$modules/cloud/smartos/vmadm.py:
|
||||
maintainers: $team_solaris
|
||||
labels: solaris
|
||||
keywords: beadm dladm illumos ipadm nexenta omnios openindiana pfexec smartos solaris sunos zfs zpool
|
||||
$modules/cloud/softlayer/sl_vm.py:
|
||||
maintainers: mcltn
|
||||
$modules/cloud/spotinst/spotinst_aws_elastigroup.py:
|
||||
maintainers: talzur
|
||||
$modules/cloud/univention/:
|
||||
$modules/cloud/univention/udm_:
|
||||
maintainers: keachi
|
||||
$modules/cloud/webfaction/:
|
||||
$modules/cloud/webfaction/webfaction_:
|
||||
maintainers: quentinsf
|
||||
$modules/cloud/xenserver/:
|
||||
$modules/cloud/xenserver/xenserver_:
|
||||
maintainers: bvitnik
|
||||
$modules/clustering/consul/:
|
||||
$modules/clustering/consul/consul:
|
||||
maintainers: $team_consul
|
||||
ignore: colin-nolan
|
||||
$modules/clustering/etcd3.py:
|
||||
maintainers: evrardjp
|
||||
ignore: vfauth
|
||||
$modules/clustering/nomad/:
|
||||
$modules/clustering/nomad/nomad_:
|
||||
maintainers: chris93111
|
||||
$modules/clustering/pacemaker_cluster.py:
|
||||
maintainers: matbu
|
||||
@@ -514,7 +531,7 @@ files:
|
||||
maintainers: treyperry
|
||||
$modules/database/aerospike/aerospike_migrations.py:
|
||||
maintainers: Alb0t
|
||||
$modules/database/influxdb/:
|
||||
$modules/database/influxdb/influxdb_:
|
||||
maintainers: kamsz
|
||||
$modules/database/influxdb/influxdb_query.py:
|
||||
maintainers: resmo
|
||||
@@ -548,7 +565,7 @@ files:
|
||||
labels: mssql_script
|
||||
$modules/database/saphana/hana_query.py:
|
||||
maintainers: rainerleber
|
||||
$modules/database/vertica/:
|
||||
$modules/database/vertica/vertica_:
|
||||
maintainers: dareko
|
||||
$modules/files/archive.py:
|
||||
maintainers: bendoh
|
||||
@@ -558,6 +575,8 @@ files:
|
||||
maintainers: jpmens noseka1
|
||||
$modules/files/iso_create.py:
|
||||
maintainers: Tomorrow9
|
||||
$modules/files/iso_customize.py:
|
||||
maintainers: ZouYuhua
|
||||
$modules/files/iso_extract.py:
|
||||
maintainers: dagwieers jhoekx ribbons
|
||||
$modules/files/read_csv.py:
|
||||
@@ -571,7 +590,7 @@ files:
|
||||
maintainers: dagwieers magnus919 tbielawa cmprescott sm4rk0
|
||||
labels: m:xml xml
|
||||
ignore: magnus919
|
||||
$modules/identity/ipa/:
|
||||
$modules/identity/ipa/ipa_:
|
||||
maintainers: $team_ipa
|
||||
$modules/identity/ipa/ipa_pwpolicy.py:
|
||||
maintainers: adralioh
|
||||
@@ -579,7 +598,7 @@ files:
|
||||
maintainers: cprh
|
||||
$modules/identity/ipa/ipa_vault.py:
|
||||
maintainers: jparrill
|
||||
$modules/identity/keycloak/:
|
||||
$modules/identity/keycloak/keycloak_:
|
||||
maintainers: $team_keycloak
|
||||
$modules/identity/keycloak/keycloak_authentication.py:
|
||||
maintainers: elfelip Gaetan2907
|
||||
@@ -587,6 +606,8 @@ files:
|
||||
maintainers: Gaetan2907
|
||||
$modules/identity/keycloak/keycloak_client_rolemapping.py:
|
||||
maintainers: Gaetan2907
|
||||
$modules/identity/keycloak/keycloak_user_rolemapping.py:
|
||||
maintainers: bratwurzt
|
||||
$modules/identity/keycloak/keycloak_group.py:
|
||||
maintainers: adamgoossens
|
||||
$modules/identity/keycloak/keycloak_identity_provider.py:
|
||||
@@ -655,10 +676,10 @@ files:
|
||||
maintainers: thaumos
|
||||
$modules/monitoring/rollbar_deployment.py:
|
||||
maintainers: kavu
|
||||
$modules/monitoring/sensu/sensu_:
|
||||
maintainers: dmsimard
|
||||
$modules/monitoring/sensu/sensu_check.py:
|
||||
maintainers: andsens
|
||||
$modules/monitoring/sensu/:
|
||||
maintainers: dmsimard
|
||||
$modules/monitoring/sensu/sensu_silence.py:
|
||||
maintainers: smbambling
|
||||
$modules/monitoring/sensu/sensu_subscription.py:
|
||||
@@ -715,7 +736,7 @@ files:
|
||||
maintainers: nerzhul
|
||||
$modules/net_tools/omapi_host.py:
|
||||
maintainers: amasolov nerzhul
|
||||
$modules/net_tools/pritunl/:
|
||||
$modules/net_tools/pritunl/pritunl_:
|
||||
maintainers: Lowess
|
||||
$modules/net_tools/nmcli.py:
|
||||
maintainers: alcamie101
|
||||
@@ -764,7 +785,8 @@ files:
|
||||
labels: rocketchat
|
||||
ignore: ramondelafuente
|
||||
$modules/notification/say.py:
|
||||
maintainers: $team_ansible_core mpdehaan
|
||||
maintainers: $team_ansible_core
|
||||
ignore: mpdehaan
|
||||
$modules/notification/sendgrid.py:
|
||||
maintainers: makaimc
|
||||
$modules/notification/slack.py:
|
||||
@@ -951,21 +973,24 @@ files:
|
||||
maintainers: $team_suse
|
||||
labels: zypper
|
||||
ignore: matze
|
||||
$modules/remote_management/cobbler/:
|
||||
$modules/remote_management/cobbler/cobbler_:
|
||||
maintainers: dagwieers
|
||||
$modules/remote_management/hpilo/:
|
||||
$modules/remote_management/hpilo/hpilo_:
|
||||
maintainers: haad
|
||||
ignore: dagwieers
|
||||
$modules/remote_management/hpilo/hponcfg.py:
|
||||
maintainers: haad
|
||||
ignore: dagwieers
|
||||
$modules/remote_management/imc/imc_rest.py:
|
||||
maintainers: dagwieers
|
||||
labels: cisco
|
||||
$modules/remote_management/ipmi/:
|
||||
$modules/remote_management/ipmi/ipmi_:
|
||||
maintainers: bgaifullin cloudnull
|
||||
$modules/remote_management/lenovoxcc/:
|
||||
$modules/remote_management/lenovoxcc/xcc_:
|
||||
maintainers: panyy3 renxulei
|
||||
$modules/remote_management/lxca/:
|
||||
$modules/remote_management/lxca/lxca_:
|
||||
maintainers: navalkp prabhosa
|
||||
$modules/remote_management/manageiq/:
|
||||
$modules/remote_management/manageiq/manageiq_:
|
||||
labels: manageiq
|
||||
maintainers: $team_manageiq
|
||||
$modules/remote_management/manageiq/manageiq_alert_profiles.py:
|
||||
@@ -974,9 +999,13 @@ files:
|
||||
maintainers: elad661
|
||||
$modules/remote_management/manageiq/manageiq_group.py:
|
||||
maintainers: evertmulder
|
||||
$modules/remote_management/manageiq/manageiq_policies_info.py:
|
||||
maintainers: russoz $team_manageiq
|
||||
$modules/remote_management/manageiq/manageiq_tags_info.py:
|
||||
maintainers: russoz $team_manageiq
|
||||
$modules/remote_management/manageiq/manageiq_tenant.py:
|
||||
maintainers: evertmulder
|
||||
$modules/remote_management/oneview/:
|
||||
$modules/remote_management/oneview/oneview_:
|
||||
maintainers: adriane-cardozo fgbulsoni tmiotto
|
||||
$modules/remote_management/oneview/oneview_datacenter_info.py:
|
||||
maintainers: aalexmonteiro madhav-bharadwaj ricardogpsf soodpr
|
||||
@@ -984,7 +1013,16 @@ files:
|
||||
maintainers: fgbulsoni
|
||||
$modules/remote_management/oneview/oneview_fcoe_network.py:
|
||||
maintainers: fgbulsoni
|
||||
$modules/remote_management/redfish/:
|
||||
$modules/remote_management/redfish/idrac_:
|
||||
maintainers: $team_redfish
|
||||
ignore: jose-delarosa
|
||||
$modules/remote_management/redfish/ilo_:
|
||||
maintainers: $team_redfish
|
||||
ignore: jose-delarosa
|
||||
$modules/remote_management/redfish/redfish_:
|
||||
maintainers: $team_redfish
|
||||
ignore: jose-delarosa
|
||||
$modules/remote_management/redfish/wdc_:
|
||||
maintainers: $team_redfish
|
||||
ignore: jose-delarosa
|
||||
$modules/remote_management/redfish/wdc_redfish_command.py:
|
||||
@@ -996,7 +1034,7 @@ files:
|
||||
labels: stacki_host
|
||||
$modules/remote_management/wakeonlan.py:
|
||||
maintainers: dagwieers
|
||||
$modules/source_control/bitbucket/:
|
||||
$modules/source_control/bitbucket/bitbucket_:
|
||||
maintainers: catcombo
|
||||
$modules/source_control/bzr.py:
|
||||
maintainers: andreparames
|
||||
@@ -1014,9 +1052,9 @@ files:
|
||||
maintainers: adrianmoisey
|
||||
$modules/source_control/github/github_repo.py:
|
||||
maintainers: atorrescogollo
|
||||
$modules/source_control/github/:
|
||||
$modules/source_control/github/github_:
|
||||
maintainers: stpierre
|
||||
$modules/source_control/gitlab/:
|
||||
$modules/source_control/gitlab/gitlab_:
|
||||
notify: jlozadad
|
||||
maintainers: $team_gitlab
|
||||
keywords: gitlab source_control
|
||||
@@ -1034,13 +1072,13 @@ files:
|
||||
maintainers: remixtj
|
||||
$modules/storage/hpe3par/ss_3par_cpg.py:
|
||||
maintainers: farhan7500 gautamphegde
|
||||
$modules/storage/ibm/:
|
||||
$modules/storage/ibm/ibm_sa_:
|
||||
maintainers: tzure
|
||||
$modules/storage/pmem/pmem.py:
|
||||
maintainers: mizumm
|
||||
$modules/storage/vexata/:
|
||||
$modules/storage/vexata/vexata_:
|
||||
maintainers: vexata
|
||||
$modules/storage/zfs/:
|
||||
$modules/storage/zfs/zfs:
|
||||
maintainers: $team_solaris
|
||||
labels: solaris
|
||||
keywords: beadm dladm illumos ipadm nexenta omnios openindiana pfexec smartos solaris sunos zfs zpool
|
||||
@@ -1048,6 +1086,10 @@ files:
|
||||
maintainers: johanwiren
|
||||
$modules/storage/zfs/zfs_delegate_admin.py:
|
||||
maintainers: natefoo
|
||||
$modules/storage/zfs/zpool_facts:
|
||||
maintainers: $team_solaris
|
||||
labels: solaris
|
||||
keywords: beadm dladm illumos ipadm nexenta omnios openindiana pfexec smartos solaris sunos zfs zpool
|
||||
$modules/system/aix:
|
||||
maintainers: $team_aix
|
||||
labels: aix
|
||||
@@ -1129,7 +1171,8 @@ files:
|
||||
$modules/system/nosh.py:
|
||||
maintainers: tacatac
|
||||
$modules/system/ohai.py:
|
||||
maintainers: $team_ansible_core mpdehaan
|
||||
maintainers: $team_ansible_core
|
||||
ignore: mpdehaan
|
||||
labels: ohai
|
||||
$modules/system/open_iscsi.py:
|
||||
maintainers: srvg
|
||||
@@ -1245,7 +1288,7 @@ files:
|
||||
maintainers: phsmith
|
||||
$modules/web_infrastructure/rundeck_job_executions_info.py:
|
||||
maintainers: phsmith
|
||||
$modules/web_infrastructure/sophos_utm/:
|
||||
$modules/web_infrastructure/sophos_utm/utm_:
|
||||
maintainers: $team_e_spirit
|
||||
keywords: sophos utm
|
||||
$modules/web_infrastructure/sophos_utm/utm_proxy_auth_profile.py:
|
||||
@@ -1315,7 +1358,7 @@ macros:
|
||||
team_opennebula: ilicmilan meerkampdvv rsmontero xorel nilsding
|
||||
team_oracle: manojmeda mross22 nalsaber
|
||||
team_purestorage: bannaych dnix101 genegr lionmax opslounge raekins sdodsley sile16
|
||||
team_redfish: mraineri tomasg2012 xmadsen renxulei rajeevkallur bhavya06
|
||||
team_redfish: mraineri tomasg2012 xmadsen renxulei rajeevkallur bhavya06 jyundt
|
||||
team_rhn: FlossWare alikins barnabycourt vritant
|
||||
team_scaleway: remyleone abarbare
|
||||
team_solaris: bcoca fishman jasperla jpdasma mator scathatheworm troy2914 xen0l
|
||||
|
||||
61
.github/workflows/codeql-analysis.yml
vendored
61
.github/workflows/codeql-analysis.yml
vendored
@@ -1,61 +0,0 @@
|
||||
---
|
||||
# Copyright (c) Ansible Project
|
||||
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||
# SPDX-License-Identifier: GPL-3.0-or-later
|
||||
|
||||
name: "Code scanning - action"
|
||||
|
||||
on:
|
||||
schedule:
|
||||
- cron: '26 19 * * 1'
|
||||
|
||||
permissions:
|
||||
contents: read
|
||||
|
||||
jobs:
|
||||
CodeQL-Build:
|
||||
|
||||
permissions:
|
||||
actions: read # for github/codeql-action/init to get workflow details
|
||||
contents: read # for actions/checkout to fetch code
|
||||
security-events: write # for github/codeql-action/autobuild to send a status report
|
||||
runs-on: ubuntu-latest
|
||||
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v3
|
||||
with:
|
||||
# We must fetch at least the immediate parents so that if this is
|
||||
# a pull request then we can checkout the head.
|
||||
fetch-depth: 2
|
||||
|
||||
# If this run was triggered by a pull request event, then checkout
|
||||
# the head of the pull request instead of the merge commit.
|
||||
- run: git checkout HEAD^2
|
||||
if: ${{ github.event_name == 'pull_request' }}
|
||||
|
||||
# Initializes the CodeQL tools for scanning.
|
||||
- name: Initialize CodeQL
|
||||
uses: github/codeql-action/init@v2
|
||||
# Override language selection by uncommenting this and choosing your languages
|
||||
# with:
|
||||
# languages: go, javascript, csharp, python, cpp, java
|
||||
|
||||
# Autobuild attempts to build any compiled languages (C/C++, C#, or Java).
|
||||
# If this step fails, then you should remove it and run the build manually (see below)
|
||||
- name: Autobuild
|
||||
uses: github/codeql-action/autobuild@v2
|
||||
|
||||
# ℹ️ Command-line programs to run using the OS shell.
|
||||
# 📚 https://git.io/JvXDl
|
||||
|
||||
# ✏️ If the Autobuild fails above, remove it and uncomment the following three lines
|
||||
# and modify them (or add more) to build your code if your project
|
||||
# uses a compiled language
|
||||
|
||||
#- run: |
|
||||
# make bootstrap
|
||||
# make release
|
||||
|
||||
- name: Perform CodeQL Analysis
|
||||
uses: github/codeql-action/analyze@v2
|
||||
32
.github/workflows/reuse.yml
vendored
32
.github/workflows/reuse.yml
vendored
@@ -1,32 +0,0 @@
|
||||
---
|
||||
# Copyright (c) Ansible Project
|
||||
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||
# SPDX-License-Identifier: GPL-3.0-or-later
|
||||
|
||||
name: Verify REUSE
|
||||
|
||||
on:
|
||||
push:
|
||||
branches: [main]
|
||||
pull_request:
|
||||
branches: [main]
|
||||
# Run CI once per day (at 07:30 UTC)
|
||||
schedule:
|
||||
- cron: '30 7 * * *'
|
||||
|
||||
jobs:
|
||||
check:
|
||||
permissions:
|
||||
contents: read
|
||||
runs-on: ubuntu-latest
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
|
||||
- name: Install dependencies
|
||||
run: |
|
||||
pip install reuse
|
||||
|
||||
- name: Check REUSE compliance
|
||||
run: |
|
||||
reuse lint
|
||||
3
.gitignore
vendored
3
.gitignore
vendored
@@ -509,3 +509,6 @@ $RECYCLE.BIN/
|
||||
*.lnk
|
||||
|
||||
# End of https://www.toptal.com/developers/gitignore/api/vim,git,macos,linux,pydev,emacs,dotenv,python,windows,webstorm,pycharm+all,jupyternotebooks
|
||||
|
||||
# Integration tests cloud configs
|
||||
tests/integration/cloud-config-*.ini
|
||||
|
||||
@@ -1,23 +0,0 @@
|
||||
---
|
||||
# Copyright (c) Ansible Project
|
||||
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||
# SPDX-License-Identifier: GPL-3.0-or-later
|
||||
|
||||
repos:
|
||||
- repo: https://github.com/pre-commit/pre-commit-hooks
|
||||
rev: v4.0.1
|
||||
hooks:
|
||||
- id: trailing-whitespace
|
||||
- id: end-of-file-fixer
|
||||
- id: mixed-line-ending
|
||||
args: [--fix=lf]
|
||||
- id: fix-encoding-pragma
|
||||
- id: check-ast
|
||||
- id: check-merge-conflict
|
||||
- id: check-symlinks
|
||||
- repo: https://github.com/pre-commit/pygrep-hooks
|
||||
rev: v1.9.0
|
||||
hooks:
|
||||
- id: rst-backticks
|
||||
types: [file]
|
||||
files: changelogs/fragments/.*\.(yml|yaml)$
|
||||
338
CHANGELOG.rst
338
CHANGELOG.rst
@@ -6,6 +6,344 @@ Community General Release Notes
|
||||
|
||||
This changelog describes changes after version 4.0.0.
|
||||
|
||||
v5.8.10
|
||||
=======
|
||||
|
||||
Release Summary
|
||||
---------------
|
||||
|
||||
Final maintenance release of community.general major version 5.
|
||||
|
||||
Major Changes
|
||||
-------------
|
||||
|
||||
- The community.general 5.x.y release stream is now effectively **End of Life**. No more releases will be made, and regular CI runs will stop.
|
||||
|
||||
v5.8.9
|
||||
======
|
||||
|
||||
Release Summary
|
||||
---------------
|
||||
|
||||
Bugfix release.
|
||||
|
||||
Bugfixes
|
||||
--------
|
||||
|
||||
- passwordstore lookup plugin - make compatible with ansible-core 2.16 (https://github.com/ansible-collections/community.general/pull/6447).
|
||||
|
||||
v5.8.8
|
||||
======
|
||||
|
||||
Release Summary
|
||||
---------------
|
||||
|
||||
Regular bugfix release.
|
||||
|
||||
Bugfixes
|
||||
--------
|
||||
|
||||
- archive - reduce RAM usage by generating CRC32 checksum over chunks (https://github.com/ansible-collections/community.general/pull/6274).
|
||||
- flatpak - fixes idempotency detection issues. In some cases the module could fail to properly detect already existing Flatpaks because of a parameter which only checks the installed apps (https://github.com/ansible-collections/community.general/pull/6289).
|
||||
- icinga2_host - fix the data structure sent to Icinga to make use of host templates and template vars (https://github.com/ansible-collections/community.general/pull/6286).
|
||||
- idrac_redfish_command - allow user to specify ``resource_id`` for ``CreateBiosConfigJob`` to specify an exact manager (https://github.com/ansible-collections/community.general/issues/2090).
|
||||
- ini_file - make ``section`` parameter not required so it is possible to pass ``null`` as a value. This only was possible in the past due to a bug in ansible-core that now has been fixed (https://github.com/ansible-collections/community.general/pull/6404).
|
||||
- rhsm_release - make ``release`` parameter not required so it is possible to pass ``null`` as a value. This only was possible in the past due to a bug in ansible-core that now has been fixed (https://github.com/ansible-collections/community.general/pull/6401).
|
||||
|
||||
v5.8.7
|
||||
======
|
||||
|
||||
Release Summary
|
||||
---------------
|
||||
|
||||
Bugfix release.
|
||||
|
||||
Bugfixes
|
||||
--------
|
||||
|
||||
- archive - avoid deprecated exception class on Python 3 (https://github.com/ansible-collections/community.general/pull/6180).
|
||||
- influxdb_user - fix running in check mode when the user does not exist yet (https://github.com/ansible-collections/community.general/pull/6111).
|
||||
- interfaces_file - fix reading options in lines not starting with a space (https://github.com/ansible-collections/community.general/issues/6120).
|
||||
- jail connection plugin - add ``inventory_hostname`` to vars under ``remote_addr``. This is needed for compatibility with ansible-core 2.13 (https://github.com/ansible-collections/community.general/pull/6118).
|
||||
- memset - fix memset urlerror handling (https://github.com/ansible-collections/community.general/pull/6114).
|
||||
- nmcli - fixed idempotency issue for bridge connections. Module forced default value of ``bridge.priority`` to nmcli if not set; if ``bridge.stp`` is disabled nmcli ignores it and keeps the default (https://github.com/ansible-collections/community.general/issues/3216, https://github.com/ansible-collections/community.general/issues/4683).
|
||||
- nmcli - fixed idempotency issue when module params is set to ``may_fail4=false`` and ``method4=disabled``; in this case nmcli ignores change and keeps their own default value ``yes`` (https://github.com/ansible-collections/community.general/pull/6106).
|
||||
- nmcli - implemented changing mtu value on vlan interfaces (https://github.com/ansible-collections/community.general/issues/4387).
|
||||
- xenorchestra inventory plugin - fix failure to receive objects from server due to not checking the id of the response (https://github.com/ansible-collections/community.general/pull/6227).
|
||||
|
||||
v5.8.6
|
||||
======
|
||||
|
||||
Release Summary
|
||||
---------------
|
||||
|
||||
Regular bugfix release.
|
||||
|
||||
Bugfixes
|
||||
--------
|
||||
|
||||
- cartesian and flattened lookup plugins - adjust to parameter deprecation in ansible-core 2.14's ``listify_lookup_plugin_terms`` helper function (https://github.com/ansible-collections/community.general/pull/6074).
|
||||
- cloudflare_dns - fixed the idempotency for SRV DNS records (https://github.com/ansible-collections/community.general/pull/5972).
|
||||
- cloudflare_dns - fixed the possibility of setting a root-level SRV DNS record (https://github.com/ansible-collections/community.general/pull/5972).
|
||||
- github_webhook - fix always changed state when no secret is provided (https://github.com/ansible-collections/community.general/pull/5994).
|
||||
- keycloak_client - fix accidental replacement of value for attribute ``saml.signing.private.key`` with ``no_log`` in wrong contexts (https://github.com/ansible-collections/community.general/pull/5934).
|
||||
- lxd_* modules, lxd inventory plugin - fix TLS/SSL certificate validation problems by using the correct purpose when creating the TLS context (https://github.com/ansible-collections/community.general/issues/5616, https://github.com/ansible-collections/community.general/pull/6034).
|
||||
- nmcli - fix change handling of values specified as an integer 0 (https://github.com/ansible-collections/community.general/pull/5431).
|
||||
- nmcli - fix failure to handle WIFI settings when connection type not specified (https://github.com/ansible-collections/community.general/pull/5431).
|
||||
- nmcli - fix improper detection of changes to ``wifi.wake-on-wlan`` (https://github.com/ansible-collections/community.general/pull/5431).
|
||||
- nmcli - order is significant for lists of addresses (https://github.com/ansible-collections/community.general/pull/6048).
|
||||
- terraform and timezone - slight refactoring to avoid linter reporting potentially undefined variables (https://github.com/ansible-collections/community.general/pull/5933).
|
||||
- various plugins and modules - remove unnecessary imports (https://github.com/ansible-collections/community.general/pull/5940).
|
||||
- zfs_delegate_admin - zfs allow output can now be parsed when uids/gids are not known to the host system (https://github.com/ansible-collections/community.general/pull/5943).
|
||||
- zypper - make package managing work on readonly filesystem of openSUSE MicroOS (https://github.com/ansible-collections/community.general/pull/5615).
|
||||
|
||||
v5.8.5
|
||||
======
|
||||
|
||||
Release Summary
|
||||
---------------
|
||||
|
||||
Regular bugfix release.
|
||||
|
||||
Bugfixes
|
||||
--------
|
||||
|
||||
- ModuleHelper - fix bug when adjusting the name of reserved output variables (https://github.com/ansible-collections/community.general/pull/5755).
|
||||
- alternatives - support subcommands on Fedora 37, which uses ``follower`` instead of ``slave`` (https://github.com/ansible-collections/community.general/pull/5794).
|
||||
- bitwarden lookup plugin - clarify what to do, if the bitwarden vault is not unlocked (https://github.com/ansible-collections/community.general/pull/5811).
|
||||
- dig lookup plugin - correctly handle DNSKEY record type's ``algorithm`` field (https://github.com/ansible-collections/community.general/pull/5914).
|
||||
- gem - fix hang due to interactive prompt for confirmation on specific version uninstall (https://github.com/ansible-collections/community.general/pull/5751).
|
||||
- gitlab_deploy_key - also update ``title`` and not just ``can_push`` (https://github.com/ansible-collections/community.general/pull/5888).
|
||||
- keycloak_user_federation - fixes federation creation issue. When a new federation was created and at the same time a default / standard mapper was also changed / updated the creation process failed as a bad None set variable led to a bad malformed url request (https://github.com/ansible-collections/community.general/pull/5750).
|
||||
- keycloak_user_federation - fixes idempotency detection issues. In some cases the module could fail to properly detect already existing user federations because of a buggy seemingly superfluous extra query parameter (https://github.com/ansible-collections/community.general/pull/5732).
|
||||
- loganalytics callback plugin - adjust type of callback to ``notification``, it was incorrectly classified as ``aggregate`` before (https://github.com/ansible-collections/community.general/pull/5761).
|
||||
- logdna callback plugin - adjust type of callback to ``notification``, it was incorrectly classified as ``aggregate`` before (https://github.com/ansible-collections/community.general/pull/5761).
|
||||
- logstash callback plugin - adjust type of callback to ``notification``, it was incorrectly classified as ``aggregate`` before (https://github.com/ansible-collections/community.general/pull/5761).
|
||||
- nsupdate - fix zone lookup. The SOA record for an existing zone is returned as an answer RR and not as an authority RR (https://github.com/ansible-collections/community.general/issues/5817, https://github.com/ansible-collections/community.general/pull/5818).
|
||||
- redfish_utils - removed basic auth HTTP header when performing a GET on the service root resource and when performing a POST to the session collection (https://github.com/ansible-collections/community.general/issues/5886).
|
||||
- splunk callback plugin - adjust type of callback to ``notification``, it was incorrectly classified as ``aggregate`` before (https://github.com/ansible-collections/community.general/pull/5761).
|
||||
- sumologic callback plugin - adjust type of callback to ``notification``, it was incorrectly classified as ``aggregate`` before (https://github.com/ansible-collections/community.general/pull/5761).
|
||||
- syslog_json callback plugin - adjust type of callback to ``notification``, it was incorrectly classified as ``aggregate`` before (https://github.com/ansible-collections/community.general/pull/5761).
|
||||
- terraform - fix ``current`` workspace never getting appended to the ``all`` key in the ``workspace_ctf`` object (https://github.com/ansible-collections/community.general/pull/5735).
|
||||
- terraform - fix ``terraform init`` failure when there are multiple workspaces on the remote backend and when ``default`` workspace is missing by setting ``TF_WORKSPACE`` environmental variable to the value of ``workspace`` when used (https://github.com/ansible-collections/community.general/pull/5735).
|
||||
- terraform module - disable ANSI escape sequences during validation phase (https://github.com/ansible-collections/community.general/pull/5843).
|
||||
- xml - fixed a bug where empty ``children`` list would not be set (https://github.com/ansible-collections/community.general/pull/5808).
|
||||
|
||||
v5.8.4
|
||||
======
|
||||
|
||||
Release Summary
|
||||
---------------
|
||||
|
||||
Regular bugfix release.
|
||||
|
||||
Bugfixes
|
||||
--------
|
||||
|
||||
- gconftool2 - fix crash that prevents setting a key (https://github.com/ansible-collections/community.general/issues/5591, https://github.com/ansible-collections/community.general/pull/5687).
|
||||
- gitlab_group_variables - fix dropping variables accidentally when GitLab introduced new properties (https://github.com/ansible-collections/community.general/pull/5667).
|
||||
- gitlab_project_variables - fix dropping variables accidentally when GitLab introduced new properties (https://github.com/ansible-collections/community.general/pull/5667).
|
||||
- lxc_container - fix the arguments of the lxc command which broke the creation and cloning of containers (https://github.com/ansible-collections/community.general/issues/5578).
|
||||
- opkg - fix issue that ``force=reinstall`` would not reinstall an existing package (https://github.com/ansible-collections/community.general/pull/5705).
|
||||
- proxmox_disk - fixed possible issues with redundant ``vmid`` parameter (https://github.com/ansible-collections/community.general/issues/5492, https://github.com/ansible-collections/community.general/pull/5672).
|
||||
- proxmox_nic - fixed possible issues with redundant ``vmid`` parameter (https://github.com/ansible-collections/community.general/issues/5492, https://github.com/ansible-collections/community.general/pull/5672).
|
||||
- unixy callback plugin - fix typo introduced when updating to use Ansible's configuration manager for handling options (https://github.com/ansible-collections/community.general/issues/5600).
|
||||
|
||||
v5.8.3
|
||||
======
|
||||
|
||||
Release Summary
|
||||
---------------
|
||||
|
||||
Regular bugfix release.
|
||||
|
||||
Bugfixes
|
||||
--------
|
||||
|
||||
- keycloak_client_rolemapping - calculate ``proposed`` and ``after`` return values properly (https://github.com/ansible-collections/community.general/pull/5619).
|
||||
- keycloak_client_rolemapping - remove only listed mappings with ``state=absent`` (https://github.com/ansible-collections/community.general/pull/5619).
|
||||
- proxmox inventory plugin - handle tags delimited by semicolon instead of comma, which happens from Proxmox 7.3 on (https://github.com/ansible-collections/community.general/pull/5602).
|
||||
- vdo - now uses ``yaml.safe_load()`` to parse command output instead of the deprecated ``yaml.load()`` which is potentially unsafe. Using ``yaml.load()`` without explicitly setting a ``Loader=`` is also an error in pyYAML 6.0 (https://github.com/ansible-collections/community.general/pull/5632).
|
||||
- vmadm - fix for index out of range error in ``get_vm_uuid`` (https://github.com/ansible-collections/community.general/pull/5628).
|
||||
|
||||
v5.8.2
|
||||
======
|
||||
|
||||
Release Summary
|
||||
---------------
|
||||
|
||||
Regular bugfix release.
|
||||
|
||||
Bugfixes
|
||||
--------
|
||||
|
||||
- chroot connection plugin - add ``inventory_hostname`` to vars under ``remote_addr``. This is needed for compatibility with ansible-core 2.13 (https://github.com/ansible-collections/community.general/pull/5570).
|
||||
- cmd_runner module utils - fixed bug when handling default cases in ``cmd_runner_fmt.as_map()`` (https://github.com/ansible-collections/community.general/pull/5538).
|
||||
- cmd_runner module utils - formatting arguments ``cmd_runner_fmt.as_fixed()`` was expecting a non-existing argument (https://github.com/ansible-collections/community.general/pull/5538).
|
||||
- unixy callback plugin - fix plugin to work with ansible-core 2.14 by using Ansible's configuration manager for handling options (https://github.com/ansible-collections/community.general/issues/5600).
|
||||
|
||||
v5.8.1
|
||||
======
|
||||
|
||||
Release Summary
|
||||
---------------
|
||||
|
||||
Regular bugfix release.
|
||||
|
||||
Minor Changes
|
||||
-------------
|
||||
|
||||
- passwordstore lookup plugin - improve error messages to include stderr (https://github.com/ansible-collections/community.general/pull/5436)
|
||||
|
||||
Deprecated Features
|
||||
-------------------
|
||||
|
||||
- Please note that some tools, like the VScode plugin (https://github.com/ansible/vscode-ansible/issues/573), or ``ansible-doc --list --type module``, suggest to replace the correct FQCNs for modules and actions in community.general with internal names that have more than three components. For example, ``community.general.ufw`` is suggested to be replaced by ``community.general.system.ufw``. While these longer names do work, they are considered **internal names** by the collection and are subject to change and be removed at all time. They **will** be removed in community.general 6.0.0 and result in deprecation messages. Avoid using these internal names, and use general three-component FQCNs (``community.general.<name_of_module>``) instead (https://github.com/ansible-collections/community.general/pull/5373).
|
||||
|
||||
Bugfixes
|
||||
--------
|
||||
|
||||
- dependent lookup plugin - avoid warning on deprecated parameter for ``Templar.template()`` (https://github.com/ansible-collections/community.general/pull/5543).
|
||||
- iso_create - the module sometimes failed to add folders for Joliet and UDF formats (https://github.com/ansible-collections/community.general/issues/5275).
|
||||
- ldap_attrs - fix bug which caused a ``Bad search filter`` error. The error was occurring when the ldap attribute value contained special characters such as ``(`` or ``*`` (https://github.com/ansible-collections/community.general/issues/5434, https://github.com/ansible-collections/community.general/pull/5435).
|
||||
- nmcli - fix int options idempotence (https://github.com/ansible-collections/community.general/issues/4998).
|
||||
- nsupdate - fix silent failures when updating ``NS`` entries from Bind9 managed DNS zones (https://github.com/ansible-collections/community.general/issues/4657).
|
||||
- one_vm - avoid splitting labels that are ``None`` (https://github.com/ansible-collections/community.general/pull/5489).
|
||||
- proxmox_disk - avoid duplicate ``vmid`` reference (https://github.com/ansible-collections/community.general/issues/5492, https://github.com/ansible-collections/community.general/pull/5493).
|
||||
- snap - allow values in the ``options`` parameter to contain whitespaces (https://github.com/ansible-collections/community.general/pull/5475).
|
||||
|
||||
v5.8.0
|
||||
======
|
||||
|
||||
Release Summary
|
||||
---------------
|
||||
|
||||
Regular feature and bugfix release.
|
||||
|
||||
Major Changes
|
||||
-------------
|
||||
|
||||
- newrelic_deployment - removed New Relic v1 API, added support for v2 API (https://github.com/ansible-collections/community.general/pull/5341).
|
||||
|
||||
Minor Changes
|
||||
-------------
|
||||
|
||||
- consul - minor refactoring (https://github.com/ansible-collections/community.general/pull/5367).
|
||||
- lxc_container - minor refactoring (https://github.com/ansible-collections/community.general/pull/5358).
|
||||
- nmcli - add ``transport_mode`` configuration for Infiniband devices (https://github.com/ansible-collections/community.general/pull/5361).
|
||||
- opentelemetry callback plugin - send logs. This can be disabled by setting ``disable_logs=true`` (https://github.com/ansible-collections/community.general/pull/4175).
|
||||
- portage - add knobs for Portage's ``--backtrack`` and ``--with-bdeps`` options (https://github.com/ansible-collections/community.general/pull/5349).
|
||||
- portage - use Portage's python module instead of calling gentoolkit-provided program in shell (https://github.com/ansible-collections/community.general/pull/5349).
|
||||
- znode - possibility to use ZooKeeper ACL authentication (https://github.com/ansible-collections/community.general/pull/5306).
|
||||
|
||||
Breaking Changes / Porting Guide
|
||||
--------------------------------
|
||||
|
||||
- newrelic_deployment - ``revision`` is required for v2 API (https://github.com/ansible-collections/community.general/pull/5341).
|
||||
|
||||
Deprecated Features
|
||||
-------------------
|
||||
|
||||
- ArgFormat module utils - deprecated along ``CmdMixin``, in favor of the ``cmd_runner_fmt`` module util (https://github.com/ansible-collections/community.general/pull/5370).
|
||||
- CmdMixin module utils - deprecated in favor of the ``CmdRunner`` module util (https://github.com/ansible-collections/community.general/pull/5370).
|
||||
- CmdModuleHelper module utils - deprecated in favor of the ``CmdRunner`` module util (https://github.com/ansible-collections/community.general/pull/5370).
|
||||
- CmdStateModuleHelper module utils - deprecated in favor of the ``CmdRunner`` module util (https://github.com/ansible-collections/community.general/pull/5370).
|
||||
- django_manage - support for Django releases older than 4.1 has been deprecated and will be removed in community.general 9.0.0 (https://github.com/ansible-collections/community.general/pull/5400).
|
||||
- django_manage - support for the commands ``cleanup``, ``syncdb`` and ``validate`` that have been deprecated in Django long time ago will be removed in community.general 9.0.0 (https://github.com/ansible-collections/community.general/pull/5400).
|
||||
- django_manage - the behavior of "creating the virtual environment when missing" is being deprecated and will be removed in community.general version 9.0.0 (https://github.com/ansible-collections/community.general/pull/5405).
|
||||
- newrelic_deployment - ``appname`` and ``environment`` are no longer valid options in the v2 API. They will be removed in community.general 7.0.0 (https://github.com/ansible-collections/community.general/pull/5341).
|
||||
|
||||
Bugfixes
|
||||
--------
|
||||
|
||||
- archive - avoid crash when ``lzma`` is not present and ``format`` is not ``xz`` (https://github.com/ansible-collections/community.general/pull/5393).
|
||||
- ldap_attrs - fix ordering issue by ignoring the ``{x}`` prefix on attribute values (https://github.com/ansible-collections/community.general/issues/977, https://github.com/ansible-collections/community.general/pull/5385).
|
||||
- opentelemetry callback plugin - support opentelemetry-api 1.13.0 that removed support for ``_time_ns`` (https://github.com/ansible-collections/community.general/pull/5342).
|
||||
- pfexec become plugin - remove superfluous quotes preventing exe wrap from working as expected (https://github.com/ansible-collections/community.general/issues/3671, https://github.com/ansible-collections/community.general/pull/3889).
|
||||
- pkgng - fix case when ``pkg`` fails when trying to upgrade all packages (https://github.com/ansible-collections/community.general/issues/5363).
|
||||
- proxmox_kvm - fix ``agent`` parameter when boolean value is specified (https://github.com/ansible-collections/community.general/pull/5198).
|
||||
- virtualbox inventory plugin - skip parsing values with keys that have both a value and nested data. Skip parsing values that are nested more than two keys deep (https://github.com/ansible-collections/community.general/issues/5332, https://github.com/ansible-collections/community.general/pull/5348).
|
||||
- xenserver_facts - fix broken ``AnsibleModule`` call that prevented the module from working at all (https://github.com/ansible-collections/community.general/pull/5383).
|
||||
|
||||
New Modules
|
||||
-----------
|
||||
|
||||
Cloud
|
||||
~~~~~
|
||||
|
||||
scaleway
|
||||
^^^^^^^^
|
||||
|
||||
- scaleway_container_registry - Scaleway Container registry management module
|
||||
- scaleway_container_registry_info - Scaleway Container registry info module
|
||||
|
||||
Files
|
||||
~~~~~
|
||||
|
||||
- iso_customize - Add/remove/change files in ISO file
|
||||
|
||||
Remote Management
|
||||
~~~~~~~~~~~~~~~~~
|
||||
|
||||
manageiq
|
||||
^^^^^^^^
|
||||
|
||||
- manageiq_policies_info - Listing of resource policy_profiles in ManageIQ
|
||||
- manageiq_tags_info - Retrieve resource tags in ManageIQ
|
||||
|
||||
v5.7.0
|
||||
======
|
||||
|
||||
Release Summary
|
||||
---------------
|
||||
|
||||
Regular feature and bugfix release.
|
||||
|
||||
Minor Changes
|
||||
-------------
|
||||
|
||||
- bitwarden lookup plugin - add option ``search`` to search for other attributes than name (https://github.com/ansible-collections/community.general/pull/5297).
|
||||
- machinectl become plugin - combine the success command when building the become command to be consistent with other become plugins (https://github.com/ansible-collections/community.general/pull/5287).
|
||||
- netcup_dnsapi - add ``timeout`` parameter (https://github.com/ansible-collections/community.general/pull/5301).
|
||||
- proxmox module utils, the proxmox* modules - add ``api_task_ok`` helper to standardize API task status checks across all proxmox modules (https://github.com/ansible-collections/community.general/pull/5274).
|
||||
- proxmox_snap - add ``unbind`` param to support snapshotting containers with configured mountpoints (https://github.com/ansible-collections/community.general/pull/5274).
|
||||
- redfish_config - add ``SetSessionService`` to set default session timeout policy (https://github.com/ansible-collections/community.general/issues/5008).
|
||||
- terraform - adds capability to handle complex variable structures for ``variables`` parameter in the module. This must be enabled with the new ``complex_vars`` parameter (https://github.com/ansible-collections/community.general/pull/4797).
|
||||
- terraform - run ``terraform init`` with ``-no-color`` not to mess up the stdout of the task (https://github.com/ansible-collections/community.general/pull/5147).
|
||||
|
||||
Deprecated Features
|
||||
-------------------
|
||||
|
||||
- lxc_container - the module will no longer make any effort to support Python 2 (https://github.com/ansible-collections/community.general/pull/5304).
|
||||
|
||||
Bugfixes
|
||||
--------
|
||||
|
||||
- ini_file - minor refactor fixing a python lint error (https://github.com/ansible-collections/community.general/pull/5307).
|
||||
- locale_gen - fix support for Ubuntu (https://github.com/ansible-collections/community.general/issues/5281).
|
||||
- lxc_container - the module has been updated to support Python 3 (https://github.com/ansible-collections/community.general/pull/5304).
|
||||
- nmcli - fix error when setting previously unset MAC address, ``gsm.apn`` or ``vpn.data``: current values were being normalized without checking if they might be ``None`` (https://github.com/ansible-collections/community.general/pull/5291).
|
||||
- redhat_subscription - make module idempotent when ``pool_ids`` are used (https://github.com/ansible-collections/community.general/issues/5313).
|
||||
|
||||
New Modules
|
||||
-----------
|
||||
|
||||
Cloud
|
||||
~~~~~
|
||||
|
||||
misc
|
||||
^^^^
|
||||
|
||||
- proxmox_disk - Management of a disk of a Qemu(KVM) VM in a Proxmox VE cluster.
|
||||
|
||||
Identity
|
||||
~~~~~~~~
|
||||
|
||||
keycloak
|
||||
^^^^^^^^
|
||||
|
||||
- keycloak_user_rolemapping - Allows administration of Keycloak user_rolemapping with the Keycloak API
|
||||
|
||||
v5.6.0
|
||||
======
|
||||
|
||||
|
||||
@@ -31,7 +31,7 @@ Also, consider taking up a valuable, reviewed, but abandoned pull request which
|
||||
* Try committing your changes with an informative but short commit message.
|
||||
* Do not squash your commits and force-push to your branch if not needed. Reviews of your pull request are much easier with individual commits to comprehend the pull request history. All commits of your pull request branch will be squashed into one commit by GitHub upon merge.
|
||||
* Do not add merge commits to your PR. The bot will complain and you will have to rebase ([instructions for rebasing](https://docs.ansible.com/ansible/latest/dev_guide/developing_rebasing.html)) to remove them before your PR can be merged. To avoid that git automatically does merges during pulls, you can configure it to do rebases instead by running `git config pull.rebase true` inside the repository checkout.
|
||||
* Make sure your PR includes a [changelog fragment](https://docs.ansible.com/ansible/devel/community/development_process.html#changelogs-how-to). (You must not include a fragment for new modules or new plugins, except for test and filter plugins. Also you shouldn't include one for docs-only changes. If you're not sure, simply don't include one, we'll tell you whether one is needed or not :) )
|
||||
* Make sure your PR includes a [changelog fragment](https://docs.ansible.com/ansible/devel/community/development_process.html#creating-changelog-fragments). (You must not include a fragment for new modules or new plugins, except for test and filter plugins. Also you shouldn't include one for docs-only changes. If you're not sure, simply don't include one, we'll tell you whether one is needed or not :) )
|
||||
* Avoid reformatting unrelated parts of the codebase in your PR. These types of changes will likely be requested for reversion, create additional work for reviewers, and may cause approval to be delayed.
|
||||
|
||||
You can also read [our Quick-start development guide](https://github.com/ansible/community-docs/blob/main/create_pr_quick_start_guide.rst).
|
||||
@@ -131,19 +131,3 @@ Creating new modules and plugins requires a bit more work than other Pull Reques
|
||||
listed as `maintainers` will be pinged for new issues and PRs that modify the module/plugin or its tests.
|
||||
|
||||
When you add a new plugin/module, we expect that you perform maintainer duty for at least some time after contributing it.
|
||||
|
||||
## pre-commit
|
||||
|
||||
To help ensure high-quality contributions this repository includes a [pre-commit](https://pre-commit.com) configuration which
|
||||
corrects and tests against common issues that would otherwise cause CI to fail. To begin using these pre-commit hooks see
|
||||
the [Installation](#installation) section below.
|
||||
|
||||
This is optional and not required to contribute to this repository.
|
||||
|
||||
### Installation
|
||||
|
||||
Follow the [instructions](https://pre-commit.com/#install) provided with pre-commit and run `pre-commit install` under the repository base. If for any reason you would like to disable the pre-commit hooks run `pre-commit uninstall`.
|
||||
|
||||
This is optional to run it locally.
|
||||
|
||||
You can trigger it locally with `pre-commit run --all-files` or even to run only for a given file `pre-commit run --files YOUR_FILE`.
|
||||
|
||||
13
README.md
13
README.md
@@ -7,6 +7,7 @@ SPDX-License-Identifier: GPL-3.0-or-later
|
||||
# Community General Collection
|
||||
|
||||
[](https://dev.azure.com/ansible/community.general/_build?definitionId=31)
|
||||
[](https://github.com/ansible-collections/community.general/actions)
|
||||
[](https://codecov.io/gh/ansible-collections/community.general)
|
||||
|
||||
This repository contains the `community.general` Ansible Collection. The collection is a part of the Ansible package and includes many modules and plugins supported by Ansible community which are not part of more specialized community collections.
|
||||
@@ -23,7 +24,7 @@ If you encounter abusive behavior violating the [Ansible Code of Conduct](https:
|
||||
|
||||
## Tested with Ansible
|
||||
|
||||
Tested with the current ansible-core 2.11, ansible-core 2.12, ansible-core 2.13 releases and the current development version of ansible-core. Ansible-core versions before 2.11.0 are not supported. This includes all ansible-base 2.10 and Ansible 2.9 releases.
|
||||
Tested with the current ansible-core 2.11, ansible-core 2.12, ansible-core 2.13, ansible-core 2.14, and ansible-core 2.15 releases. Ansible-core versions before 2.11.0 are not supported. This includes all ansible-base 2.10 and Ansible 2.9 releases.
|
||||
|
||||
Parts of this collection will not work with ansible-core 2.11 on Python 3.12+.
|
||||
|
||||
@@ -33,13 +34,13 @@ Some modules and plugins require external libraries. Please check the requiremen
|
||||
|
||||
## Included content
|
||||
|
||||
Please check the included content on the [Ansible Galaxy page for this collection](https://galaxy.ansible.com/community/general) or the [documentation on the Ansible docs site](https://docs.ansible.com/ansible/latest/collections/community/general/).
|
||||
Please check the included content on the [Ansible Galaxy page for this collection](https://galaxy.ansible.com/ui/repo/published/community/general/) or the [documentation on the Ansible docs site](https://docs.ansible.com/ansible/latest/collections/community/general/).
|
||||
|
||||
## Using this collection
|
||||
|
||||
This collection is shipped with the Ansible package. So if you have it installed, no more action is required.
|
||||
|
||||
If you have a minimal installation (only Ansible Core installed) or you want to use the latest version of the collection along with the whole Ansible package, you need to install the collection from [Ansible Galaxy](https://galaxy.ansible.com/community/general) manually with the `ansible-galaxy` command-line tool:
|
||||
If you have a minimal installation (only Ansible Core installed) or you want to use the latest version of the collection along with the whole Ansible package, you need to install the collection from [Ansible Galaxy](https://galaxy.ansible.com/ui/repo/published/community/general/) manually with the `ansible-galaxy` command-line tool:
|
||||
|
||||
ansible-galaxy collection install community.general
|
||||
|
||||
@@ -56,7 +57,7 @@ Note that if you install the collection manually, it will not be upgraded automa
|
||||
ansible-galaxy collection install community.general --upgrade
|
||||
```
|
||||
|
||||
You can also install a specific version of the collection, for example, if you need to downgrade when something is broken in the latest version (please report an issue in this repository). Use the following syntax where `X.Y.Z` can be any [available version](https://galaxy.ansible.com/community/general):
|
||||
You can also install a specific version of the collection, for example, if you need to downgrade when something is broken in the latest version (please report an issue in this repository). Use the following syntax where `X.Y.Z` can be any [available version](https://galaxy.ansible.com/ui/repo/published/community/general/):
|
||||
|
||||
```bash
|
||||
ansible-galaxy collection install community.general:==X.Y.Z
|
||||
@@ -64,6 +65,10 @@ ansible-galaxy collection install community.general:==X.Y.Z
|
||||
|
||||
See [Ansible Using collections](https://docs.ansible.com/ansible/latest/user_guide/collections_using.html) for more details.
|
||||
|
||||
### FQCNs for modules and actions
|
||||
|
||||
⚠️ The collection uses a similar directory structure for modules as the Ansible repository used for Ansible 2.9 and before. This directory structure was never exposed to the user. Due to changes in community.general 5.0.0 (using `meta/runtime.yml` redirects instead of symbolic links) some tooling started exposing the internal module names to end-users. These **internal names**, like `community.general.system.ufw` for the UFW firewall managing module, do work, but should be avoided since they are treated as an implementation detail that can change at any time, even in bugfix releases. Always use the three-component FQCN form, for example `community.general.ufw` for the UFW module. ⚠️
|
||||
|
||||
## Contributing to this collection
|
||||
|
||||
The content of this collection is made by good people just like you, a community of individuals collaborating on making the world better through developing automation software.
|
||||
|
||||
@@ -1160,3 +1160,450 @@ releases:
|
||||
name: pipx_info
|
||||
namespace: packaging.language
|
||||
release_date: '2022-09-13'
|
||||
5.7.0:
|
||||
changes:
|
||||
bugfixes:
|
||||
- ini_file - minor refactor fixing a python lint error (https://github.com/ansible-collections/community.general/pull/5307).
|
||||
- locale_gen - fix support for Ubuntu (https://github.com/ansible-collections/community.general/issues/5281).
|
||||
- lxc_container - the module has been updated to support Python 3 (https://github.com/ansible-collections/community.general/pull/5304).
|
||||
- 'nmcli - fix error when setting previously unset MAC address, ``gsm.apn``
|
||||
or ``vpn.data``: current values were being normalized without checking if
|
||||
they might be ``None`` (https://github.com/ansible-collections/community.general/pull/5291).'
|
||||
- redhat_subscription - make module idempotent when ``pool_ids`` are used (https://github.com/ansible-collections/community.general/issues/5313).
|
||||
deprecated_features:
|
||||
- lxc_container - the module will no longer make any effort to support Python
|
||||
2 (https://github.com/ansible-collections/community.general/pull/5304).
|
||||
minor_changes:
|
||||
- bitwarden lookup plugin - add option ``search`` to search for other attributes
|
||||
than name (https://github.com/ansible-collections/community.general/pull/5297).
|
||||
- machinectl become plugin - combine the success command when building the become
|
||||
command to be consistent with other become plugins (https://github.com/ansible-collections/community.general/pull/5287).
|
||||
- netcup_dnsapi - add ``timeout`` parameter (https://github.com/ansible-collections/community.general/pull/5301).
|
||||
- proxmox module utils, the proxmox* modules - add ``api_task_ok`` helper to
|
||||
standardize API task status checks across all proxmox modules (https://github.com/ansible-collections/community.general/pull/5274).
|
||||
- proxmox_snap - add ``unbind`` param to support snapshotting containers with
|
||||
configured mountpoints (https://github.com/ansible-collections/community.general/pull/5274).
|
||||
- redfish_config - add ``SetSessionService`` to set default session timeout
|
||||
policy (https://github.com/ansible-collections/community.general/issues/5008).
|
||||
- terraform - adds capability to handle complex variable structures for ``variables``
|
||||
parameter in the module. This must be enabled with the new ``complex_vars``
|
||||
parameter (https://github.com/ansible-collections/community.general/pull/4797).
|
||||
- terraform - run ``terraform init`` with ``-no-color`` not to mess up the stdout
|
||||
of the task (https://github.com/ansible-collections/community.general/pull/5147).
|
||||
release_summary: Regular feature and bugfix release.
|
||||
fragments:
|
||||
- 4797-terraform-complex-variables.yml
|
||||
- 5.7.0.yml
|
||||
- 5008-addSetSessionService.yml
|
||||
- 5147-terraform-init-no-color.yml
|
||||
- 5274-proxmox-snap-container-with-mountpoints.yml
|
||||
- 5280-lxc_container-py3.yaml
|
||||
- 5282-locale_gen.yaml
|
||||
- 5287-machinectl-become-success.yml
|
||||
- 5291-fix-nmcli-error-when-setting-unset-mac-address.yaml
|
||||
- 5297-bitwarden-add-search-field.yml
|
||||
- 5301-netcup_dnsapi-timeout.yml
|
||||
- 5307-ini_file-lint.yaml
|
||||
- 5313-fix-redhat_subscription-idempotency-pool_ids.yml
|
||||
modules:
|
||||
- description: Allows administration of Keycloak user_rolemapping with the Keycloak
|
||||
API
|
||||
name: keycloak_user_rolemapping
|
||||
namespace: identity.keycloak
|
||||
- description: Management of a disk of a Qemu(KVM) VM in a Proxmox VE cluster.
|
||||
name: proxmox_disk
|
||||
namespace: cloud.misc
|
||||
release_date: '2022-10-04'
|
||||
5.8.0:
|
||||
changes:
|
||||
breaking_changes:
|
||||
- newrelic_deployment - ``revision`` is required for v2 API (https://github.com/ansible-collections/community.general/pull/5341).
|
||||
bugfixes:
|
||||
- archive - avoid crash when ``lzma`` is not present and ``format`` is not ``xz``
|
||||
(https://github.com/ansible-collections/community.general/pull/5393).
|
||||
- ldap_attrs - fix ordering issue by ignoring the ``{x}`` prefix on attribute
|
||||
values (https://github.com/ansible-collections/community.general/issues/977,
|
||||
https://github.com/ansible-collections/community.general/pull/5385).
|
||||
- opentelemetry callback plugin - support opentelemetry-api 1.13.0 that removed
|
||||
support for ``_time_ns`` (https://github.com/ansible-collections/community.general/pull/5342).
|
||||
- pfexec become plugin - remove superfluous quotes preventing exe wrap from working
|
||||
as expected (https://github.com/ansible-collections/community.general/issues/3671,
|
||||
https://github.com/ansible-collections/community.general/pull/3889).
|
||||
- pkgng - fix case when ``pkg`` fails when trying to upgrade all packages (https://github.com/ansible-collections/community.general/issues/5363).
|
||||
- proxmox_kvm - fix ``agent`` parameter when boolean value is specified (https://github.com/ansible-collections/community.general/pull/5198).
|
||||
- virtualbox inventory plugin - skip parsing values with keys that have both
|
||||
a value and nested data. Skip parsing values that are nested more than two
|
||||
keys deep (https://github.com/ansible-collections/community.general/issues/5332,
|
||||
https://github.com/ansible-collections/community.general/pull/5348).
|
||||
- xenserver_facts - fix broken ``AnsibleModule`` call that prevented the module
|
||||
from working at all (https://github.com/ansible-collections/community.general/pull/5383).
|
||||
deprecated_features:
|
||||
- ArgFormat module utils - deprecated along ``CmdMixin``, in favor of the ``cmd_runner_fmt``
|
||||
module util (https://github.com/ansible-collections/community.general/pull/5370).
|
||||
- CmdMixin module utils - deprecated in favor of the ``CmdRunner`` module util
|
||||
(https://github.com/ansible-collections/community.general/pull/5370).
|
||||
- CmdModuleHelper module utils - deprecated in favor of the ``CmdRunner`` module
|
||||
util (https://github.com/ansible-collections/community.general/pull/5370).
|
||||
- CmdStateModuleHelper module utils - deprecated in favor of the ``CmdRunner``
|
||||
module util (https://github.com/ansible-collections/community.general/pull/5370).
|
||||
- django_manage - support for Django releases older than 4.1 has been deprecated
|
||||
and will be removed in community.general 9.0.0 (https://github.com/ansible-collections/community.general/pull/5400).
|
||||
- django_manage - support for the commands ``cleanup``, ``syncdb`` and ``validate``
|
||||
that have been deprecated in Django long time ago will be removed in community.general
|
||||
9.0.0 (https://github.com/ansible-collections/community.general/pull/5400).
|
||||
- django_manage - the behavior of "creating the virtual environment when missing"
|
||||
is being deprecated and will be removed in community.general version 9.0.0
|
||||
(https://github.com/ansible-collections/community.general/pull/5405).
|
||||
- newrelic_deployment - ``appname`` and ``environment`` are no longer valid
|
||||
options in the v2 API. They will be removed in community.general 7.0.0 (https://github.com/ansible-collections/community.general/pull/5341).
|
||||
major_changes:
|
||||
- newrelic_deployment - removed New Relic v1 API, added support for v2 API (https://github.com/ansible-collections/community.general/pull/5341).
|
||||
minor_changes:
|
||||
- consul - minor refactoring (https://github.com/ansible-collections/community.general/pull/5367).
|
||||
- lxc_container - minor refactoring (https://github.com/ansible-collections/community.general/pull/5358).
|
||||
- nmcli - add ``transport_mode`` configuration for Infiniband devices (https://github.com/ansible-collections/community.general/pull/5361).
|
||||
- opentelemetry callback plugin - send logs. This can be disabled by setting
|
||||
``disable_logs=false`` (https://github.com/ansible-collections/community.general/pull/4175).
|
||||
- portage - add knobs for Portage's ``--backtrack`` and ``--with-bdeps`` options
|
||||
(https://github.com/ansible-collections/community.general/pull/5349).
|
||||
- portage - use Portage's python module instead of calling gentoolkit-provided
|
||||
program in shell (https://github.com/ansible-collections/community.general/pull/5349).
|
||||
- znode - possibility to use ZooKeeper ACL authentication (https://github.com/ansible-collections/community.general/pull/5306).
|
||||
release_summary: Regular feature and bugfix release.
|
||||
fragments:
|
||||
- 3671-illumos-pfexec.yml
|
||||
- 4175-opentelemetry_logs.yml
|
||||
- 5.8.0.yml
|
||||
- 5198-proxmox.yml
|
||||
- 5306-add-options-for-authentication.yml
|
||||
- 5341-newrelic-v2-api-changes.yml
|
||||
- 5342-opentelemetry_bug_fix_opentelemetry-api-1.13.yml
|
||||
- 5348-fix-vbox-deeply-nested-hostvars.yml
|
||||
- 5349-drop-gentoolkit-more-knobs.yml
|
||||
- 5358-lxc-container-refactor.yml
|
||||
- 5361-nmcli-add-infiniband-transport-mode.yaml
|
||||
- 5367-consul-refactor.yaml
|
||||
- 5369-pkgng-fix-update-all.yaml
|
||||
- 5370-mh-cmdmixin-deprecation.yaml
|
||||
- 5383-xenserver_facts.yml
|
||||
- 5385-search_s-based-_is_value_present.yaml
|
||||
- 5393-archive.yml
|
||||
- 5400-django-manage-deprecations.yml
|
||||
- 5404-django-manage-venv-deprecation.yml
|
||||
modules:
|
||||
- description: Add/remove/change files in ISO file
|
||||
name: iso_customize
|
||||
namespace: files
|
||||
- description: Listing of resource policy_profiles in ManageIQ
|
||||
name: manageiq_policies_info
|
||||
namespace: remote_management.manageiq
|
||||
- description: Retrieve resource tags in ManageIQ
|
||||
name: manageiq_tags_info
|
||||
namespace: remote_management.manageiq
|
||||
- description: Scaleway Container registry management module
|
||||
name: scaleway_container_registry
|
||||
namespace: cloud.scaleway
|
||||
- description: Scaleway Container registry info module
|
||||
name: scaleway_container_registry_info
|
||||
namespace: cloud.scaleway
|
||||
release_date: '2022-10-25'
|
||||
5.8.1:
|
||||
changes:
|
||||
bugfixes:
|
||||
- dependent lookup plugin - avoid warning on deprecated parameter for ``Templar.template()``
|
||||
(https://github.com/ansible-collections/community.general/pull/5543).
|
||||
- iso_create - the module sometimes failed to add folders for Joliet and UDF
|
||||
formats (https://github.com/ansible-collections/community.general/issues/5275).
|
||||
- ldap_attrs - fix bug which caused a ``Bad search filter`` error. The error
|
||||
was occurring when the ldap attribute value contained special characters such
|
||||
as ``(`` or ``*`` (https://github.com/ansible-collections/community.general/issues/5434,
|
||||
https://github.com/ansible-collections/community.general/pull/5435).
|
||||
- nmcli - fix int options idempotence (https://github.com/ansible-collections/community.general/issues/4998).
|
||||
- nsupdate - fix silent failures when updating ``NS`` entries from Bind9 managed
|
||||
DNS zones (https://github.com/ansible-collections/community.general/issues/4657).
|
||||
- one_vm - avoid splitting labels that are ``None`` (https://github.com/ansible-collections/community.general/pull/5489).
|
||||
- proxmox_disk - avoid duplicate ``vmid`` reference (https://github.com/ansible-collections/community.general/issues/5492,
|
||||
https://github.com/ansible-collections/community.general/pull/5493).
|
||||
- snap - allow values in the ``options`` parameter to contain whitespaces (https://github.com/ansible-collections/community.general/pull/5475).
|
||||
deprecated_features:
|
||||
- Please note that some tools, like the VScode plugin (https://github.com/ansible/vscode-ansible/issues/573),
|
||||
or ``ansible-doc --list --type module``, suggest to replace the correct FQCNs
|
||||
for modules and actions in community.general with internal names that have
|
||||
more than three components. For example, ``community.general.ufw`` is suggested
|
||||
to be replaced by ``community.general.system.ufw``. While these longer names
|
||||
do work, they are considered **internal names** by the collection and are
|
||||
subject to change and be removed at any time. They **will** be removed in
|
||||
community.general 6.0.0 and result in deprecation messages. Avoid using these
|
||||
internal names, and use general three-component FQCNs (``community.general.<name_of_module>``)
|
||||
instead (https://github.com/ansible-collections/community.general/pull/5373).
|
||||
minor_changes:
|
||||
- passwordstore lookup plugin - improve error messages to include stderr (https://github.com/ansible-collections/community.general/pull/5436)
|
||||
release_summary: Regular bugfix release.
|
||||
fragments:
|
||||
- 4998-nmcli-fix-int-options-idempotence.yml
|
||||
- 5.8.1.yml
|
||||
- 5377-nsupdate-ns-records-with-bind.yml
|
||||
- 5435-escape-ldap-param.yml
|
||||
- 5436-passwordstore-errors.yml
|
||||
- 5468-iso-create-not-add-folders.yml
|
||||
- 5475-snap-option-value-whitespace.yml
|
||||
- 5489-nonetype-in-get-vm-by-label.yml
|
||||
- 5493-proxmox.yml
|
||||
- 5543-dependent-template.yml
|
||||
- fqcn-warnings.yml
|
||||
release_date: '2022-11-15'
|
||||
5.8.10:
|
||||
changes:
|
||||
major_changes:
|
||||
- The community.general 5.x.y release stream is now effectively **End of Life**.
|
||||
No more releases will be made, and regular CI runs will stop.
|
||||
release_summary: Final maintenance release of community.general major version
|
||||
5.
|
||||
fragments:
|
||||
- 5.8.10.yml
|
||||
release_date: '2023-11-01'
|
||||
5.8.2:
|
||||
changes:
|
||||
bugfixes:
|
||||
- chroot connection plugin - add ``inventory_hostname`` to vars under ``remote_addr``.
|
||||
This is needed for compatibility with ansible-core 2.13 (https://github.com/ansible-collections/community.general/pull/5570).
|
||||
- cmd_runner module utils - fixed bug when handling default cases in ``cmd_runner_fmt.as_map()``
|
||||
(https://github.com/ansible-collections/community.general/pull/5538).
|
||||
- cmd_runner module utils - formatting arguments ``cmd_runner_fmt.as_fixed()``
|
||||
was expecting a non-existing argument (https://github.com/ansible-collections/community.general/pull/5538).
|
||||
- unixy callback plugin - fix plugin to work with ansible-core 2.14 by using
|
||||
Ansible's configuration manager for handling options (https://github.com/ansible-collections/community.general/issues/5600).
|
||||
release_summary: Regular bugfix release.
|
||||
fragments:
|
||||
- 5.8.2.yml
|
||||
- 5538-cmd-runner-as-fixed.yml
|
||||
- 5570-chroot-plugin-fix-default-inventory_hostname.yml
|
||||
- 5601-unixy-callback-use-config-manager.yml
|
||||
release_date: '2022-11-29'
|
||||
5.8.3:
|
||||
changes:
|
||||
bugfixes:
|
||||
- keycloak_client_rolemapping - calculate ``proposed`` and ``after`` return
|
||||
values properly (https://github.com/ansible-collections/community.general/pull/5619).
|
||||
- keycloak_client_rolemapping - remove only listed mappings with ``state=absent``
|
||||
(https://github.com/ansible-collections/community.general/pull/5619).
|
||||
- proxmox inventory plugin - handle tags delimited by semicolon instead of comma,
|
||||
which happens from Proxmox 7.3 on (https://github.com/ansible-collections/community.general/pull/5602).
|
||||
- vdo - now uses ``yaml.safe_load()`` to parse command output instead of the
|
||||
deprecated ``yaml.load()`` which is potentially unsafe. Using ``yaml.load()``
|
||||
without explicitly setting a ``Loader=`` is also an error in pyYAML 6.0 (https://github.com/ansible-collections/community.general/pull/5632).
|
||||
- vmadm - fix for index out of range error in ``get_vm_uuid`` (https://github.com/ansible-collections/community.general/pull/5628).
|
||||
release_summary: Regular bugfix release.
|
||||
fragments:
|
||||
- 5.8.3.yml
|
||||
- 5602-proxmox-tags.yml
|
||||
- 5619-keycloak-improvements.yml
|
||||
- 5628-fix-vmadm-off-by-one.yml
|
||||
- 5632-vdo-Use-yaml-safe-load-instead-of-yaml-load.yml
|
||||
release_date: '2022-12-05'
|
||||
5.8.4:
|
||||
changes:
|
||||
bugfixes:
|
||||
- gconftool2 - fix crash that prevents setting a key (https://github.com/ansible-collections/community.general/issues/5591,
|
||||
https://github.com/ansible-collections/community.general/pull/5687).
|
||||
- gitlab_group_variables - fix dropping variables accidentally when GitLab introduced
|
||||
new properties (https://github.com/ansible-collections/community.general/pull/5667).
|
||||
- gitlab_project_variables - fix dropping variables accidentally when GitLab
|
||||
introduced new properties (https://github.com/ansible-collections/community.general/pull/5667).
|
||||
- lxc_container - fix the arguments of the lxc command which broke the creation
|
||||
and cloning of containers (https://github.com/ansible-collections/community.general/issues/5578).
|
||||
- opkg - fix issue that ``force=reinstall`` would not reinstall an existing
|
||||
package (https://github.com/ansible-collections/community.general/pull/5705).
|
||||
- proxmox_disk - fixed possible issues with redundant ``vmid`` parameter (https://github.com/ansible-collections/community.general/issues/5492,
|
||||
https://github.com/ansible-collections/community.general/pull/5672).
|
||||
- proxmox_nic - fixed possible issues with redundant ``vmid`` parameter (https://github.com/ansible-collections/community.general/issues/5492,
|
||||
https://github.com/ansible-collections/community.general/pull/5672).
|
||||
- unixy callback plugin - fix typo introduced when updating to use Ansible's
|
||||
configuration manager for handling options (https://github.com/ansible-collections/community.general/issues/5600).
|
||||
release_summary: Regular bugfix release.
|
||||
fragments:
|
||||
- 5.8.4.yml
|
||||
- 5659-fix-lxc_container-command.yml
|
||||
- 5666-gitlab-variables.yml
|
||||
- 5672-proxmox.yml
|
||||
- 5687-gconftool2.yml
|
||||
- 5705-opkg-fix-force-reinstall.yml
|
||||
- 5744-unixy-callback-fix-config-manager-typo.yml
|
||||
release_date: '2023-01-04'
|
||||
5.8.5:
|
||||
changes:
|
||||
bugfixes:
|
||||
- ModuleHelper - fix bug when adjusting the name of reserved output variables
|
||||
(https://github.com/ansible-collections/community.general/pull/5755).
|
||||
- alternatives - support subcommands on Fedora 37, which uses ``follower`` instead
|
||||
of ``slave`` (https://github.com/ansible-collections/community.general/pull/5794).
|
||||
- bitwarden lookup plugin - clarify what to do, if the bitwarden vault is not
|
||||
unlocked (https://github.com/ansible-collections/community.general/pull/5811).
|
||||
- dig lookup plugin - correctly handle DNSKEY record type's ``algorithm`` field
|
||||
(https://github.com/ansible-collections/community.general/pull/5914).
|
||||
- gem - fix hang due to interactive prompt for confirmation on specific version
|
||||
uninstall (https://github.com/ansible-collections/community.general/pull/5751).
|
||||
- gitlab_deploy_key - also update ``title`` and not just ``can_push`` (https://github.com/ansible-collections/community.general/pull/5888).
|
||||
- keycloak_user_federation - fixes federation creation issue. When a new federation
|
||||
was created and at the same time a default / standard mapper was also changed
|
||||
/ updated the creation process failed as a bad None set variable led to a
|
||||
bad malformed url request (https://github.com/ansible-collections/community.general/pull/5750).
|
||||
- 'keycloak_user_federation - fixes idempotency detection issues. In some cases
|
||||
the module could fail to properly detect already existing user federations
|
||||
because of a buggy seemingly superfluous extra query parameter (https://github.com/ansible-collections/community.general/pull/5732).
|
||||
|
||||
'
|
||||
- loganalytics callback plugin - adjust type of callback to ``notification``,
|
||||
it was incorrectly classified as ``aggregate`` before (https://github.com/ansible-collections/community.general/pull/5761).
|
||||
- logdna callback plugin - adjust type of callback to ``notification``, it was
|
||||
incorrectly classified as ``aggregate`` before (https://github.com/ansible-collections/community.general/pull/5761).
|
||||
- logstash callback plugin - adjust type of callback to ``notification``, it
|
||||
was incorrectly classified as ``aggregate`` before (https://github.com/ansible-collections/community.general/pull/5761).
|
||||
- nsupdate - fix zone lookup. The SOA record for an existing zone is returned
|
||||
as an answer RR and not as an authority RR (https://github.com/ansible-collections/community.general/issues/5817,
|
||||
https://github.com/ansible-collections/community.general/pull/5818).
|
||||
- redfish_utils - removed basic auth HTTP header when performing a GET on the
|
||||
service root resource and when performing a POST to the session collection
|
||||
(https://github.com/ansible-collections/community.general/issues/5886).
|
||||
- splunk callback plugin - adjust type of callback to ``notification``, it was
|
||||
incorrectly classified as ``aggregate`` before (https://github.com/ansible-collections/community.general/pull/5761).
|
||||
- sumologic callback plugin - adjust type of callback to ``notification``, it
|
||||
was incorrectly classified as ``aggregate`` before (https://github.com/ansible-collections/community.general/pull/5761).
|
||||
- syslog_json callback plugin - adjust type of callback to ``notification``,
|
||||
it was incorrectly classified as ``aggregate`` before (https://github.com/ansible-collections/community.general/pull/5761).
|
||||
- terraform - fix ``current`` workspace never getting appended to the ``all``
|
||||
key in the ``workspace_ctf`` object (https://github.com/ansible-collections/community.general/pull/5735).
|
||||
- terraform - fix ``terraform init`` failure when there are multiple workspaces
|
||||
on the remote backend and when ``default`` workspace is missing by setting
|
||||
``TF_WORKSPACE`` environmental variable to the value of ``workspace`` when
|
||||
used (https://github.com/ansible-collections/community.general/pull/5735).
|
||||
- terraform module - disable ANSI escape sequences during validation phase (https://github.com/ansible-collections/community.general/pull/5843).
|
||||
- xml - fixed a bug where empty ``children`` list would not be set (https://github.com/ansible-collections/community.general/pull/5808).
|
||||
release_summary: Regular bugfix release.
|
||||
fragments:
|
||||
- 5.8.5.yml
|
||||
- 5732-bugfix-keycloak-userfed-idempotency.yml
|
||||
- 5735-terraform-init-fix-when-default-workspace-doesnt-exists.yaml
|
||||
- 5750-bugfixing-keycloak-usrfed-fail-when-update-default-mapper-simultaneously.yml
|
||||
- 5751-gem-fix-uninstall-hang.yml
|
||||
- 5755-mh-fix-output-conflict.yml
|
||||
- 5761-callback-types.yml
|
||||
- 5794-alternatives-fedora37.yml
|
||||
- 5808-xml-children-parameter-does-not-exist.yml
|
||||
- 5811-clarify-bitwarden-error.yml
|
||||
- 5818-nsupdate-fix-zone-lookup.yml
|
||||
- 5843-terraform-validate-no-color.yml
|
||||
- 5886-redfish-correct-basic-auth-usage-on-session-creation.yml
|
||||
- 5888-update-key-title.yml
|
||||
- 5914-dig-dnskey.yml
|
||||
release_date: '2023-01-31'
|
||||
5.8.6:
|
||||
changes:
|
||||
bugfixes:
|
||||
- cartesian and flattened lookup plugins - adjust to parameter deprecation in
|
||||
ansible-core 2.14's ``listify_lookup_plugin_terms`` helper function (https://github.com/ansible-collections/community.general/pull/6074).
|
||||
- cloudflare_dns - fixed the idempotency for SRV DNS records (https://github.com/ansible-collections/community.general/pull/5972).
|
||||
- cloudflare_dns - fixed the possibility of setting a root-level SRV DNS record
|
||||
(https://github.com/ansible-collections/community.general/pull/5972).
|
||||
- github_webhook - fix always changed state when no secret is provided (https://github.com/ansible-collections/community.general/pull/5994).
|
||||
- keycloak_client - fix accidental replacement of value for attribute ``saml.signing.private.key``
|
||||
with ``no_log`` in wrong contexts (https://github.com/ansible-collections/community.general/pull/5934).
|
||||
- lxd_* modules, lxd inventory plugin - fix TLS/SSL certificate validation problems
|
||||
by using the correct purpose when creating the TLS context (https://github.com/ansible-collections/community.general/issues/5616,
|
||||
https://github.com/ansible-collections/community.general/pull/6034).
|
||||
- nmcli - fix change handling of values specified as an integer 0 (https://github.com/ansible-collections/community.general/pull/5431).
|
||||
- nmcli - fix failure to handle WIFI settings when connection type not specified
|
||||
(https://github.com/ansible-collections/community.general/pull/5431).
|
||||
- nmcli - fix improper detection of changes to ``wifi.wake-on-wlan`` (https://github.com/ansible-collections/community.general/pull/5431).
|
||||
- nmcli - order is significant for lists of addresses (https://github.com/ansible-collections/community.general/pull/6048).
|
||||
- terraform and timezone - slight refactoring to avoid linter reporting potentially
|
||||
undefined variables (https://github.com/ansible-collections/community.general/pull/5933).
|
||||
- various plugins and modules - remove unnecessary imports (https://github.com/ansible-collections/community.general/pull/5940).
|
||||
- zfs_delegate_admin - zfs allow output can now be parsed when uids/gids are
|
||||
not known to the host system (https://github.com/ansible-collections/community.general/pull/5943).
|
||||
- zypper - make package managing work on readonly filesystem of openSUSE MicroOS
|
||||
(https://github.com/ansible-collections/community.general/pull/5615).
|
||||
release_summary: Regular bugfix release.
|
||||
fragments:
|
||||
- 5.8.6.yml
|
||||
- 5431-nmcli-wifi.yml
|
||||
- 5615-zypper-transactional-update.yml
|
||||
- 5933-linting.yml
|
||||
- 5934-fix-keycloak-sanitize_cr.yml
|
||||
- 5943-zfs_delegate_admin-fix-zfs-allow-cannot-parse-unknown-uid-gid.yml
|
||||
- 5972-cloudflare-dns-srv-record.yml
|
||||
- 5994-github-webhook-secret.yml
|
||||
- 6034-lxd-tls.yml
|
||||
- 6048-nmcli-addres-order.yml
|
||||
- 6074-loader_in_listify.yml.yml
|
||||
- remove-unneeded-imports.yml
|
||||
release_date: '2023-02-26'
|
||||
5.8.7:
|
||||
changes:
|
||||
bugfixes:
|
||||
- archive - avoid deprecated exception class on Python 3 (https://github.com/ansible-collections/community.general/pull/6180).
|
||||
- influxdb_user - fix running in check mode when the user does not exist yet
|
||||
(https://github.com/ansible-collections/community.general/pull/6111).
|
||||
- interfaces_file - fix reading options in lines not starting with a space (https://github.com/ansible-collections/community.general/issues/6120).
|
||||
- jail connection plugin - add ``inventory_hostname`` to vars under ``remote_addr``.
|
||||
This is needed for compatibility with ansible-core 2.13 (https://github.com/ansible-collections/community.general/pull/6118).
|
||||
- memset - fix memset urlerror handling (https://github.com/ansible-collections/community.general/pull/6114).
|
||||
- nmcli - fixed idempotency issue for bridge connections. Module forced default
|
||||
value of ``bridge.priority`` to nmcli if not set; if ``bridge.stp`` is disabled
|
||||
nmcli ignores it and keep default (https://github.com/ansible-collections/community.general/issues/3216,
|
||||
https://github.com/ansible-collections/community.general/issues/4683).
|
||||
- nmcli - fixed idempotency issue when module params is set to ``may_fail4=false``
|
||||
and ``method4=disabled``; in this case nmcli ignores change and keeps their
|
||||
own default value ``yes`` (https://github.com/ansible-collections/community.general/pull/6106).
|
||||
- nmcli - implemented changing mtu value on vlan interfaces (https://github.com/ansible-collections/community.general/issues/4387).
|
||||
- xenorchestra inventory plugin - fix failure to receive objects from server
|
||||
due to not checking the id of the response (https://github.com/ansible-collections/community.general/pull/6227).
|
||||
release_summary: Bugfix release.
|
||||
fragments:
|
||||
- 3216-nmcli-bridge-idempotency-fix.yml
|
||||
- 4387-nmcli-mtu-for-vlan-connection-fix.yml
|
||||
- 5.8.7.yml
|
||||
- 6106-nmcli-ipv4-mayfail-idempotency-fix.yml
|
||||
- 6111-influxdb_user-check-mode.yaml
|
||||
- 6114-memset-add-url-error-handling.yml
|
||||
- 6118-jail-plugin-fix-default-inventory_hostname.yml
|
||||
- 6131-fix-interfaces_file-for-no-leading-spaces.yml
|
||||
- 6180-replace-deprecated-badzipfile.yml
|
||||
- 6227-xen-orchestra-check-response-id.yml
|
||||
release_date: '2023-03-27'
|
||||
5.8.8:
|
||||
changes:
|
||||
bugfixes:
|
||||
- archive - reduce RAM usage by generating CRC32 checksum over chunks (https://github.com/ansible-collections/community.general/pull/6274).
|
||||
- flatpak - fixes idempotency detection issues. In some cases the module could
|
||||
fail to properly detect already existing Flatpaks because of a parameter which
|
||||
only checks the installed apps (https://github.com/ansible-collections/community.general/pull/6289).
|
||||
- icinga2_host - fix the data structure sent to Icinga to make use of host templates
|
||||
and template vars (https://github.com/ansible-collections/community.general/pull/6286).
|
||||
- idrac_redfish_command - allow user to specify ``resource_id`` for ``CreateBiosConfigJob``
|
||||
to specify an exact manager (https://github.com/ansible-collections/community.general/issues/2090).
|
||||
- ini_file - make ``section`` parameter not required so it is possible to pass
|
||||
``null`` as a value. This only was possible in the past due to a bug in ansible-core
|
||||
that now has been fixed (https://github.com/ansible-collections/community.general/pull/6404).
|
||||
- rhsm_release - make ``release`` parameter not required so it is possible to
|
||||
pass ``null`` as a value. This only was possible in the past due to a bug
|
||||
in ansible-core that now has been fixed (https://github.com/ansible-collections/community.general/pull/6401).
|
||||
release_summary: Regular bugfix release.
|
||||
fragments:
|
||||
- 2090-idrac-redfish-resource-id-fix.yml
|
||||
- 5.8.8.yml
|
||||
- 6199-archive-generate-checksum-in-chunks.yml
|
||||
- 6286-icinga2_host-template-and-template-vars.yml
|
||||
- 6289-bugfix-flatpak-check-if-already-installed.yml
|
||||
- 6401-rhsm_release-required.yml
|
||||
- 6404-ini_file-section.yml
|
||||
release_date: '2023-04-24'
|
||||
5.8.9:
|
||||
changes:
|
||||
bugfixes:
|
||||
- passwordstore lookup plugin - make compatible with ansible-core 2.16 (https://github.com/ansible-collections/community.general/pull/6447).
|
||||
release_summary: Bugfix release.
|
||||
fragments:
|
||||
- 5.8.9.yml
|
||||
- passwordstore-lock.yml
|
||||
release_date: '2023-07-16'
|
||||
|
||||
@@ -5,7 +5,7 @@
|
||||
|
||||
namespace: community
|
||||
name: general
|
||||
version: 5.6.0
|
||||
version: 5.8.10
|
||||
readme: README.md
|
||||
authors:
|
||||
- Ansible (https://github.com/ansible)
|
||||
|
||||
@@ -560,6 +560,8 @@ plugin_routing:
|
||||
redirect: community.general.files.iso_create
|
||||
iso_extract:
|
||||
redirect: community.general.files.iso_extract
|
||||
iso_customize:
|
||||
redirect: community.general.files.iso_customize
|
||||
jabber:
|
||||
redirect: community.general.notification.jabber
|
||||
java_cert:
|
||||
@@ -612,6 +614,8 @@ plugin_routing:
|
||||
redirect: community.general.identity.keycloak.keycloak_role
|
||||
keycloak_user_federation:
|
||||
redirect: community.general.identity.keycloak.keycloak_user_federation
|
||||
keycloak_user_rolemapping:
|
||||
redirect: community.general.identity.keycloak.keycloak_user_rolemapping
|
||||
keyring:
|
||||
redirect: community.general.system.keyring
|
||||
keyring_info:
|
||||
@@ -706,10 +710,14 @@ plugin_routing:
|
||||
redirect: community.general.remote_management.manageiq.manageiq_group
|
||||
manageiq_policies:
|
||||
redirect: community.general.remote_management.manageiq.manageiq_policies
|
||||
manageiq_policies_info:
|
||||
redirect: community.general.remote_management.manageiq.manageiq_policies_info
|
||||
manageiq_provider:
|
||||
redirect: community.general.remote_management.manageiq.manageiq_provider
|
||||
manageiq_tags:
|
||||
redirect: community.general.remote_management.manageiq.manageiq_tags
|
||||
manageiq_tags_info:
|
||||
redirect: community.general.remote_management.manageiq.manageiq_tags_info
|
||||
manageiq_tenant:
|
||||
redirect: community.general.remote_management.manageiq.manageiq_tenant
|
||||
manageiq_user:
|
||||
@@ -1213,6 +1221,8 @@ plugin_routing:
|
||||
redirect: community.general.cloud.profitbricks.profitbricks_volume_attachments
|
||||
proxmox:
|
||||
redirect: community.general.cloud.misc.proxmox
|
||||
proxmox_disk:
|
||||
redirect: community.general.cloud.misc.proxmox_disk
|
||||
proxmox_domain_info:
|
||||
redirect: community.general.cloud.misc.proxmox_domain_info
|
||||
proxmox_group_info:
|
||||
@@ -1369,6 +1379,10 @@ plugin_routing:
|
||||
redirect: community.general.cloud.scaleway.scaleway_compute
|
||||
scaleway_compute_private_network:
|
||||
redirect: community.general.cloud.scaleway.scaleway_compute_private_network
|
||||
scaleway_container_registry:
|
||||
redirect: community.general.cloud.scaleway.scaleway_container_registry
|
||||
scaleway_container_registry_info:
|
||||
redirect: community.general.cloud.scaleway.scaleway_container_registry_info
|
||||
scaleway_database_backup:
|
||||
redirect: community.general.cloud.scaleway.scaleway_database_backup
|
||||
scaleway_image_facts:
|
||||
|
||||
@@ -117,7 +117,7 @@ class BecomeModule(BecomeBase):
|
||||
|
||||
flags = self.get_option('become_flags')
|
||||
user = self.get_option('become_user')
|
||||
return '%s -q shell %s %s@ %s' % (become, flags, user, cmd)
|
||||
return '%s -q shell %s %s@ %s' % (become, flags, user, self._build_success_command(cmd, shell))
|
||||
|
||||
def check_success(self, b_output):
|
||||
b_output = self.remove_ansi_codes(b_output)
|
||||
|
||||
@@ -102,4 +102,4 @@ class BecomeModule(BecomeBase):
|
||||
|
||||
flags = self.get_option('become_flags')
|
||||
noexe = not self.get_option('wrap_exe')
|
||||
return '%s %s "%s"' % (exe, flags, self._build_success_command(cmd, shell, noexe=noexe))
|
||||
return '%s %s %s' % (exe, flags, self._build_success_command(cmd, shell, noexe=noexe))
|
||||
|
||||
2
plugins/cache/memcached.py
vendored
2
plugins/cache/memcached.py
vendored
@@ -52,11 +52,9 @@ import time
|
||||
from multiprocessing import Lock
|
||||
from itertools import chain
|
||||
|
||||
from ansible import constants as C
|
||||
from ansible.errors import AnsibleError
|
||||
from ansible.module_utils.common._collections_compat import MutableSet
|
||||
from ansible.plugins.cache import BaseCacheModule
|
||||
from ansible.release import __version__ as ansible_base_version
|
||||
from ansible.utils.display import Display
|
||||
|
||||
try:
|
||||
|
||||
2
plugins/cache/redis.py
vendored
2
plugins/cache/redis.py
vendored
@@ -67,12 +67,10 @@ import re
|
||||
import time
|
||||
import json
|
||||
|
||||
from ansible import constants as C
|
||||
from ansible.errors import AnsibleError
|
||||
from ansible.module_utils.common.text.converters import to_native
|
||||
from ansible.parsing.ajson import AnsibleJSONEncoder, AnsibleJSONDecoder
|
||||
from ansible.plugins.cache import BaseCacheModule
|
||||
from ansible.release import __version__ as ansible_base_version
|
||||
from ansible.utils.display import Display
|
||||
|
||||
try:
|
||||
|
||||
@@ -16,15 +16,15 @@ DOCUMENTATION = '''
|
||||
- cgroups
|
||||
short_description: Profiles maximum memory usage of tasks and full execution using cgroups
|
||||
description:
|
||||
- This is an ansible callback plugin that profiles maximum memory usage of ansible and individual tasks, and displays a recap at the end using cgroups
|
||||
- This is an ansible callback plugin that profiles maximum memory usage of ansible and individual tasks, and displays a recap at the end using cgroups.
|
||||
notes:
|
||||
- Requires ansible to be run from within a cgroup, such as with C(cgexec -g memory:ansible_profile ansible-playbook ...)
|
||||
- This cgroup should only be used by ansible to get accurate results
|
||||
- To create the cgroup, first use a command such as C(sudo cgcreate -a ec2-user:ec2-user -t ec2-user:ec2-user -g memory:ansible_profile)
|
||||
- Requires ansible to be run from within a cgroup, such as with C(cgexec -g memory:ansible_profile ansible-playbook ...).
|
||||
- This cgroup should only be used by ansible to get accurate results.
|
||||
- To create the cgroup, first use a command such as C(sudo cgcreate -a ec2-user:ec2-user -t ec2-user:ec2-user -g memory:ansible_profile).
|
||||
options:
|
||||
max_mem_file:
|
||||
required: true
|
||||
description: Path to cgroups C(memory.max_usage_in_bytes) file. Example C(/sys/fs/cgroup/memory/ansible_profile/memory.max_usage_in_bytes)
|
||||
description: Path to cgroups C(memory.max_usage_in_bytes) file. Example C(/sys/fs/cgroup/memory/ansible_profile/memory.max_usage_in_bytes).
|
||||
env:
|
||||
- name: CGROUP_MAX_MEM_FILE
|
||||
ini:
|
||||
@@ -32,7 +32,7 @@ DOCUMENTATION = '''
|
||||
key: max_mem_file
|
||||
cur_mem_file:
|
||||
required: true
|
||||
description: Path to C(memory.usage_in_bytes) file. Example C(/sys/fs/cgroup/memory/ansible_profile/memory.usage_in_bytes)
|
||||
description: Path to C(memory.usage_in_bytes) file. Example C(/sys/fs/cgroup/memory/ansible_profile/memory.usage_in_bytes).
|
||||
env:
|
||||
- name: CGROUP_CUR_MEM_FILE
|
||||
ini:
|
||||
|
||||
@@ -13,8 +13,8 @@ DOCUMENTATION = '''
|
||||
type: aggregate
|
||||
short_description: demo callback that adds play/task context
|
||||
description:
|
||||
- Displays some play and task context along with normal output
|
||||
- This is mostly for demo purposes
|
||||
- Displays some play and task context along with normal output.
|
||||
- This is mostly for demo purposes.
|
||||
requirements:
|
||||
- whitelist in configuration
|
||||
'''
|
||||
|
||||
@@ -21,13 +21,12 @@ DOCUMENTATION = '''
|
||||
extends_documentation_fragment:
|
||||
- default_callback
|
||||
requirements:
|
||||
- set as stdout callback in ansible.cfg (stdout_callback = counter_enabled)
|
||||
- set as stdout callback in C(ansible.cfg) (C(stdout_callback = counter_enabled))
|
||||
'''
|
||||
|
||||
from ansible import constants as C
|
||||
from ansible.plugins.callback import CallbackBase
|
||||
from ansible.utils.color import colorize, hostcolor
|
||||
from ansible.template import Templar
|
||||
from ansible.playbook.task_include import TaskInclude
|
||||
|
||||
|
||||
|
||||
@@ -14,7 +14,7 @@ short_description: minimal stdout output
|
||||
extends_documentation_fragment:
|
||||
- default_callback
|
||||
description:
|
||||
- When in verbose mode it will act the same as the default callback
|
||||
- When in verbose mode it will act the same as the default callback.
|
||||
author:
|
||||
- Dag Wieers (@dagwieers)
|
||||
requirements:
|
||||
|
||||
@@ -786,10 +786,6 @@ playbook.yml: >
|
||||
|
||||
import sys
|
||||
from contextlib import contextmanager
|
||||
from ansible import constants as C
|
||||
from ansible.playbook.task_include import TaskInclude
|
||||
from ansible.plugins.callback import CallbackBase
|
||||
from ansible.utils.color import colorize, hostcolor
|
||||
from ansible.template import Templar
|
||||
from ansible.vars.manager import VariableManager
|
||||
from ansible.plugins.callback.default import CallbackModule as Default
|
||||
|
||||
@@ -13,10 +13,10 @@ DOCUMENTATION = '''
|
||||
type: notification
|
||||
short_description: post task events to a jabber server
|
||||
description:
|
||||
- The chatty part of ChatOps with a Hipchat server as a target
|
||||
- The chatty part of ChatOps with a Hipchat server as a target.
|
||||
- This callback plugin sends status updates to a HipChat channel during playbook execution.
|
||||
requirements:
|
||||
- xmpp (python lib https://github.com/ArchipelProject/xmpppy)
|
||||
- xmpp (Python library U(https://github.com/ArchipelProject/xmpppy))
|
||||
options:
|
||||
server:
|
||||
description: connection info to jabber server
|
||||
|
||||
@@ -13,10 +13,10 @@ DOCUMENTATION = '''
|
||||
type: notification
|
||||
short_description: write playbook output to log file
|
||||
description:
|
||||
- This callback writes playbook output to a file per host in the C(/var/log/ansible/hosts) directory
|
||||
- This callback writes playbook output to a file per host in the C(/var/log/ansible/hosts) directory.
|
||||
requirements:
|
||||
- Whitelist in configuration
|
||||
- A writeable /var/log/ansible/hosts directory by the user executing Ansible on the controller
|
||||
- A writeable C(/var/log/ansible/hosts) directory by the user executing Ansible on the controller
|
||||
options:
|
||||
log_folder:
|
||||
default: /var/log/ansible/hosts
|
||||
|
||||
@@ -8,7 +8,7 @@ __metaclass__ = type
|
||||
|
||||
DOCUMENTATION = '''
|
||||
name: loganalytics
|
||||
type: aggregate
|
||||
type: notification
|
||||
short_description: Posts task results to Azure Log Analytics
|
||||
author: "Cyrus Li (@zhcli) <cyrus1006@gmail.com>"
|
||||
description:
|
||||
@@ -54,7 +54,6 @@ examples: |
|
||||
import hashlib
|
||||
import hmac
|
||||
import base64
|
||||
import logging
|
||||
import json
|
||||
import uuid
|
||||
import socket
|
||||
@@ -155,7 +154,7 @@ class AzureLogAnalyticsSource(object):
|
||||
|
||||
class CallbackModule(CallbackBase):
|
||||
CALLBACK_VERSION = 2.0
|
||||
CALLBACK_TYPE = 'aggregate'
|
||||
CALLBACK_TYPE = 'notification'
|
||||
CALLBACK_NAME = 'loganalytics'
|
||||
CALLBACK_NEEDS_WHITELIST = True
|
||||
|
||||
|
||||
@@ -9,17 +9,17 @@ __metaclass__ = type
|
||||
DOCUMENTATION = '''
|
||||
author: Unknown (!UNKNOWN)
|
||||
name: logdna
|
||||
type: aggregate
|
||||
type: notification
|
||||
short_description: Sends playbook logs to LogDNA
|
||||
description:
|
||||
- This callback will report logs from playbook actions, tasks, and events to LogDNA (https://app.logdna.com)
|
||||
- This callback will report logs from playbook actions, tasks, and events to LogDNA (U(https://app.logdna.com)).
|
||||
requirements:
|
||||
- LogDNA Python Library (https://github.com/logdna/python)
|
||||
- LogDNA Python Library (U(https://github.com/logdna/python))
|
||||
- whitelisting in configuration
|
||||
options:
|
||||
conf_key:
|
||||
required: true
|
||||
description: LogDNA Ingestion Key
|
||||
description: LogDNA Ingestion Key.
|
||||
type: string
|
||||
env:
|
||||
- name: LOGDNA_INGESTION_KEY
|
||||
@@ -28,7 +28,7 @@ DOCUMENTATION = '''
|
||||
key: conf_key
|
||||
plugin_ignore_errors:
|
||||
required: false
|
||||
description: Whether to ignore errors on failing or not
|
||||
description: Whether to ignore errors on failing or not.
|
||||
type: boolean
|
||||
env:
|
||||
- name: ANSIBLE_IGNORE_ERRORS
|
||||
@@ -38,7 +38,7 @@ DOCUMENTATION = '''
|
||||
default: false
|
||||
conf_hostname:
|
||||
required: false
|
||||
description: Alternative Host Name; the current host name by default
|
||||
description: Alternative Host Name; the current host name by default.
|
||||
type: string
|
||||
env:
|
||||
- name: LOGDNA_HOSTNAME
|
||||
@@ -47,7 +47,7 @@ DOCUMENTATION = '''
|
||||
key: conf_hostname
|
||||
conf_tags:
|
||||
required: false
|
||||
description: Tags
|
||||
description: Tags.
|
||||
type: string
|
||||
env:
|
||||
- name: LOGDNA_TAGS
|
||||
@@ -111,7 +111,7 @@ def isJSONable(obj):
|
||||
class CallbackModule(CallbackBase):
|
||||
|
||||
CALLBACK_VERSION = 0.1
|
||||
CALLBACK_TYPE = 'aggregate'
|
||||
CALLBACK_TYPE = 'notification'
|
||||
CALLBACK_NAME = 'community.general.logdna'
|
||||
CALLBACK_NEEDS_WHITELIST = True
|
||||
|
||||
|
||||
@@ -13,15 +13,15 @@ DOCUMENTATION = '''
|
||||
short_description: Sends events to Logentries
|
||||
description:
|
||||
- This callback plugin will generate JSON objects and send them to Logentries via TCP for auditing/debugging purposes.
|
||||
- Before 2.4, if you wanted to use an ini configuration, the file must be placed in the same directory as this plugin and named logentries.ini
|
||||
- Before 2.4, if you wanted to use an ini configuration, the file must be placed in the same directory as this plugin and named C(logentries.ini).
|
||||
- In 2.4 and above you can just put it in the main Ansible configuration file.
|
||||
requirements:
|
||||
- whitelisting in configuration
|
||||
- certifi (python library)
|
||||
- flatdict (python library), if you want to use the 'flatten' option
|
||||
- certifi (Python library)
|
||||
- flatdict (Python library), if you want to use the 'flatten' option
|
||||
options:
|
||||
api:
|
||||
description: URI to the Logentries API
|
||||
description: URI to the Logentries API.
|
||||
env:
|
||||
- name: LOGENTRIES_API
|
||||
default: data.logentries.com
|
||||
@@ -29,7 +29,7 @@ DOCUMENTATION = '''
|
||||
- section: callback_logentries
|
||||
key: api
|
||||
port:
|
||||
description: HTTP port to use when connecting to the API
|
||||
description: HTTP port to use when connecting to the API.
|
||||
env:
|
||||
- name: LOGENTRIES_PORT
|
||||
default: 80
|
||||
@@ -37,7 +37,7 @@ DOCUMENTATION = '''
|
||||
- section: callback_logentries
|
||||
key: port
|
||||
tls_port:
|
||||
description: Port to use when connecting to the API when TLS is enabled
|
||||
description: Port to use when connecting to the API when TLS is enabled.
|
||||
env:
|
||||
- name: LOGENTRIES_TLS_PORT
|
||||
default: 443
|
||||
@@ -45,7 +45,7 @@ DOCUMENTATION = '''
|
||||
- section: callback_logentries
|
||||
key: tls_port
|
||||
token:
|
||||
description: The logentries "TCP token"
|
||||
description: The logentries C(TCP token).
|
||||
env:
|
||||
- name: LOGENTRIES_ANSIBLE_TOKEN
|
||||
required: true
|
||||
@@ -54,7 +54,7 @@ DOCUMENTATION = '''
|
||||
key: token
|
||||
use_tls:
|
||||
description:
|
||||
- Toggle to decide whether to use TLS to encrypt the communications with the API server
|
||||
- Toggle to decide whether to use TLS to encrypt the communications with the API server.
|
||||
env:
|
||||
- name: LOGENTRIES_USE_TLS
|
||||
default: false
|
||||
@@ -63,7 +63,7 @@ DOCUMENTATION = '''
|
||||
- section: callback_logentries
|
||||
key: use_tls
|
||||
flatten:
|
||||
description: flatten complex data structures into a single dictionary with complex keys
|
||||
description: Flatten complex data structures into a single dictionary with complex keys.
|
||||
type: boolean
|
||||
default: false
|
||||
env:
|
||||
|
||||
@@ -13,13 +13,13 @@ DOCUMENTATION = r'''
|
||||
type: notification
|
||||
short_description: Sends events to Logstash
|
||||
description:
|
||||
- This callback will report facts and task events to Logstash https://www.elastic.co/products/logstash
|
||||
- This callback will report facts and task events to Logstash U(https://www.elastic.co/products/logstash).
|
||||
requirements:
|
||||
- whitelisting in configuration
|
||||
- logstash (python library)
|
||||
- logstash (Python library)
|
||||
options:
|
||||
server:
|
||||
description: Address of the Logstash server
|
||||
description: Address of the Logstash server.
|
||||
env:
|
||||
- name: LOGSTASH_SERVER
|
||||
ini:
|
||||
@@ -28,7 +28,7 @@ DOCUMENTATION = r'''
|
||||
version_added: 1.0.0
|
||||
default: localhost
|
||||
port:
|
||||
description: Port on which logstash is listening
|
||||
description: Port on which logstash is listening.
|
||||
env:
|
||||
- name: LOGSTASH_PORT
|
||||
ini:
|
||||
@@ -37,7 +37,7 @@ DOCUMENTATION = r'''
|
||||
version_added: 1.0.0
|
||||
default: 5000
|
||||
type:
|
||||
description: Message type
|
||||
description: Message type.
|
||||
env:
|
||||
- name: LOGSTASH_TYPE
|
||||
ini:
|
||||
@@ -54,7 +54,7 @@ DOCUMENTATION = r'''
|
||||
env:
|
||||
- name: LOGSTASH_PRE_COMMAND
|
||||
format_version:
|
||||
description: Logging format
|
||||
description: Logging format.
|
||||
type: str
|
||||
version_added: 2.0.0
|
||||
ini:
|
||||
@@ -113,7 +113,7 @@ from ansible.plugins.callback import CallbackBase
|
||||
class CallbackModule(CallbackBase):
|
||||
|
||||
CALLBACK_VERSION = 2.0
|
||||
CALLBACK_TYPE = 'aggregate'
|
||||
CALLBACK_TYPE = 'notification'
|
||||
CALLBACK_NAME = 'community.general.logstash'
|
||||
CALLBACK_NEEDS_WHITELIST = True
|
||||
|
||||
|
||||
@@ -78,7 +78,6 @@ import re
|
||||
import email.utils
|
||||
import smtplib
|
||||
|
||||
from ansible.module_utils.six import string_types
|
||||
from ansible.module_utils.common.text.converters import to_bytes
|
||||
from ansible.parsing.ajson import AnsibleJSONEncoder
|
||||
from ansible.plugins.callback import CallbackBase
|
||||
|
||||
@@ -67,9 +67,6 @@ DOCUMENTATION = '''
|
||||
type: string
|
||||
'''
|
||||
|
||||
import os
|
||||
import json
|
||||
|
||||
from ansible.module_utils.six.moves.urllib.parse import urlencode
|
||||
from ansible.module_utils.common.text.converters import to_bytes
|
||||
from ansible.module_utils.urls import open_url
|
||||
|
||||
@@ -15,7 +15,7 @@ DOCUMENTATION = '''
|
||||
- set as main display callback
|
||||
short_description: Don't display stuff to screen
|
||||
description:
|
||||
- This callback prevents outputing events to screen
|
||||
- This callback prevents outputing events to screen.
|
||||
'''
|
||||
|
||||
from ansible.plugins.callback import CallbackBase
|
||||
|
||||
@@ -62,6 +62,17 @@ DOCUMENTATION = '''
|
||||
- The L(W3C Trace Context header traceparent,https://www.w3.org/TR/trace-context-1/#traceparent-header).
|
||||
env:
|
||||
- name: TRACEPARENT
|
||||
disable_logs:
|
||||
default: false
|
||||
type: bool
|
||||
description:
|
||||
- Disable sending logs.
|
||||
env:
|
||||
- name: ANSIBLE_OPENTELEMETRY_DISABLE_LOGS
|
||||
ini:
|
||||
- section: callback_opentelemetry
|
||||
key: disable_logs
|
||||
version_added: 5.8.0
|
||||
requirements:
|
||||
- opentelemetry-api (Python library)
|
||||
- opentelemetry-exporter-otlp (Python library)
|
||||
@@ -110,13 +121,32 @@ try:
|
||||
from opentelemetry.sdk.trace.export import (
|
||||
BatchSpanProcessor
|
||||
)
|
||||
from opentelemetry.util._time import _time_ns
|
||||
|
||||
# Support for opentelemetry-api <= 1.12
|
||||
try:
|
||||
from opentelemetry.util._time import _time_ns
|
||||
except ImportError as imp_exc:
|
||||
OTEL_LIBRARY_TIME_NS_ERROR = imp_exc
|
||||
else:
|
||||
OTEL_LIBRARY_TIME_NS_ERROR = None
|
||||
|
||||
except ImportError as imp_exc:
|
||||
OTEL_LIBRARY_IMPORT_ERROR = imp_exc
|
||||
OTEL_LIBRARY_TIME_NS_ERROR = imp_exc
|
||||
else:
|
||||
OTEL_LIBRARY_IMPORT_ERROR = None
|
||||
|
||||
|
||||
if sys.version_info >= (3, 7):
|
||||
time_ns = time.time_ns
|
||||
elif not OTEL_LIBRARY_TIME_NS_ERROR:
|
||||
time_ns = _time_ns
|
||||
else:
|
||||
def time_ns():
|
||||
# Support versions older than 3.7 with opentelemetry-api > 1.12
|
||||
return int(time.time() * 1e9)
|
||||
|
||||
|
||||
class TaskData:
|
||||
"""
|
||||
Data about an individual task.
|
||||
@@ -128,12 +158,10 @@ class TaskData:
|
||||
self.path = path
|
||||
self.play = play
|
||||
self.host_data = OrderedDict()
|
||||
if sys.version_info >= (3, 7):
|
||||
self.start = time.time_ns()
|
||||
else:
|
||||
self.start = _time_ns()
|
||||
self.start = time_ns()
|
||||
self.action = action
|
||||
self.args = args
|
||||
self.dump = None
|
||||
|
||||
def add_host(self, host):
|
||||
if host.uuid in self.host_data:
|
||||
@@ -156,10 +184,7 @@ class HostData:
|
||||
self.name = name
|
||||
self.status = status
|
||||
self.result = result
|
||||
if sys.version_info >= (3, 7):
|
||||
self.finish = time.time_ns()
|
||||
else:
|
||||
self.finish = _time_ns()
|
||||
self.finish = time_ns()
|
||||
|
||||
|
||||
class OpenTelemetrySource(object):
|
||||
@@ -199,7 +224,7 @@ class OpenTelemetrySource(object):
|
||||
|
||||
tasks_data[uuid] = TaskData(uuid, name, path, play_name, action, args)
|
||||
|
||||
def finish_task(self, tasks_data, status, result):
|
||||
def finish_task(self, tasks_data, status, result, dump):
|
||||
""" record the results of a task for a single host """
|
||||
|
||||
task_uuid = result._task._uuid
|
||||
@@ -216,9 +241,10 @@ class OpenTelemetrySource(object):
|
||||
if self.ansible_version is None and hasattr(result, '_task_fields') and result._task_fields['args'].get('_ansible_version'):
|
||||
self.ansible_version = result._task_fields['args'].get('_ansible_version')
|
||||
|
||||
task.dump = dump
|
||||
task.add_host(HostData(host_uuid, host_name, status, result))
|
||||
|
||||
def generate_distributed_traces(self, otel_service_name, ansible_playbook, tasks_data, status, traceparent):
|
||||
def generate_distributed_traces(self, otel_service_name, ansible_playbook, tasks_data, status, traceparent, disable_logs):
|
||||
""" generate distributed traces from the collected TaskData and HostData """
|
||||
|
||||
tasks = []
|
||||
@@ -254,9 +280,9 @@ class OpenTelemetrySource(object):
|
||||
for task in tasks:
|
||||
for host_uuid, host_data in task.host_data.items():
|
||||
with tracer.start_as_current_span(task.name, start_time=task.start, end_on_exit=False) as span:
|
||||
self.update_span_data(task, host_data, span)
|
||||
self.update_span_data(task, host_data, span, disable_logs)
|
||||
|
||||
def update_span_data(self, task_data, host_data, span):
|
||||
def update_span_data(self, task_data, host_data, span, disable_logs):
|
||||
""" update the span with the given TaskData and HostData """
|
||||
|
||||
name = '[%s] %s: %s' % (host_data.name, task_data.play, task_data.name)
|
||||
@@ -302,6 +328,9 @@ class OpenTelemetrySource(object):
|
||||
self.set_span_attribute(span, "ansible.task.host.status", host_data.status)
|
||||
# This will allow to enrich the service map
|
||||
self.add_attributes_for_service_map_if_possible(span, task_data)
|
||||
# Send logs
|
||||
if not disable_logs:
|
||||
span.add_event(task_data.dump)
|
||||
span.end(end_time=host_data.finish)
|
||||
|
||||
def set_span_attribute(self, span, attributeName, attributeValue):
|
||||
@@ -405,6 +434,7 @@ class CallbackModule(CallbackBase):
|
||||
def __init__(self, display=None):
|
||||
super(CallbackModule, self).__init__(display=display)
|
||||
self.hide_task_arguments = None
|
||||
self.disable_logs = None
|
||||
self.otel_service_name = None
|
||||
self.ansible_playbook = None
|
||||
self.play_name = None
|
||||
@@ -435,6 +465,8 @@ class CallbackModule(CallbackBase):
|
||||
|
||||
self.hide_task_arguments = self.get_option('hide_task_arguments')
|
||||
|
||||
self.disable_logs = self.get_option('disable_logs')
|
||||
|
||||
self.otel_service_name = self.get_option('otel_service_name')
|
||||
|
||||
if not self.otel_service_name:
|
||||
@@ -491,28 +523,32 @@ class CallbackModule(CallbackBase):
|
||||
self.opentelemetry.finish_task(
|
||||
self.tasks_data,
|
||||
status,
|
||||
result
|
||||
result,
|
||||
self._dump_results(result._result)
|
||||
)
|
||||
|
||||
def v2_runner_on_ok(self, result):
|
||||
self.opentelemetry.finish_task(
|
||||
self.tasks_data,
|
||||
'ok',
|
||||
result
|
||||
result,
|
||||
self._dump_results(result._result)
|
||||
)
|
||||
|
||||
def v2_runner_on_skipped(self, result):
|
||||
self.opentelemetry.finish_task(
|
||||
self.tasks_data,
|
||||
'skipped',
|
||||
result
|
||||
result,
|
||||
self._dump_results(result._result)
|
||||
)
|
||||
|
||||
def v2_playbook_on_include(self, included_file):
|
||||
self.opentelemetry.finish_task(
|
||||
self.tasks_data,
|
||||
'included',
|
||||
included_file
|
||||
included_file,
|
||||
""
|
||||
)
|
||||
|
||||
def v2_playbook_on_stats(self, stats):
|
||||
@@ -525,7 +561,8 @@ class CallbackModule(CallbackBase):
|
||||
self.ansible_playbook,
|
||||
self.tasks_data,
|
||||
status,
|
||||
self.traceparent
|
||||
self.traceparent,
|
||||
self.disable_logs
|
||||
)
|
||||
|
||||
def v2_runner_on_async_failed(self, result, **kwargs):
|
||||
|
||||
@@ -14,12 +14,12 @@ DOCUMENTATION = '''
|
||||
type: notification
|
||||
requirements:
|
||||
- whitelisting in configuration
|
||||
- the '/usr/bin/say' command line program (standard on macOS) or 'espeak' command line program
|
||||
- the C(/usr/bin/say) command line program (standard on macOS) or C(espeak) command line program
|
||||
short_description: notify using software speech synthesizer
|
||||
description:
|
||||
- This plugin will use the 'say' or 'espeak' program to "speak" about play events.
|
||||
- This plugin will use the C(say) or C(espeak) program to "speak" about play events.
|
||||
notes:
|
||||
- In 2.8, this callback has been renamed from C(osx_say) into M(community.general.say).
|
||||
- In Ansible 2.8, this callback has been renamed from C(osx_say) into M(community.general.say).
|
||||
'''
|
||||
|
||||
import platform
|
||||
|
||||
@@ -22,7 +22,7 @@ DOCUMENTATION = '''
|
||||
options:
|
||||
nocolor:
|
||||
default: false
|
||||
description: This setting allows suppressing colorizing output
|
||||
description: This setting allows suppressing colorizing output.
|
||||
env:
|
||||
- name: ANSIBLE_NOCOLOR
|
||||
- name: ANSIBLE_SELECTIVE_DONT_COLORIZE
|
||||
|
||||
@@ -18,11 +18,11 @@ DOCUMENTATION = '''
|
||||
short_description: Sends play events to a Slack channel
|
||||
description:
|
||||
- This is an ansible callback plugin that sends status updates to a Slack channel during playbook execution.
|
||||
- Before 2.4 only environment variables were available for configuring this plugin
|
||||
- Before Ansible 2.4 only environment variables were available for configuring this plugin.
|
||||
options:
|
||||
webhook_url:
|
||||
required: true
|
||||
description: Slack Webhook URL
|
||||
description: Slack Webhook URL.
|
||||
env:
|
||||
- name: SLACK_WEBHOOK_URL
|
||||
ini:
|
||||
@@ -45,7 +45,7 @@ DOCUMENTATION = '''
|
||||
- section: callback_slack
|
||||
key: username
|
||||
validate_certs:
|
||||
description: validate the SSL certificate of the Slack server. (For HTTPS URLs)
|
||||
description: Validate the SSL certificate of the Slack server for HTTPS URLs.
|
||||
env:
|
||||
- name: SLACK_VALIDATE_CERTS
|
||||
ini:
|
||||
|
||||
@@ -8,27 +8,27 @@ __metaclass__ = type
|
||||
|
||||
DOCUMENTATION = '''
|
||||
name: splunk
|
||||
type: aggregate
|
||||
type: notification
|
||||
short_description: Sends task result events to Splunk HTTP Event Collector
|
||||
author: "Stuart Hirst (!UNKNOWN) <support@convergingdata.com>"
|
||||
description:
|
||||
- This callback plugin will send task results as JSON formatted events to a Splunk HTTP collector.
|
||||
- The companion Splunk Monitoring & Diagnostics App is available here "https://splunkbase.splunk.com/app/4023/"
|
||||
- The companion Splunk Monitoring & Diagnostics App is available here U(https://splunkbase.splunk.com/app/4023/).
|
||||
- Credit to "Ryan Currah (@ryancurrah)" for original source upon which this is based.
|
||||
requirements:
|
||||
- Whitelisting this callback plugin
|
||||
- 'Create a HTTP Event Collector in Splunk'
|
||||
- 'Define the url and token in ansible.cfg'
|
||||
- 'Define the URL and token in C(ansible.cfg)'
|
||||
options:
|
||||
url:
|
||||
description: URL to the Splunk HTTP collector source
|
||||
description: URL to the Splunk HTTP collector source.
|
||||
env:
|
||||
- name: SPLUNK_URL
|
||||
ini:
|
||||
- section: callback_splunk
|
||||
key: url
|
||||
authtoken:
|
||||
description: Token to authenticate the connection to the Splunk HTTP collector
|
||||
description: Token to authenticate the connection to the Splunk HTTP collector.
|
||||
env:
|
||||
- name: SPLUNK_AUTHTOKEN
|
||||
ini:
|
||||
@@ -48,7 +48,7 @@ DOCUMENTATION = '''
|
||||
version_added: '1.0.0'
|
||||
include_milliseconds:
|
||||
description: Whether to include milliseconds as part of the generated timestamp field in the event
|
||||
sent to the Splunk HTTP collector
|
||||
sent to the Splunk HTTP collector.
|
||||
env:
|
||||
- name: SPLUNK_INCLUDE_MILLISECONDS
|
||||
ini:
|
||||
@@ -165,7 +165,7 @@ class SplunkHTTPCollectorSource(object):
|
||||
|
||||
class CallbackModule(CallbackBase):
|
||||
CALLBACK_VERSION = 2.0
|
||||
CALLBACK_TYPE = 'aggregate'
|
||||
CALLBACK_TYPE = 'notification'
|
||||
CALLBACK_NAME = 'community.general.splunk'
|
||||
CALLBACK_NEEDS_WHITELIST = True
|
||||
|
||||
|
||||
@@ -8,18 +8,18 @@ __metaclass__ = type
|
||||
|
||||
DOCUMENTATION = '''
|
||||
name: sumologic
|
||||
type: aggregate
|
||||
type: notification
|
||||
short_description: Sends task result events to Sumologic
|
||||
author: "Ryan Currah (@ryancurrah)"
|
||||
description:
|
||||
- This callback plugin will send task results as JSON formatted events to a Sumologic HTTP collector source
|
||||
- This callback plugin will send task results as JSON formatted events to a Sumologic HTTP collector source.
|
||||
requirements:
|
||||
- Whitelisting this callback plugin
|
||||
- 'Create a HTTP collector source in Sumologic and specify a custom timestamp format of C(yyyy-MM-dd HH:mm:ss ZZZZ) and a custom timestamp locator
|
||||
of C("timestamp": "(.*)")'
|
||||
options:
|
||||
url:
|
||||
description: URL to the Sumologic HTTP collector source
|
||||
description: URL to the Sumologic HTTP collector source.
|
||||
env:
|
||||
- name: SUMOLOGIC_URL
|
||||
ini:
|
||||
@@ -28,7 +28,7 @@ options:
|
||||
'''
|
||||
|
||||
EXAMPLES = '''
|
||||
examples: >
|
||||
examples: |
|
||||
To enable, add this to your ansible.cfg file in the defaults block
|
||||
[defaults]
|
||||
callback_whitelist = community.general.sumologic
|
||||
@@ -111,7 +111,7 @@ class SumologicHTTPCollectorSource(object):
|
||||
|
||||
class CallbackModule(CallbackBase):
|
||||
CALLBACK_VERSION = 2.0
|
||||
CALLBACK_TYPE = 'aggregate'
|
||||
CALLBACK_TYPE = 'notification'
|
||||
CALLBACK_NAME = 'community.general.sumologic'
|
||||
CALLBACK_NEEDS_WHITELIST = True
|
||||
|
||||
|
||||
@@ -15,11 +15,11 @@ DOCUMENTATION = '''
|
||||
- whitelist in configuration
|
||||
short_description: sends JSON events to syslog
|
||||
description:
|
||||
- This plugin logs ansible-playbook and ansible runs to a syslog server in JSON format
|
||||
- Before Ansible 2.9 only environment variables were available for configuration
|
||||
- This plugin logs ansible-playbook and ansible runs to a syslog server in JSON format.
|
||||
- Before Ansible 2.9 only environment variables were available for configuration.
|
||||
options:
|
||||
server:
|
||||
description: syslog server that will receive the event
|
||||
description: Syslog server that will receive the event.
|
||||
env:
|
||||
- name: SYSLOG_SERVER
|
||||
default: localhost
|
||||
@@ -27,7 +27,7 @@ DOCUMENTATION = '''
|
||||
- section: callback_syslog_json
|
||||
key: syslog_server
|
||||
port:
|
||||
description: port on which the syslog server is listening
|
||||
description: Port on which the syslog server is listening.
|
||||
env:
|
||||
- name: SYSLOG_PORT
|
||||
default: 514
|
||||
@@ -35,7 +35,7 @@ DOCUMENTATION = '''
|
||||
- section: callback_syslog_json
|
||||
key: syslog_port
|
||||
facility:
|
||||
description: syslog facility to log as
|
||||
description: Syslog facility to log as.
|
||||
env:
|
||||
- name: SYSLOG_FACILITY
|
||||
default: user
|
||||
@@ -54,9 +54,6 @@ DOCUMENTATION = '''
|
||||
version_added: 4.5.0
|
||||
'''
|
||||
|
||||
import os
|
||||
import json
|
||||
|
||||
import logging
|
||||
import logging.handlers
|
||||
|
||||
@@ -71,7 +68,7 @@ class CallbackModule(CallbackBase):
|
||||
"""
|
||||
|
||||
CALLBACK_VERSION = 2.0
|
||||
CALLBACK_TYPE = 'aggregate'
|
||||
CALLBACK_TYPE = 'notification'
|
||||
CALLBACK_NAME = 'community.general.syslog_json'
|
||||
CALLBACK_NEEDS_WHITELIST = True
|
||||
|
||||
|
||||
@@ -63,7 +63,7 @@ class CallbackModule(CallbackModule_default):
|
||||
|
||||
def _preprocess_result(self, result):
|
||||
self.delegated_vars = result._result.get('_ansible_delegated_vars', None)
|
||||
self._handle_exception(result._result, use_stderr=self.display_failed_stderr)
|
||||
self._handle_exception(result._result, use_stderr=self.get_option('display_failed_stderr'))
|
||||
self._handle_warnings(result._result)
|
||||
|
||||
def _process_result_output(self, result, msg):
|
||||
@@ -109,7 +109,7 @@ class CallbackModule(CallbackModule_default):
|
||||
self._display.display(msg)
|
||||
|
||||
def v2_runner_on_skipped(self, result, ignore_errors=False):
|
||||
if self.display_skipped_hosts:
|
||||
if self.get_option('display_skipped_hosts'):
|
||||
self._preprocess_result(result)
|
||||
display_color = C.COLOR_SKIP
|
||||
msg = "skipped"
|
||||
@@ -128,7 +128,7 @@ class CallbackModule(CallbackModule_default):
|
||||
msg += " | item: %s" % (item_value,)
|
||||
|
||||
task_result = self._process_result_output(result, msg)
|
||||
self._display.display(" " + task_result, display_color, stderr=self.display_failed_stderr)
|
||||
self._display.display(" " + task_result, display_color, stderr=self.get_option('display_failed_stderr'))
|
||||
|
||||
def v2_runner_on_ok(self, result, msg="ok", display_color=C.COLOR_OK):
|
||||
self._preprocess_result(result)
|
||||
@@ -142,7 +142,7 @@ class CallbackModule(CallbackModule_default):
|
||||
display_color = C.COLOR_CHANGED
|
||||
task_result = self._process_result_output(result, msg)
|
||||
self._display.display(" " + task_result, display_color)
|
||||
elif self.display_ok_hosts:
|
||||
elif self.get_option('display_ok_hosts'):
|
||||
task_result = self._process_result_output(result, msg)
|
||||
self._display.display(" " + task_result, display_color)
|
||||
|
||||
@@ -162,7 +162,7 @@ class CallbackModule(CallbackModule_default):
|
||||
display_color = C.COLOR_UNREACHABLE
|
||||
task_result = self._process_result_output(result, msg)
|
||||
|
||||
self._display.display(" " + task_result, display_color, stderr=self.display_failed_stderr)
|
||||
self._display.display(" " + task_result, display_color, stderr=self.get_option('display_failed_stderr'))
|
||||
|
||||
def v2_on_file_diff(self, result):
|
||||
if result._task.loop and 'results' in result._result:
|
||||
@@ -205,7 +205,7 @@ class CallbackModule(CallbackModule_default):
|
||||
colorize(u'ignored', t['ignored'], None)),
|
||||
log_only=True
|
||||
)
|
||||
if stats.custom and self.show_custom_stats:
|
||||
if stats.custom and self.get_option('show_custom_stats'):
|
||||
self._display.banner("CUSTOM STATS: ")
|
||||
# per host
|
||||
# TODO: come up with 'pretty format'
|
||||
|
||||
@@ -11,7 +11,7 @@ DOCUMENTATION = '''
|
||||
author: Unknown (!UNKNOWN)
|
||||
name: yaml
|
||||
type: stdout
|
||||
short_description: yaml-ized Ansible screen output
|
||||
short_description: YAML-ized Ansible screen output
|
||||
description:
|
||||
- Ansible output that can be quite a bit easier to read than the
|
||||
default JSON formatting.
|
||||
@@ -25,12 +25,10 @@ import yaml
|
||||
import json
|
||||
import re
|
||||
import string
|
||||
import sys
|
||||
|
||||
from ansible.module_utils.common.text.converters import to_bytes, to_text
|
||||
from ansible.module_utils.six import string_types
|
||||
from ansible.module_utils.common.text.converters import to_text
|
||||
from ansible.parsing.yaml.dumper import AnsibleDumper
|
||||
from ansible.plugins.callback import CallbackBase, strip_internal_keys, module_response_deepcopy
|
||||
from ansible.plugins.callback import strip_internal_keys, module_response_deepcopy
|
||||
from ansible.plugins.callback.default import CallbackModule as Default
|
||||
|
||||
|
||||
|
||||
@@ -22,6 +22,7 @@ DOCUMENTATION = '''
|
||||
- The path of the chroot you want to access.
|
||||
default: inventory_hostname
|
||||
vars:
|
||||
- name: inventory_hostname
|
||||
- name: ansible_host
|
||||
executable:
|
||||
description:
|
||||
|
||||
@@ -22,6 +22,7 @@ DOCUMENTATION = '''
|
||||
- Path to the jail
|
||||
default: inventory_hostname
|
||||
vars:
|
||||
- name: inventory_hostname
|
||||
- name: ansible_host
|
||||
- name: ansible_jail_host
|
||||
remote_user:
|
||||
|
||||
@@ -43,6 +43,7 @@ options:
|
||||
- The path on which InfluxDB server is accessible
|
||||
- Only available when using python-influxdb >= 5.1.0
|
||||
type: str
|
||||
default: ''
|
||||
version_added: '0.2.0'
|
||||
validate_certs:
|
||||
description:
|
||||
@@ -80,4 +81,5 @@ options:
|
||||
description:
|
||||
- HTTP(S) proxy to use for Requests to connect to InfluxDB server.
|
||||
type: dict
|
||||
default: {}
|
||||
'''
|
||||
|
||||
@@ -23,6 +23,7 @@ options:
|
||||
description:
|
||||
- The password to use with I(bind_dn).
|
||||
type: str
|
||||
default: ''
|
||||
dn:
|
||||
required: true
|
||||
description:
|
||||
@@ -59,7 +60,7 @@ options:
|
||||
sasl_class:
|
||||
description:
|
||||
- The class to use for SASL authentication.
|
||||
- possible choices are C(external), C(gssapi).
|
||||
- Possible choices are C(external), C(gssapi).
|
||||
type: str
|
||||
choices: ['external', 'gssapi']
|
||||
default: external
|
||||
|
||||
33
plugins/doc_fragments/scaleway_waitable_resource.py
Normal file
33
plugins/doc_fragments/scaleway_waitable_resource.py
Normal file
@@ -0,0 +1,33 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
# Copyright (c) 2022, Guillaume MARTINEZ <lunik@tiwabbit.fr>
|
||||
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||
# SPDX-License-Identifier: GPL-3.0-or-later
|
||||
|
||||
from __future__ import (absolute_import, division, print_function)
|
||||
__metaclass__ = type
|
||||
|
||||
|
||||
class ModuleDocFragment(object):
|
||||
|
||||
# Standard documentation fragment
|
||||
DOCUMENTATION = r'''
|
||||
options:
|
||||
wait:
|
||||
description:
|
||||
- Wait for the resource to reach its desired state before returning.
|
||||
type: bool
|
||||
default: true
|
||||
wait_timeout:
|
||||
type: int
|
||||
description:
|
||||
- Time to wait for the resource to reach the expected state.
|
||||
required: false
|
||||
default: 300
|
||||
wait_sleep_time:
|
||||
type: int
|
||||
description:
|
||||
- Time to wait before every attempt to check the state of the resource.
|
||||
required: false
|
||||
default: 3
|
||||
'''
|
||||
@@ -17,6 +17,7 @@ options:
|
||||
- Is needed for some modules
|
||||
type: dict
|
||||
required: false
|
||||
default: {}
|
||||
utm_host:
|
||||
description:
|
||||
- The REST Endpoint of the Sophos UTM.
|
||||
|
||||
@@ -26,6 +26,7 @@ DOCUMENTATION = '''
|
||||
description:
|
||||
- The correct parser for the input data.
|
||||
- For example C(ifconfig).
|
||||
- "Note: use underscores instead of dashes (if any) in the parser module name."
|
||||
- See U(https://github.com/kellyjonbrazil/jc#parsers) for the latest list of parsers.
|
||||
type: string
|
||||
required: true
|
||||
@@ -38,10 +39,16 @@ DOCUMENTATION = '''
|
||||
type: boolean
|
||||
default: false
|
||||
requirements:
|
||||
- jc (https://github.com/kellyjonbrazil/jc)
|
||||
- jc installed as a Python library (U(https://pypi.org/project/jc/))
|
||||
'''
|
||||
|
||||
EXAMPLES = '''
|
||||
- name: Install the prereqs of the jc filter (jc Python package) on the Ansible controller
|
||||
delegate_to: localhost
|
||||
ansible.builtin.pip:
|
||||
name: jc
|
||||
state: present
|
||||
|
||||
- name: Run command
|
||||
ansible.builtin.command: uname -a
|
||||
register: result
|
||||
@@ -73,13 +80,13 @@ from ansible.errors import AnsibleError, AnsibleFilterError
|
||||
import importlib
|
||||
|
||||
try:
|
||||
import jc
|
||||
import jc # noqa: F401, pylint: disable=unused-import
|
||||
HAS_LIB = True
|
||||
except ImportError:
|
||||
HAS_LIB = False
|
||||
|
||||
|
||||
def jc(data, parser, quiet=True, raw=False):
|
||||
def jc_filter(data, parser, quiet=True, raw=False):
|
||||
"""Convert returned command output to JSON using the JC library
|
||||
|
||||
Arguments:
|
||||
@@ -94,15 +101,19 @@ def jc(data, parser, quiet=True, raw=False):
|
||||
dictionary or list of dictionaries
|
||||
|
||||
Example:
|
||||
|
||||
- name: run date command
|
||||
hosts: ubuntu
|
||||
tasks:
|
||||
- shell: date
|
||||
- name: install the prereqs of the jc filter (jc Python package) on the Ansible controller
|
||||
delegate_to: localhost
|
||||
ansible.builtin.pip:
|
||||
name: jc
|
||||
state: present
|
||||
- ansible.builtin.shell: date
|
||||
register: result
|
||||
- set_fact:
|
||||
- ansible.builtin.set_fact:
|
||||
myvar: "{{ result.stdout | community.general.jc('date') }}"
|
||||
- debug:
|
||||
- ansible.builtin.debug:
|
||||
msg: "{{ myvar }}"
|
||||
|
||||
produces:
|
||||
@@ -124,7 +135,7 @@ def jc(data, parser, quiet=True, raw=False):
|
||||
"""
|
||||
|
||||
if not HAS_LIB:
|
||||
raise AnsibleError('You need to install "jc" prior to running jc filter')
|
||||
raise AnsibleError('You need to install "jc" as a Python library on the Ansible controller prior to running jc filter')
|
||||
|
||||
try:
|
||||
jc_parser = importlib.import_module('jc.parsers.' + parser)
|
||||
@@ -139,5 +150,5 @@ class FilterModule(object):
|
||||
|
||||
def filters(self):
|
||||
return {
|
||||
'jc': jc
|
||||
'jc': jc_filter,
|
||||
}
|
||||
|
||||
@@ -102,8 +102,6 @@ from ansible.errors import AnsibleFilterError
|
||||
from ansible.module_utils.six import string_types
|
||||
from ansible.module_utils.common._collections_compat import Mapping, Sequence
|
||||
from ansible.utils.vars import merge_hash
|
||||
from ansible.release import __version__ as ansible_version
|
||||
from ansible_collections.community.general.plugins.module_utils.version import LooseVersion
|
||||
|
||||
from collections import defaultdict
|
||||
from operator import itemgetter
|
||||
|
||||
@@ -123,8 +123,7 @@ compose:
|
||||
|
||||
import os
|
||||
|
||||
from ansible.errors import AnsibleError, AnsibleParserError
|
||||
from ansible.module_utils.six import string_types
|
||||
from ansible.errors import AnsibleError
|
||||
from ansible.plugins.inventory import BaseInventoryPlugin, Constructable, Cacheable
|
||||
from ansible.template import Templar
|
||||
|
||||
|
||||
@@ -142,12 +142,10 @@ groupby:
|
||||
attribute: 666
|
||||
'''
|
||||
|
||||
import binascii
|
||||
import json
|
||||
import re
|
||||
import time
|
||||
import os
|
||||
import socket
|
||||
from ansible.plugins.inventory import BaseInventoryPlugin
|
||||
from ansible.module_utils.common.text.converters import to_native, to_text
|
||||
from ansible.module_utils.common.dict_transformations import dict_merge
|
||||
|
||||
@@ -65,7 +65,7 @@ from sys import version as python_version
|
||||
from ansible.errors import AnsibleError
|
||||
from ansible.module_utils.urls import open_url
|
||||
from ansible.plugins.inventory import BaseInventoryPlugin
|
||||
from ansible.module_utils.common.text.converters import to_native, to_text
|
||||
from ansible.module_utils.common.text.converters import to_text
|
||||
from ansible.module_utils.ansible_release import __version__ as ansible_version
|
||||
from ansible.module_utils.six.moves.urllib.parse import urljoin
|
||||
|
||||
|
||||
@@ -410,7 +410,7 @@ class InventoryModule(BaseInventoryPlugin, Constructable, Cacheable):
|
||||
stripped_value = value.strip()
|
||||
if stripped_value:
|
||||
parsed_key = key + "_parsed"
|
||||
properties[parsed_key] = [tag.strip() for tag in stripped_value.split(",")]
|
||||
properties[parsed_key] = [tag.strip() for tag in stripped_value.replace(',', ';').split(";")]
|
||||
|
||||
# The first field in the agent string tells you whether the agent is enabled
|
||||
# the rest of the comma separated string is extra config for the agent.
|
||||
|
||||
@@ -186,10 +186,13 @@ class InventoryModule(BaseInventoryPlugin, Constructable, Cacheable):
|
||||
else:
|
||||
# found vars, accumulate in hostvars for clean inventory set
|
||||
pref_k = 'vbox_' + k.strip().replace(' ', '_')
|
||||
if k.startswith(' '):
|
||||
if prevkey not in hostvars[current_host]:
|
||||
leading_spaces = len(k) - len(k.lstrip(' '))
|
||||
if 0 < leading_spaces <= 2:
|
||||
if prevkey not in hostvars[current_host] or not isinstance(hostvars[current_host][prevkey], dict):
|
||||
hostvars[current_host][prevkey] = {}
|
||||
hostvars[current_host][prevkey][pref_k] = v
|
||||
elif leading_spaces > 2:
|
||||
continue
|
||||
else:
|
||||
if v != '':
|
||||
hostvars[current_host][pref_k] = v
|
||||
|
||||
@@ -78,6 +78,7 @@ compose:
|
||||
|
||||
import json
|
||||
import ssl
|
||||
from time import sleep
|
||||
|
||||
from ansible.errors import AnsibleError
|
||||
from ansible.plugins.inventory import BaseInventoryPlugin, Constructable, Cacheable
|
||||
@@ -138,21 +139,42 @@ class InventoryModule(BaseInventoryPlugin, Constructable, Cacheable):
|
||||
self.conn = create_connection(
|
||||
'{0}://{1}/api/'.format(proto, xoa_api_host), sslopt=sslopt)
|
||||
|
||||
CALL_TIMEOUT = 100
|
||||
"""Number of 1/10ths of a second to wait before method call times out."""
|
||||
|
||||
def call(self, method, params):
|
||||
"""Calls a method on the XO server with the provided parameters."""
|
||||
id = self.pointer
|
||||
self.conn.send(json.dumps({
|
||||
'id': id,
|
||||
'jsonrpc': '2.0',
|
||||
'method': method,
|
||||
'params': params
|
||||
}))
|
||||
|
||||
waited = 0
|
||||
while waited < self.CALL_TIMEOUT:
|
||||
response = json.loads(self.conn.recv())
|
||||
if 'id' in response and response['id'] == id:
|
||||
return response
|
||||
else:
|
||||
sleep(0.1)
|
||||
waited += 1
|
||||
|
||||
raise AnsibleError(
|
||||
'Method call {method} timed out after {timeout} seconds.'.format(method=method, timeout=self.CALL_TIMEOUT / 10))
|
||||
|
||||
def login(self, user, password):
|
||||
payload = {'id': self.pointer, 'jsonrpc': '2.0', 'method': 'session.signIn', 'params': {
|
||||
'username': user, 'password': password}}
|
||||
self.conn.send(json.dumps(payload))
|
||||
result = json.loads(self.conn.recv())
|
||||
result = self.call('session.signIn', {
|
||||
'username': user, 'password': password
|
||||
})
|
||||
|
||||
if 'error' in result:
|
||||
raise AnsibleError(
|
||||
'Could not connect: {0}'.format(result['error']))
|
||||
|
||||
def get_object(self, name):
|
||||
payload = {'id': self.pointer, 'jsonrpc': '2.0',
|
||||
'method': 'xo.getAllObjects', 'params': {'filter': {'type': name}}}
|
||||
self.conn.send(json.dumps(payload))
|
||||
answer = json.loads(self.conn.recv())
|
||||
answer = self.call('xo.getAllObjects', {'filter': {'type': name}})
|
||||
|
||||
if 'error' in answer:
|
||||
raise AnsibleError(
|
||||
|
||||
@@ -22,6 +22,11 @@ DOCUMENTATION = """
|
||||
required: true
|
||||
type: list
|
||||
elements: str
|
||||
search:
|
||||
description: Field to retrieve, for example C(name) or C(id).
|
||||
type: str
|
||||
default: name
|
||||
version_added: 5.7.0
|
||||
field:
|
||||
description: Field to fetch; leave unset to fetch whole response.
|
||||
type: str
|
||||
@@ -33,6 +38,11 @@ EXAMPLES = """
|
||||
msg: >-
|
||||
{{ lookup('community.general.bitwarden', 'a_test', field='password') }}
|
||||
|
||||
- name: "Get 'password' from Bitwarden record with id 'bafba515-af11-47e6-abe3-af1200cd18b2'"
|
||||
ansible.builtin.debug:
|
||||
msg: >-
|
||||
{{ lookup('community.general.bitwarden', 'bafba515-af11-47e6-abe3-af1200cd18b2', search='id', field='password') }}
|
||||
|
||||
- name: "Get full Bitwarden record named 'a_test'"
|
||||
ansible.builtin.debug:
|
||||
msg: >-
|
||||
@@ -68,7 +78,7 @@ class Bitwarden(object):
|
||||
return self._cli_path
|
||||
|
||||
@property
|
||||
def logged_in(self):
|
||||
def unlocked(self):
|
||||
out, err = self._run(['status'], stdin="")
|
||||
decoded = AnsibleJSONDecoder().raw_decode(out)[0]
|
||||
return decoded['status'] == 'unlocked'
|
||||
@@ -81,7 +91,7 @@ class Bitwarden(object):
|
||||
raise BitwardenException(err)
|
||||
return to_text(out, errors='surrogate_or_strict'), to_text(err, errors='surrogate_or_strict')
|
||||
|
||||
def _get_matches(self, search_value, search_field="name"):
|
||||
def _get_matches(self, search_value, search_field):
|
||||
"""Return matching records whose search_field is equal to key.
|
||||
"""
|
||||
out, err = self._run(['list', 'items', '--search', search_value])
|
||||
@@ -97,7 +107,7 @@ class Bitwarden(object):
|
||||
|
||||
If field is None, return the whole record for each match.
|
||||
"""
|
||||
matches = self._get_matches(search_value)
|
||||
matches = self._get_matches(search_value, search_field)
|
||||
|
||||
if field:
|
||||
return [match['login'][field] for match in matches]
|
||||
@@ -110,10 +120,11 @@ class LookupModule(LookupBase):
|
||||
def run(self, terms, variables=None, **kwargs):
|
||||
self.set_options(var_options=variables, direct=kwargs)
|
||||
field = self.get_option('field')
|
||||
if not _bitwarden.logged_in:
|
||||
raise AnsibleError("Not logged into Bitwarden. Run 'bw login'.")
|
||||
search_field = self.get_option('search')
|
||||
if not _bitwarden.unlocked:
|
||||
raise AnsibleError("Bitwarden Vault locked. Run 'bw unlock'.")
|
||||
|
||||
return [_bitwarden.get_field(field, term) for term in terms]
|
||||
return [_bitwarden.get_field(field, term, search_field) for term in terms]
|
||||
|
||||
|
||||
_bitwarden = Bitwarden()
|
||||
|
||||
@@ -64,7 +64,12 @@ class LookupModule(LookupBase):
|
||||
"""
|
||||
results = []
|
||||
for x in terms:
|
||||
intermediate = listify_lookup_plugin_terms(x, templar=self._templar, loader=self._loader)
|
||||
try:
|
||||
intermediate = listify_lookup_plugin_terms(x, templar=self._templar)
|
||||
except TypeError:
|
||||
# The loader argument is deprecated in ansible-core 2.14+. Fall back to
|
||||
# pre-2.14 behavior for older ansible-core versions.
|
||||
intermediate = listify_lookup_plugin_terms(x, templar=self._templar, loader=self._loader)
|
||||
results.append(intermediate)
|
||||
return results
|
||||
|
||||
|
||||
@@ -105,7 +105,6 @@ RETURN = """
|
||||
type: dict
|
||||
"""
|
||||
|
||||
import os
|
||||
from ansible.module_utils.six.moves.urllib.parse import urlparse
|
||||
from ansible.errors import AnsibleError, AnsibleAssertionError
|
||||
from ansible.plugins.lookup import LookupBase
|
||||
|
||||
@@ -80,7 +80,6 @@ from subprocess import Popen
|
||||
|
||||
from ansible.errors import AnsibleError
|
||||
from ansible.plugins.lookup import LookupBase
|
||||
from ansible.parsing.splitter import parse_kv
|
||||
from ansible.module_utils.common.text.converters import to_bytes, to_text, to_native
|
||||
from ansible.utils.display import Display
|
||||
|
||||
|
||||
@@ -125,8 +125,16 @@ from ansible.errors import AnsibleLookupError
|
||||
from ansible.module_utils.common._collections_compat import Mapping, Sequence
|
||||
from ansible.module_utils.six import string_types
|
||||
from ansible.plugins.lookup import LookupBase
|
||||
from ansible.release import __version__ as ansible_version
|
||||
from ansible.template import Templar
|
||||
|
||||
from ansible_collections.community.general.plugins.module_utils.version import LooseVersion
|
||||
|
||||
|
||||
# Whether Templar has a cache, which can be controlled by Templar.template()'s cache option.
|
||||
# The cache was removed for ansible-core 2.14 (https://github.com/ansible/ansible/pull/78419)
|
||||
_TEMPLAR_HAS_TEMPLATE_CACHE = LooseVersion(ansible_version) < LooseVersion('2.14.0')
|
||||
|
||||
|
||||
class LookupModule(LookupBase):
|
||||
def __evaluate(self, expression, templar, variables):
|
||||
@@ -136,7 +144,10 @@ class LookupModule(LookupBase):
|
||||
``variables`` are the variables to use.
|
||||
"""
|
||||
templar.available_variables = variables or {}
|
||||
return templar.template("{0}{1}{2}".format("{{", expression, "}}"), cache=False)
|
||||
expression = "{0}{1}{2}".format("{{", expression, "}}")
|
||||
if _TEMPLAR_HAS_TEMPLATE_CACHE:
|
||||
return templar.template(expression, cache=False)
|
||||
return templar.template(expression)
|
||||
|
||||
def __process(self, result, terms, index, current, templar, variables):
|
||||
"""Fills ``result`` list with evaluated items.
|
||||
|
||||
@@ -218,7 +218,7 @@ def make_rdata_dict(rdata):
|
||||
NSEC3PARAM: ['algorithm', 'flags', 'iterations', 'salt'],
|
||||
PTR: ['target'],
|
||||
RP: ['mbox', 'txt'],
|
||||
# RRSIG: ['algorithm', 'labels', 'original_ttl', 'expiration', 'inception', 'signature'],
|
||||
# RRSIG: ['type_covered', 'algorithm', 'labels', 'original_ttl', 'expiration', 'inception', 'key_tag', 'signer', 'signature'],
|
||||
SOA: ['mname', 'rname', 'serial', 'refresh', 'retry', 'expire', 'minimum'],
|
||||
SPF: ['strings'],
|
||||
SRV: ['priority', 'weight', 'port', 'target'],
|
||||
@@ -241,6 +241,8 @@ def make_rdata_dict(rdata):
|
||||
val = dns.rdata._hexify(rdata.digest).replace(' ', '')
|
||||
if rdata.rdtype == DS and f == 'digest':
|
||||
val = dns.rdata._hexify(rdata.digest).replace(' ', '')
|
||||
if rdata.rdtype == DNSKEY and f == 'algorithm':
|
||||
val = int(val)
|
||||
if rdata.rdtype == DNSKEY and f == 'key':
|
||||
val = dns.rdata._base64ify(rdata.key).replace(' ', '')
|
||||
if rdata.rdtype == NSEC3PARAM and f == 'salt':
|
||||
|
||||
@@ -136,12 +136,11 @@ RETURN = '''
|
||||
|
||||
import re
|
||||
|
||||
from ansible.plugins.lookup import LookupBase
|
||||
from ansible.utils.display import Display
|
||||
from ansible.errors import AnsibleLookupError
|
||||
from ansible.module_utils.basic import missing_required_lib
|
||||
from ansible.module_utils.common.text.converters import to_native
|
||||
from ansible.plugins.lookup import LookupBase
|
||||
from ansible.errors import AnsibleError, AnsibleLookupError
|
||||
from ansible.utils.display import Display
|
||||
|
||||
try:
|
||||
import etcd3
|
||||
|
||||
@@ -64,7 +64,12 @@ class LookupModule(LookupBase):
|
||||
|
||||
if isinstance(term, string_types):
|
||||
# convert a variable to a list
|
||||
term2 = listify_lookup_plugin_terms(term, templar=self._templar, loader=self._loader)
|
||||
try:
|
||||
term2 = listify_lookup_plugin_terms(term, templar=self._templar)
|
||||
except TypeError:
|
||||
# The loader argument is deprecated in ansible-core 2.14+. Fall back to
|
||||
# pre-2.14 behavior for older ansible-core versions.
|
||||
term2 = listify_lookup_plugin_terms(term, templar=self._templar, loader=self._loader)
|
||||
# but avoid converting a plain string to a list of one string
|
||||
if term2 != [term]:
|
||||
term = term2
|
||||
|
||||
@@ -68,8 +68,8 @@ from ansible.module_utils import six
|
||||
from ansible.utils.display import Display
|
||||
from traceback import format_exception
|
||||
import json
|
||||
import sys
|
||||
import os
|
||||
import sys
|
||||
|
||||
display = Display()
|
||||
|
||||
|
||||
@@ -74,18 +74,18 @@ EXAMPLES = """
|
||||
|
||||
- name: Retrieve password for HAL when not signed in to 1Password
|
||||
ansible.builtin.debug:
|
||||
var: lookup('community.general.onepassword'
|
||||
'HAL 9000'
|
||||
subdomain='Discovery'
|
||||
var: lookup('community.general.onepassword',
|
||||
'HAL 9000',
|
||||
subdomain='Discovery',
|
||||
master_password=vault_master_password)
|
||||
|
||||
- name: Retrieve password for HAL when never signed in to 1Password
|
||||
ansible.builtin.debug:
|
||||
var: lookup('community.general.onepassword'
|
||||
'HAL 9000'
|
||||
subdomain='Discovery'
|
||||
master_password=vault_master_password
|
||||
username='tweety@acme.com'
|
||||
var: lookup('community.general.onepassword',
|
||||
'HAL 9000',
|
||||
subdomain='Discovery',
|
||||
master_password=vault_master_password,
|
||||
username='tweety@acme.com',
|
||||
secret_key=vault_secret_key)
|
||||
"""
|
||||
|
||||
|
||||
@@ -203,7 +203,6 @@ import time
|
||||
import yaml
|
||||
|
||||
from ansible.errors import AnsibleError, AnsibleAssertionError
|
||||
from ansible.module_utils.common.file import FileLock
|
||||
from ansible.module_utils.common.text.converters import to_bytes, to_native, to_text
|
||||
from ansible.module_utils.parsing.convert_bool import boolean
|
||||
from ansible.utils.display import Display
|
||||
@@ -211,6 +210,8 @@ from ansible.utils.encrypt import random_password
|
||||
from ansible.plugins.lookup import LookupBase
|
||||
from ansible import constants as C
|
||||
|
||||
from ansible_collections.community.general.plugins.module_utils._filelock import FileLock
|
||||
|
||||
display = Display()
|
||||
|
||||
|
||||
@@ -268,7 +269,7 @@ class LookupModule(LookupBase):
|
||||
)
|
||||
self.realpass = 'pass: the standard unix password manager' in passoutput
|
||||
except (subprocess.CalledProcessError) as e:
|
||||
raise AnsibleError(e)
|
||||
raise AnsibleError('exit code {0} while running {1}. Error output: {2}'.format(e.returncode, e.cmd, e.output))
|
||||
|
||||
return self.realpass
|
||||
|
||||
@@ -354,7 +355,7 @@ class LookupModule(LookupBase):
|
||||
except (subprocess.CalledProcessError) as e:
|
||||
# 'not in password store' is the expected error if a password wasn't found
|
||||
if 'not in the password store' not in e.output:
|
||||
raise AnsibleError(e)
|
||||
raise AnsibleError('exit code {0} while running {1}. Error output: {2}'.format(e.returncode, e.cmd, e.output))
|
||||
|
||||
if self.paramvals['missing'] == 'error':
|
||||
raise AnsibleError('passwordstore: passname {0} not found and missing=error is set'.format(self.passname))
|
||||
@@ -387,7 +388,7 @@ class LookupModule(LookupBase):
|
||||
try:
|
||||
check_output2([self.pass_cmd, 'insert', '-f', '-m', self.passname], input=msg, env=self.env)
|
||||
except (subprocess.CalledProcessError) as e:
|
||||
raise AnsibleError(e)
|
||||
raise AnsibleError('exit code {0} while running {1}. Error output: {2}'.format(e.returncode, e.cmd, e.output))
|
||||
return newpass
|
||||
|
||||
def generate_password(self):
|
||||
@@ -399,7 +400,7 @@ class LookupModule(LookupBase):
|
||||
try:
|
||||
check_output2([self.pass_cmd, 'insert', '-f', '-m', self.passname], input=msg, env=self.env)
|
||||
except (subprocess.CalledProcessError) as e:
|
||||
raise AnsibleError(e)
|
||||
raise AnsibleError('exit code {0} while running {1}. Error output: {2}'.format(e.returncode, e.cmd, e.output))
|
||||
return newpass
|
||||
|
||||
def get_passresult(self):
|
||||
|
||||
@@ -73,8 +73,6 @@ _raw:
|
||||
elements: str
|
||||
"""
|
||||
|
||||
import os
|
||||
|
||||
HAVE_REDIS = False
|
||||
try:
|
||||
import redis
|
||||
|
||||
109
plugins/module_utils/_filelock.py
Normal file
109
plugins/module_utils/_filelock.py
Normal file
@@ -0,0 +1,109 @@
|
||||
# Copyright (c) 2018, Ansible Project
|
||||
# Simplified BSD License (see licenses/simplified_bsd.txt or https://opensource.org/licenses/BSD-2-Clause)
|
||||
# SPDX-License-Identifier: BSD-2-Clause
|
||||
|
||||
# NOTE:
|
||||
# This has been vendored from ansible.module_utils.common.file. This code has been removed from there for ansible-core 2.16.
|
||||
|
||||
from __future__ import (absolute_import, division, print_function)
|
||||
__metaclass__ = type
|
||||
|
||||
import os
|
||||
import stat
|
||||
import time
|
||||
import fcntl
|
||||
import sys
|
||||
|
||||
from contextlib import contextmanager
|
||||
|
||||
|
||||
class LockTimeout(Exception):
|
||||
pass
|
||||
|
||||
|
||||
class FileLock:
|
||||
'''
|
||||
Currently FileLock is implemented via fcntl.flock on a lock file, however this
|
||||
behaviour may change in the future. Avoid mixing lock types fcntl.flock,
|
||||
fcntl.lockf and module_utils.common.file.FileLock as it will certainly cause
|
||||
unwanted and/or unexpected behaviour
|
||||
'''
|
||||
def __init__(self):
|
||||
self.lockfd = None
|
||||
|
||||
@contextmanager
|
||||
def lock_file(self, path, tmpdir, lock_timeout=None):
|
||||
'''
|
||||
Context for lock acquisition
|
||||
'''
|
||||
try:
|
||||
self.set_lock(path, tmpdir, lock_timeout)
|
||||
yield
|
||||
finally:
|
||||
self.unlock()
|
||||
|
||||
def set_lock(self, path, tmpdir, lock_timeout=None):
|
||||
'''
|
||||
Create a lock file based on path with flock to prevent other processes
|
||||
using given path.
|
||||
Please note that currently file locking only works when it's executed by
|
||||
the same user, I.E single user scenarios
|
||||
|
||||
:kw path: Path (file) to lock
|
||||
:kw tmpdir: Path where to place the temporary .lock file
|
||||
:kw lock_timeout:
|
||||
Wait n seconds for lock acquisition, fail if timeout is reached.
|
||||
0 = Do not wait, fail if lock cannot be acquired immediately,
|
||||
Default is None, wait indefinitely until lock is released.
|
||||
:returns: True
|
||||
'''
|
||||
lock_path = os.path.join(tmpdir, 'ansible-{0}.lock'.format(os.path.basename(path)))
|
||||
l_wait = 0.1
|
||||
r_exception = IOError
|
||||
if sys.version_info[0] == 3:
|
||||
r_exception = BlockingIOError
|
||||
|
||||
self.lockfd = open(lock_path, 'w')
|
||||
|
||||
if lock_timeout <= 0:
|
||||
fcntl.flock(self.lockfd, fcntl.LOCK_EX | fcntl.LOCK_NB)
|
||||
os.chmod(lock_path, stat.S_IWRITE | stat.S_IREAD)
|
||||
return True
|
||||
|
||||
if lock_timeout:
|
||||
e_secs = 0
|
||||
while e_secs < lock_timeout:
|
||||
try:
|
||||
fcntl.flock(self.lockfd, fcntl.LOCK_EX | fcntl.LOCK_NB)
|
||||
os.chmod(lock_path, stat.S_IWRITE | stat.S_IREAD)
|
||||
return True
|
||||
except r_exception:
|
||||
time.sleep(l_wait)
|
||||
e_secs += l_wait
|
||||
continue
|
||||
|
||||
self.lockfd.close()
|
||||
raise LockTimeout('{0} sec'.format(lock_timeout))
|
||||
|
||||
fcntl.flock(self.lockfd, fcntl.LOCK_EX)
|
||||
os.chmod(lock_path, stat.S_IWRITE | stat.S_IREAD)
|
||||
|
||||
return True
|
||||
|
||||
def unlock(self):
|
||||
'''
|
||||
Make sure lock file is available for everyone and Unlock the file descriptor
|
||||
locked by set_lock
|
||||
|
||||
:returns: True
|
||||
'''
|
||||
if not self.lockfd:
|
||||
return True
|
||||
|
||||
try:
|
||||
fcntl.flock(self.lockfd, fcntl.LOCK_UN)
|
||||
self.lockfd.close()
|
||||
except ValueError: # file wasn't opened, let context manager fail gracefully
|
||||
pass
|
||||
|
||||
return True
|
||||
@@ -88,9 +88,10 @@ class FormatError(CmdRunnerException):
|
||||
|
||||
|
||||
class _ArgFormat(object):
|
||||
def __init__(self, func, ignore_none=None):
|
||||
def __init__(self, func, ignore_none=None, ignore_missing_value=False):
|
||||
self.func = func
|
||||
self.ignore_none = ignore_none
|
||||
self.ignore_missing_value = ignore_missing_value
|
||||
|
||||
def __call__(self, value, ctx_ignore_none):
|
||||
ignore_none = self.ignore_none if self.ignore_none is not None else ctx_ignore_none
|
||||
@@ -127,7 +128,7 @@ class _Format(object):
|
||||
|
||||
@staticmethod
|
||||
def as_fixed(args):
|
||||
return _ArgFormat(lambda value: _ensure_list(args), ignore_none=False)
|
||||
return _ArgFormat(lambda value: _ensure_list(args), ignore_none=False, ignore_missing_value=True)
|
||||
|
||||
@staticmethod
|
||||
def as_func(func, ignore_none=None):
|
||||
@@ -135,14 +136,15 @@ class _Format(object):
|
||||
|
||||
@staticmethod
|
||||
def as_map(_map, default=None, ignore_none=None):
|
||||
if default is None:
|
||||
default = []
|
||||
return _ArgFormat(lambda value: _ensure_list(_map.get(value, default)), ignore_none=ignore_none)
|
||||
|
||||
@staticmethod
|
||||
def as_default_type(_type, arg="", ignore_none=None):
|
||||
fmt = _Format
|
||||
if _type == "dict":
|
||||
return fmt.as_func(lambda d: ["--{0}={1}".format(*a) for a in iteritems(d)],
|
||||
ignore_none=ignore_none)
|
||||
return fmt.as_func(lambda d: ["--{0}={1}".format(*a) for a in iteritems(d)], ignore_none=ignore_none)
|
||||
if _type == "list":
|
||||
return fmt.as_func(lambda value: ["--{0}".format(x) for x in value], ignore_none=ignore_none)
|
||||
if _type == "bool":
|
||||
@@ -261,10 +263,13 @@ class _CmdRunnerContext(object):
|
||||
for arg_name in self.args_order:
|
||||
value = None
|
||||
try:
|
||||
value = named_args[arg_name]
|
||||
if arg_name in named_args:
|
||||
value = named_args[arg_name]
|
||||
elif not runner.arg_formats[arg_name].ignore_missing_value:
|
||||
raise MissingArgumentValue(self.args_order, arg_name)
|
||||
self.cmd.extend(runner.arg_formats[arg_name](value, ctx_ignore_none=self.ignore_value_none))
|
||||
except KeyError:
|
||||
raise MissingArgumentValue(self.args_order, arg_name)
|
||||
except MissingArgumentValue:
|
||||
raise
|
||||
except Exception as e:
|
||||
raise FormatError(arg_name, value, runner.arg_formats[arg_name], e)
|
||||
|
||||
|
||||
@@ -19,15 +19,16 @@ import os
|
||||
import re
|
||||
import traceback
|
||||
|
||||
from ansible.module_utils.basic import AnsibleModule, missing_required_lib
|
||||
# (TODO: remove AnsibleModule from next line!)
|
||||
from ansible.module_utils.basic import AnsibleModule, missing_required_lib # noqa: F401, pylint: disable=unused-import
|
||||
from ansible.module_utils.six.moves import configparser
|
||||
from os.path import expanduser
|
||||
from uuid import UUID
|
||||
|
||||
LIBCLOUD_IMP_ERR = None
|
||||
try:
|
||||
from libcloud.common.dimensiondata import API_ENDPOINTS, DimensionDataAPIException, DimensionDataStatus
|
||||
from libcloud.compute.base import Node, NodeLocation
|
||||
from libcloud.common.dimensiondata import API_ENDPOINTS, DimensionDataAPIException, DimensionDataStatus # noqa: F401, pylint: disable=unused-import
|
||||
from libcloud.compute.base import Node, NodeLocation # noqa: F401, pylint: disable=unused-import
|
||||
from libcloud.compute.providers import get_driver
|
||||
from libcloud.compute.types import Provider
|
||||
|
||||
|
||||
@@ -110,3 +110,14 @@ def gitlab_authentication(module):
|
||||
GitLab remove Session API now that private tokens are removed from user API endpoints since version 10.2." % to_native(e))
|
||||
|
||||
return gitlab_instance
|
||||
|
||||
|
||||
def filter_returned_variables(gitlab_variables):
|
||||
# pop properties we don't know
|
||||
existing_variables = [dict(x.attributes) for x in gitlab_variables]
|
||||
KNOWN = ['key', 'value', 'masked', 'protected', 'variable_type', 'environment_scope']
|
||||
for item in existing_variables:
|
||||
for key in list(item.keys()):
|
||||
if key not in KNOWN:
|
||||
item.pop(key)
|
||||
return existing_variables
|
||||
|
||||
@@ -29,8 +29,15 @@ URL_CLIENT_ROLE_COMPOSITES = "{url}/admin/realms/{realm}/clients/{id}/roles/{nam
|
||||
|
||||
URL_REALM_ROLES = "{url}/admin/realms/{realm}/roles"
|
||||
URL_REALM_ROLE = "{url}/admin/realms/{realm}/roles/{name}"
|
||||
URL_REALM_ROLEMAPPINGS = "{url}/admin/realms/{realm}/users/{id}/role-mappings/realm"
|
||||
URL_REALM_ROLEMAPPINGS_AVAILABLE = "{url}/admin/realms/{realm}/users/{id}/role-mappings/realm/available"
|
||||
URL_REALM_ROLEMAPPINGS_COMPOSITE = "{url}/admin/realms/{realm}/users/{id}/role-mappings/realm/composite"
|
||||
URL_REALM_ROLE_COMPOSITES = "{url}/admin/realms/{realm}/roles/{name}/composites"
|
||||
|
||||
URL_ROLES_BY_ID = "{url}/admin/realms/{realm}/roles-by-id/{id}"
|
||||
URL_ROLES_BY_ID_COMPOSITES_CLIENTS = "{url}/admin/realms/{realm}/roles-by-id/{id}/composites/clients/{cid}"
|
||||
URL_ROLES_BY_ID_COMPOSITES = "{url}/admin/realms/{realm}/roles-by-id/{id}/composites"
|
||||
|
||||
URL_CLIENTTEMPLATE = "{url}/admin/realms/{realm}/client-templates/{id}"
|
||||
URL_CLIENTTEMPLATES = "{url}/admin/realms/{realm}/client-templates"
|
||||
URL_GROUPS = "{url}/admin/realms/{realm}/groups"
|
||||
@@ -41,9 +48,15 @@ URL_CLIENTSCOPE = "{url}/admin/realms/{realm}/client-scopes/{id}"
|
||||
URL_CLIENTSCOPE_PROTOCOLMAPPERS = "{url}/admin/realms/{realm}/client-scopes/{id}/protocol-mappers/models"
|
||||
URL_CLIENTSCOPE_PROTOCOLMAPPER = "{url}/admin/realms/{realm}/client-scopes/{id}/protocol-mappers/models/{mapper_id}"
|
||||
|
||||
URL_CLIENT_ROLEMAPPINGS = "{url}/admin/realms/{realm}/groups/{id}/role-mappings/clients/{client}"
|
||||
URL_CLIENT_ROLEMAPPINGS_AVAILABLE = "{url}/admin/realms/{realm}/groups/{id}/role-mappings/clients/{client}/available"
|
||||
URL_CLIENT_ROLEMAPPINGS_COMPOSITE = "{url}/admin/realms/{realm}/groups/{id}/role-mappings/clients/{client}/composite"
|
||||
URL_CLIENT_GROUP_ROLEMAPPINGS = "{url}/admin/realms/{realm}/groups/{id}/role-mappings/clients/{client}"
|
||||
URL_CLIENT_GROUP_ROLEMAPPINGS_AVAILABLE = "{url}/admin/realms/{realm}/groups/{id}/role-mappings/clients/{client}/available"
|
||||
URL_CLIENT_GROUP_ROLEMAPPINGS_COMPOSITE = "{url}/admin/realms/{realm}/groups/{id}/role-mappings/clients/{client}/composite"
|
||||
|
||||
URL_USERS = "{url}/admin/realms/{realm}/users"
|
||||
URL_CLIENT_SERVICE_ACCOUNT_USER = "{url}/admin/realms/{realm}/clients/{id}/service-account-user"
|
||||
URL_CLIENT_USER_ROLEMAPPINGS = "{url}/admin/realms/{realm}/users/{id}/role-mappings/clients/{client}"
|
||||
URL_CLIENT_USER_ROLEMAPPINGS_AVAILABLE = "{url}/admin/realms/{realm}/users/{id}/role-mappings/clients/{client}/available"
|
||||
URL_CLIENT_USER_ROLEMAPPINGS_COMPOSITE = "{url}/admin/realms/{realm}/users/{id}/role-mappings/clients/{client}/composite"
|
||||
|
||||
URL_AUTHENTICATION_FLOWS = "{url}/admin/realms/{realm}/authentication/flows"
|
||||
URL_AUTHENTICATION_FLOW = "{url}/admin/realms/{realm}/authentication/flows/{id}"
|
||||
@@ -446,10 +459,9 @@ class KeycloakAPI(object):
|
||||
self.module.fail_json(msg="Could not fetch rolemappings for client %s in realm %s: %s"
|
||||
% (cid, realm, str(e)))
|
||||
|
||||
def get_client_role_by_name(self, gid, cid, name, realm="master"):
|
||||
def get_client_role_id_by_name(self, cid, name, realm="master"):
|
||||
""" Get the role ID of a client.
|
||||
|
||||
:param gid: ID of the group from which to obtain the rolemappings.
|
||||
:param cid: ID of the client from which to obtain the rolemappings.
|
||||
:param name: Name of the role.
|
||||
:param realm: Realm from which to obtain the rolemappings.
|
||||
@@ -461,7 +473,7 @@ class KeycloakAPI(object):
|
||||
return role['id']
|
||||
return None
|
||||
|
||||
def get_client_rolemapping_by_id(self, gid, cid, rid, realm='master'):
|
||||
def get_client_group_rolemapping_by_id(self, gid, cid, rid, realm='master'):
|
||||
""" Obtain client representation by id
|
||||
|
||||
:param gid: ID of the group from which to obtain the rolemappings.
|
||||
@@ -470,7 +482,7 @@ class KeycloakAPI(object):
|
||||
:param realm: client from this realm
|
||||
:return: dict of rolemapping representation or None if none matching exist
|
||||
"""
|
||||
rolemappings_url = URL_CLIENT_ROLEMAPPINGS.format(url=self.baseurl, realm=realm, id=gid, client=cid)
|
||||
rolemappings_url = URL_CLIENT_GROUP_ROLEMAPPINGS.format(url=self.baseurl, realm=realm, id=gid, client=cid)
|
||||
try:
|
||||
rolemappings = json.loads(to_native(open_url(rolemappings_url, method="GET", http_agent=self.http_agent, headers=self.restheaders,
|
||||
timeout=self.connection_timeout,
|
||||
@@ -483,7 +495,7 @@ class KeycloakAPI(object):
|
||||
% (cid, gid, realm, str(e)))
|
||||
return None
|
||||
|
||||
def get_client_available_rolemappings(self, gid, cid, realm="master"):
|
||||
def get_client_group_available_rolemappings(self, gid, cid, realm="master"):
|
||||
""" Fetch the available role of a client in a specified goup on the Keycloak server.
|
||||
|
||||
:param gid: ID of the group from which to obtain the rolemappings.
|
||||
@@ -491,7 +503,7 @@ class KeycloakAPI(object):
|
||||
:param realm: Realm from which to obtain the rolemappings.
|
||||
:return: The rollemappings of specified group and client of the realm (default "master").
|
||||
"""
|
||||
available_rolemappings_url = URL_CLIENT_ROLEMAPPINGS_AVAILABLE.format(url=self.baseurl, realm=realm, id=gid, client=cid)
|
||||
available_rolemappings_url = URL_CLIENT_GROUP_ROLEMAPPINGS_AVAILABLE.format(url=self.baseurl, realm=realm, id=gid, client=cid)
|
||||
try:
|
||||
return json.loads(to_native(open_url(available_rolemappings_url, method="GET", http_agent=self.http_agent, headers=self.restheaders,
|
||||
timeout=self.connection_timeout,
|
||||
@@ -500,7 +512,7 @@ class KeycloakAPI(object):
|
||||
self.module.fail_json(msg="Could not fetch available rolemappings for client %s in group %s, realm %s: %s"
|
||||
% (cid, gid, realm, str(e)))
|
||||
|
||||
def get_client_composite_rolemappings(self, gid, cid, realm="master"):
|
||||
def get_client_group_composite_rolemappings(self, gid, cid, realm="master"):
|
||||
""" Fetch the composite role of a client in a specified group on the Keycloak server.
|
||||
|
||||
:param gid: ID of the group from which to obtain the rolemappings.
|
||||
@@ -508,15 +520,64 @@ class KeycloakAPI(object):
|
||||
:param realm: Realm from which to obtain the rolemappings.
|
||||
:return: The rollemappings of specified group and client of the realm (default "master").
|
||||
"""
|
||||
available_rolemappings_url = URL_CLIENT_ROLEMAPPINGS_COMPOSITE.format(url=self.baseurl, realm=realm, id=gid, client=cid)
|
||||
composite_rolemappings_url = URL_CLIENT_GROUP_ROLEMAPPINGS_COMPOSITE.format(url=self.baseurl, realm=realm, id=gid, client=cid)
|
||||
try:
|
||||
return json.loads(to_native(open_url(available_rolemappings_url, method="GET", http_agent=self.http_agent, headers=self.restheaders,
|
||||
return json.loads(to_native(open_url(composite_rolemappings_url, method="GET", http_agent=self.http_agent, headers=self.restheaders,
|
||||
timeout=self.connection_timeout,
|
||||
validate_certs=self.validate_certs).read()))
|
||||
except Exception as e:
|
||||
self.module.fail_json(msg="Could not fetch available rolemappings for client %s in group %s, realm %s: %s"
|
||||
% (cid, gid, realm, str(e)))
|
||||
|
||||
def get_role_by_id(self, rid, realm="master"):
|
||||
""" Fetch a role by its id on the Keycloak server.
|
||||
|
||||
:param rid: ID of the role.
|
||||
:param realm: Realm from which to obtain the rolemappings.
|
||||
:return: The role.
|
||||
"""
|
||||
client_roles_url = URL_ROLES_BY_ID.format(url=self.baseurl, realm=realm, id=rid)
|
||||
try:
|
||||
return json.loads(to_native(open_url(client_roles_url, method="GET", http_agent=self.http_agent, headers=self.restheaders,
|
||||
timeout=self.connection_timeout,
|
||||
validate_certs=self.validate_certs).read()))
|
||||
except Exception as e:
|
||||
self.module.fail_json(msg="Could not fetch role for id %s in realm %s: %s"
|
||||
% (rid, realm, str(e)))
|
||||
|
||||
def get_client_roles_by_id_composite_rolemappings(self, rid, cid, realm="master"):
|
||||
""" Fetch a role by its id on the Keycloak server.
|
||||
|
||||
:param rid: ID of the composite role.
|
||||
:param cid: ID of the client from which to obtain the rolemappings.
|
||||
:param realm: Realm from which to obtain the rolemappings.
|
||||
:return: The role.
|
||||
"""
|
||||
client_roles_url = URL_ROLES_BY_ID_COMPOSITES_CLIENTS.format(url=self.baseurl, realm=realm, id=rid, cid=cid)
|
||||
try:
|
||||
return json.loads(to_native(open_url(client_roles_url, method="GET", http_agent=self.http_agent, headers=self.restheaders,
|
||||
timeout=self.connection_timeout,
|
||||
validate_certs=self.validate_certs).read()))
|
||||
except Exception as e:
|
||||
self.module.fail_json(msg="Could not fetch role for id %s and cid %s in realm %s: %s"
|
||||
% (rid, cid, realm, str(e)))
|
||||
|
||||
def add_client_roles_by_id_composite_rolemapping(self, rid, roles_rep, realm="master"):
|
||||
""" Assign roles to composite role
|
||||
|
||||
:param rid: ID of the composite role.
|
||||
:param roles_rep: Representation of the roles to assign.
|
||||
:param realm: Realm from which to obtain the rolemappings.
|
||||
:return: None.
|
||||
"""
|
||||
available_rolemappings_url = URL_ROLES_BY_ID_COMPOSITES.format(url=self.baseurl, realm=realm, id=rid)
|
||||
try:
|
||||
open_url(available_rolemappings_url, method="POST", http_agent=self.http_agent, headers=self.restheaders, data=json.dumps(roles_rep),
|
||||
validate_certs=self.validate_certs, timeout=self.connection_timeout)
|
||||
except Exception as e:
|
||||
self.module.fail_json(msg="Could not assign roles to composite role %s and realm %s: %s"
|
||||
% (rid, realm, str(e)))
|
||||
|
||||
def add_group_rolemapping(self, gid, cid, role_rep, realm="master"):
|
||||
""" Fetch the composite role of a client in a specified goup on the Keycloak server.
|
||||
|
||||
@@ -526,7 +587,7 @@ class KeycloakAPI(object):
|
||||
:param realm: Realm from which to obtain the rolemappings.
|
||||
:return: None.
|
||||
"""
|
||||
available_rolemappings_url = URL_CLIENT_ROLEMAPPINGS.format(url=self.baseurl, realm=realm, id=gid, client=cid)
|
||||
available_rolemappings_url = URL_CLIENT_GROUP_ROLEMAPPINGS.format(url=self.baseurl, realm=realm, id=gid, client=cid)
|
||||
try:
|
||||
open_url(available_rolemappings_url, method="POST", http_agent=self.http_agent, headers=self.restheaders, data=json.dumps(role_rep),
|
||||
validate_certs=self.validate_certs, timeout=self.connection_timeout)
|
||||
@@ -543,14 +604,214 @@ class KeycloakAPI(object):
|
||||
:param realm: Realm from which to obtain the rolemappings.
|
||||
:return: None.
|
||||
"""
|
||||
available_rolemappings_url = URL_CLIENT_ROLEMAPPINGS.format(url=self.baseurl, realm=realm, id=gid, client=cid)
|
||||
available_rolemappings_url = URL_CLIENT_GROUP_ROLEMAPPINGS.format(url=self.baseurl, realm=realm, id=gid, client=cid)
|
||||
try:
|
||||
open_url(available_rolemappings_url, method="DELETE", http_agent=self.http_agent, headers=self.restheaders,
|
||||
open_url(available_rolemappings_url, method="DELETE", http_agent=self.http_agent, headers=self.restheaders, data=json.dumps(role_rep),
|
||||
validate_certs=self.validate_certs, timeout=self.connection_timeout)
|
||||
except Exception as e:
|
||||
self.module.fail_json(msg="Could not delete available rolemappings for client %s in group %s, realm %s: %s"
|
||||
% (cid, gid, realm, str(e)))
|
||||
|
||||
def get_client_user_rolemapping_by_id(self, uid, cid, rid, realm='master'):
|
||||
""" Obtain client representation by id
|
||||
|
||||
:param uid: ID of the user from which to obtain the rolemappings.
|
||||
:param cid: ID of the client from which to obtain the rolemappings.
|
||||
:param rid: ID of the role.
|
||||
:param realm: client from this realm
|
||||
:return: dict of rolemapping representation or None if none matching exist
|
||||
"""
|
||||
rolemappings_url = URL_CLIENT_USER_ROLEMAPPINGS.format(url=self.baseurl, realm=realm, id=uid, client=cid)
|
||||
try:
|
||||
rolemappings = json.loads(to_native(open_url(rolemappings_url, method="GET", http_agent=self.http_agent, headers=self.restheaders,
|
||||
timeout=self.connection_timeout,
|
||||
validate_certs=self.validate_certs).read()))
|
||||
for role in rolemappings:
|
||||
if rid == role['id']:
|
||||
return role
|
||||
except Exception as e:
|
||||
self.module.fail_json(msg="Could not fetch rolemappings for client %s and user %s, realm %s: %s"
|
||||
% (cid, uid, realm, str(e)))
|
||||
return None
|
||||
|
||||
def get_client_user_available_rolemappings(self, uid, cid, realm="master"):
|
||||
""" Fetch the available role of a client for a specified user on the Keycloak server.
|
||||
|
||||
:param uid: ID of the user from which to obtain the rolemappings.
|
||||
:param cid: ID of the client from which to obtain the rolemappings.
|
||||
:param realm: Realm from which to obtain the rolemappings.
|
||||
:return: The effective rollemappings of specified client and user of the realm (default "master").
|
||||
"""
|
||||
available_rolemappings_url = URL_CLIENT_USER_ROLEMAPPINGS_AVAILABLE.format(url=self.baseurl, realm=realm, id=uid, client=cid)
|
||||
try:
|
||||
return json.loads(to_native(open_url(available_rolemappings_url, method="GET", http_agent=self.http_agent, headers=self.restheaders,
|
||||
timeout=self.connection_timeout,
|
||||
validate_certs=self.validate_certs).read()))
|
||||
except Exception as e:
|
||||
self.module.fail_json(msg="Could not fetch effective rolemappings for client %s and user %s, realm %s: %s"
|
||||
% (cid, uid, realm, str(e)))
|
||||
|
||||
def get_client_user_composite_rolemappings(self, uid, cid, realm="master"):
|
||||
""" Fetch the composite role of a client for a specified user on the Keycloak server.
|
||||
|
||||
:param uid: ID of the user from which to obtain the rolemappings.
|
||||
:param cid: ID of the client from which to obtain the rolemappings.
|
||||
:param realm: Realm from which to obtain the rolemappings.
|
||||
:return: The rollemappings of specified group and client of the realm (default "master").
|
||||
"""
|
||||
composite_rolemappings_url = URL_CLIENT_USER_ROLEMAPPINGS_COMPOSITE.format(url=self.baseurl, realm=realm, id=uid, client=cid)
|
||||
try:
|
||||
return json.loads(to_native(open_url(composite_rolemappings_url, method="GET", http_agent=self.http_agent, headers=self.restheaders,
|
||||
timeout=self.connection_timeout,
|
||||
validate_certs=self.validate_certs).read()))
|
||||
except Exception as e:
|
||||
self.module.fail_json(msg="Could not fetch available rolemappings for user %s of realm %s: %s"
|
||||
% (uid, realm, str(e)))
|
||||
|
||||
def get_realm_user_rolemapping_by_id(self, uid, rid, realm='master'):
|
||||
""" Obtain role representation by id
|
||||
|
||||
:param uid: ID of the user from which to obtain the rolemappings.
|
||||
:param rid: ID of the role.
|
||||
:param realm: client from this realm
|
||||
:return: dict of rolemapping representation or None if none matching exist
|
||||
"""
|
||||
rolemappings_url = URL_REALM_ROLEMAPPINGS.format(url=self.baseurl, realm=realm, id=uid)
|
||||
try:
|
||||
rolemappings = json.loads(to_native(open_url(rolemappings_url, method="GET", http_agent=self.http_agent, headers=self.restheaders,
|
||||
timeout=self.connection_timeout,
|
||||
validate_certs=self.validate_certs).read()))
|
||||
for role in rolemappings:
|
||||
if rid == role['id']:
|
||||
return role
|
||||
except Exception as e:
|
||||
self.module.fail_json(msg="Could not fetch rolemappings for user %s, realm %s: %s"
|
||||
% (uid, realm, str(e)))
|
||||
return None
|
||||
|
||||
def get_realm_user_available_rolemappings(self, uid, realm="master"):
|
||||
""" Fetch the available role of a realm for a specified user on the Keycloak server.
|
||||
|
||||
:param uid: ID of the user from which to obtain the rolemappings.
|
||||
:param realm: Realm from which to obtain the rolemappings.
|
||||
:return: The rollemappings of specified group and client of the realm (default "master").
|
||||
"""
|
||||
available_rolemappings_url = URL_REALM_ROLEMAPPINGS_AVAILABLE.format(url=self.baseurl, realm=realm, id=uid)
|
||||
try:
|
||||
return json.loads(to_native(open_url(available_rolemappings_url, method="GET", http_agent=self.http_agent, headers=self.restheaders,
|
||||
timeout=self.connection_timeout,
|
||||
validate_certs=self.validate_certs).read()))
|
||||
except Exception as e:
|
||||
self.module.fail_json(msg="Could not fetch available rolemappings for user %s of realm %s: %s"
|
||||
% (uid, realm, str(e)))
|
||||
|
||||
def get_realm_user_composite_rolemappings(self, uid, realm="master"):
|
||||
""" Fetch the composite role of a realm for a specified user on the Keycloak server.
|
||||
|
||||
:param uid: ID of the user from which to obtain the rolemappings.
|
||||
:param realm: Realm from which to obtain the rolemappings.
|
||||
:return: The effective rollemappings of specified client and user of the realm (default "master").
|
||||
"""
|
||||
composite_rolemappings_url = URL_REALM_ROLEMAPPINGS_COMPOSITE.format(url=self.baseurl, realm=realm, id=uid)
|
||||
try:
|
||||
return json.loads(to_native(open_url(composite_rolemappings_url, method="GET", http_agent=self.http_agent, headers=self.restheaders,
|
||||
timeout=self.connection_timeout,
|
||||
validate_certs=self.validate_certs).read()))
|
||||
except Exception as e:
|
||||
self.module.fail_json(msg="Could not fetch effective rolemappings for user %s, realm %s: %s"
|
||||
% (uid, realm, str(e)))
|
||||
|
||||
def get_user_by_username(self, username, realm="master"):
|
||||
""" Fetch a keycloak user within a realm based on its username.
|
||||
|
||||
If the user does not exist, None is returned.
|
||||
:param username: Username of the user to fetch.
|
||||
:param realm: Realm in which the user resides; default 'master'
|
||||
"""
|
||||
users_url = URL_USERS.format(url=self.baseurl, realm=realm)
|
||||
users_url += '?username=%s&exact=true' % username
|
||||
try:
|
||||
return json.loads(to_native(open_url(users_url, method='GET', headers=self.restheaders, timeout=self.connection_timeout,
|
||||
validate_certs=self.validate_certs).read()))
|
||||
except ValueError as e:
|
||||
self.module.fail_json(msg='API returned incorrect JSON when trying to obtain the user for realm %s and username %s: %s'
|
||||
% (realm, username, str(e)))
|
||||
except Exception as e:
|
||||
self.module.fail_json(msg='Could not obtain the user for realm %s and username %s: %s'
|
||||
% (realm, username, str(e)))
|
||||
|
||||
def get_service_account_user_by_client_id(self, client_id, realm="master"):
|
||||
""" Fetch a keycloak service account user within a realm based on its client_id.
|
||||
|
||||
If the user does not exist, None is returned.
|
||||
:param client_id: clientId of the service account user to fetch.
|
||||
:param realm: Realm in which the user resides; default 'master'
|
||||
"""
|
||||
cid = self.get_client_id(client_id, realm=realm)
|
||||
|
||||
service_account_user_url = URL_CLIENT_SERVICE_ACCOUNT_USER.format(url=self.baseurl, realm=realm, id=cid)
|
||||
try:
|
||||
return json.loads(to_native(open_url(service_account_user_url, method='GET', headers=self.restheaders, timeout=self.connection_timeout,
|
||||
validate_certs=self.validate_certs).read()))
|
||||
except ValueError as e:
|
||||
self.module.fail_json(msg='API returned incorrect JSON when trying to obtain the service-account-user for realm %s and client_id %s: %s'
|
||||
% (realm, client_id, str(e)))
|
||||
except Exception as e:
|
||||
self.module.fail_json(msg='Could not obtain the service-account-user for realm %s and client_id %s: %s'
|
||||
% (realm, client_id, str(e)))
|
||||
|
||||
def add_user_rolemapping(self, uid, cid, role_rep, realm="master"):
|
||||
""" Assign a realm or client role to a specified user on the Keycloak server.
|
||||
|
||||
:param uid: ID of the user roles are assigned to.
|
||||
:param cid: ID of the client from which to obtain the rolemappings. If empty, roles are from the realm
|
||||
:param role_rep: Representation of the role to assign.
|
||||
:param realm: Realm from which to obtain the rolemappings.
|
||||
:return: None.
|
||||
"""
|
||||
if cid is None:
|
||||
user_realm_rolemappings_url = URL_REALM_ROLEMAPPINGS.format(url=self.baseurl, realm=realm, id=uid)
|
||||
try:
|
||||
open_url(user_realm_rolemappings_url, method="POST", http_agent=self.http_agent, headers=self.restheaders, data=json.dumps(role_rep),
|
||||
validate_certs=self.validate_certs, timeout=self.connection_timeout)
|
||||
except Exception as e:
|
||||
self.module.fail_json(msg="Could not map roles to userId %s for realm %s and roles %s: %s"
|
||||
% (uid, realm, json.dumps(role_rep), str(e)))
|
||||
else:
|
||||
user_client_rolemappings_url = URL_CLIENT_USER_ROLEMAPPINGS.format(url=self.baseurl, realm=realm, id=uid, client=cid)
|
||||
try:
|
||||
open_url(user_client_rolemappings_url, method="POST", http_agent=self.http_agent, headers=self.restheaders, data=json.dumps(role_rep),
|
||||
validate_certs=self.validate_certs, timeout=self.connection_timeout)
|
||||
except Exception as e:
|
||||
self.module.fail_json(msg="Could not map roles to userId %s for client %s, realm %s and roles %s: %s"
|
||||
% (cid, uid, realm, json.dumps(role_rep), str(e)))
|
||||
|
||||
def delete_user_rolemapping(self, uid, cid, role_rep, realm="master"):
|
||||
""" Delete the rolemapping of a client in a specified user on the Keycloak server.
|
||||
|
||||
:param uid: ID of the user from which to remove the rolemappings.
|
||||
:param cid: ID of the client from which to remove the rolemappings.
|
||||
:param role_rep: Representation of the role to remove from rolemappings.
|
||||
:param realm: Realm from which to remove the rolemappings.
|
||||
:return: None.
|
||||
"""
|
||||
if cid is None:
|
||||
user_realm_rolemappings_url = URL_REALM_ROLEMAPPINGS.format(url=self.baseurl, realm=realm, id=uid)
|
||||
try:
|
||||
open_url(user_realm_rolemappings_url, method="DELETE", http_agent=self.http_agent, headers=self.restheaders, data=json.dumps(role_rep),
|
||||
validate_certs=self.validate_certs, timeout=self.connection_timeout)
|
||||
except Exception as e:
|
||||
self.module.fail_json(msg="Could not remove roles %s from userId %s, realm %s: %s"
|
||||
% (json.dumps(role_rep), uid, realm, str(e)))
|
||||
else:
|
||||
user_client_rolemappings_url = URL_CLIENT_USER_ROLEMAPPINGS.format(url=self.baseurl, realm=realm, id=uid, client=cid)
|
||||
try:
|
||||
open_url(user_client_rolemappings_url, method="DELETE", http_agent=self.http_agent, headers=self.restheaders, data=json.dumps(role_rep),
|
||||
validate_certs=self.validate_certs, timeout=self.connection_timeout)
|
||||
except Exception as e:
|
||||
self.module.fail_json(msg="Could not remove roles %s for client %s from userId %s, realm %s: %s"
|
||||
% (json.dumps(role_rep), cid, uid, realm, str(e)))
|
||||
|
||||
def get_client_templates(self, realm='master'):
|
||||
""" Obtains client template representations for client templates in a realm
|
||||
|
||||
@@ -930,7 +1191,6 @@ class KeycloakAPI(object):
|
||||
return json.loads(to_native(open_url(groups_url, method="GET", http_agent=self.http_agent, headers=self.restheaders,
|
||||
timeout=self.connection_timeout,
|
||||
validate_certs=self.validate_certs).read()))
|
||||
|
||||
except HTTPError as e:
|
||||
if e.code == 404:
|
||||
return None
|
||||
|
||||
@@ -15,7 +15,7 @@ from ansible_collections.community.general.plugins.module_utils.version import L
|
||||
|
||||
REQUESTS_IMP_ERR = None
|
||||
try:
|
||||
import requests.exceptions
|
||||
import requests.exceptions # noqa: F401, pylint: disable=unused-import
|
||||
HAS_REQUESTS = True
|
||||
except ImportError:
|
||||
REQUESTS_IMP_ERR = traceback.format_exc()
|
||||
@@ -25,7 +25,7 @@ INFLUXDB_IMP_ERR = None
|
||||
try:
|
||||
from influxdb import InfluxDBClient
|
||||
from influxdb import __version__ as influxdb_version
|
||||
from influxdb import exceptions
|
||||
from influxdb import exceptions # noqa: F401, pylint: disable=unused-import
|
||||
HAS_INFLUXDB = True
|
||||
except ImportError:
|
||||
INFLUXDB_IMP_ERR = traceback.format_exc()
|
||||
|
||||
@@ -60,7 +60,7 @@ class LXDClient(object):
|
||||
self.cert_file = cert_file
|
||||
self.key_file = key_file
|
||||
parts = generic_urlparse(urlparse(self.url))
|
||||
ctx = ssl.create_default_context(ssl.Purpose.CLIENT_AUTH)
|
||||
ctx = ssl.create_default_context(ssl.Purpose.SERVER_AUTH)
|
||||
ctx.load_cert_chain(cert_file, keyfile=key_file)
|
||||
self.connection = HTTPSConnection(parts.get('netloc'), context=ctx)
|
||||
elif url.startswith('unix:'):
|
||||
|
||||
@@ -156,3 +156,315 @@ class ManageIQ(object):
|
||||
msg = "{collection_name} where {params} does not exist in manageiq".format(
|
||||
collection_name=collection_name, params=str(params))
|
||||
self.module.fail_json(msg=msg)
|
||||
|
||||
def policies(self, resource_id, resource_type, resource_name):
|
||||
manageiq = ManageIQ(self.module)
|
||||
|
||||
# query resource id, fail if resource does not exist
|
||||
if resource_id is None:
|
||||
resource_id = manageiq.find_collection_resource_or_fail(resource_type, name=resource_name)['id']
|
||||
|
||||
return ManageIQPolicies(manageiq, resource_type, resource_id)
|
||||
|
||||
def query_resource_id(self, resource_type, resource_name):
|
||||
""" Query the resource name in ManageIQ.
|
||||
|
||||
Returns:
|
||||
the resource ID if it exists in ManageIQ, Fail otherwise.
|
||||
"""
|
||||
resource = self.find_collection_resource_by(resource_type, name=resource_name)
|
||||
if resource:
|
||||
return resource["id"]
|
||||
else:
|
||||
msg = "{resource_name} {resource_type} does not exist in manageiq".format(
|
||||
resource_name=resource_name, resource_type=resource_type)
|
||||
self.module.fail_json(msg=msg)
|
||||
|
||||
|
||||
class ManageIQPolicies(object):
|
||||
"""
|
||||
Object to execute policies management operations of manageiq resources.
|
||||
"""
|
||||
|
||||
def __init__(self, manageiq, resource_type, resource_id):
|
||||
self.manageiq = manageiq
|
||||
|
||||
self.module = self.manageiq.module
|
||||
self.api_url = self.manageiq.api_url
|
||||
self.client = self.manageiq.client
|
||||
|
||||
self.resource_type = resource_type
|
||||
self.resource_id = resource_id
|
||||
self.resource_url = '{api_url}/{resource_type}/{resource_id}'.format(
|
||||
api_url=self.api_url,
|
||||
resource_type=resource_type,
|
||||
resource_id=resource_id)
|
||||
|
||||
def query_profile_href(self, profile):
|
||||
""" Add or Update the policy_profile href field
|
||||
|
||||
Example:
|
||||
{name: STR, ...} => {name: STR, href: STR}
|
||||
"""
|
||||
resource = self.manageiq.find_collection_resource_or_fail(
|
||||
"policy_profiles", **profile)
|
||||
return dict(name=profile['name'], href=resource['href'])
|
||||
|
||||
def query_resource_profiles(self):
|
||||
""" Returns a set of the profile objects objects assigned to the resource
|
||||
"""
|
||||
url = '{resource_url}/policy_profiles?expand=resources'
|
||||
try:
|
||||
response = self.client.get(url.format(resource_url=self.resource_url))
|
||||
except Exception as e:
|
||||
msg = "Failed to query {resource_type} policies: {error}".format(
|
||||
resource_type=self.resource_type,
|
||||
error=e)
|
||||
self.module.fail_json(msg=msg)
|
||||
|
||||
resources = response.get('resources', [])
|
||||
|
||||
# clean the returned rest api profile object to look like:
|
||||
# {profile_name: STR, profile_description: STR, policies: ARR<POLICIES>}
|
||||
profiles = [self.clean_profile_object(profile) for profile in resources]
|
||||
|
||||
return profiles
|
||||
|
||||
def query_profile_policies(self, profile_id):
|
||||
""" Returns a set of the policy objects assigned to the resource
|
||||
"""
|
||||
url = '{api_url}/policy_profiles/{profile_id}?expand=policies'
|
||||
try:
|
||||
response = self.client.get(url.format(api_url=self.api_url, profile_id=profile_id))
|
||||
except Exception as e:
|
||||
msg = "Failed to query {resource_type} policies: {error}".format(
|
||||
resource_type=self.resource_type,
|
||||
error=e)
|
||||
self.module.fail_json(msg=msg)
|
||||
|
||||
resources = response.get('policies', [])
|
||||
|
||||
# clean the returned rest api policy object to look like:
|
||||
# {name: STR, description: STR, active: BOOL}
|
||||
policies = [self.clean_policy_object(policy) for policy in resources]
|
||||
|
||||
return policies
|
||||
|
||||
def clean_policy_object(self, policy):
|
||||
""" Clean a policy object to have human readable form of:
|
||||
{
|
||||
name: STR,
|
||||
description: STR,
|
||||
active: BOOL
|
||||
}
|
||||
"""
|
||||
name = policy.get('name')
|
||||
description = policy.get('description')
|
||||
active = policy.get('active')
|
||||
|
||||
return dict(
|
||||
name=name,
|
||||
description=description,
|
||||
active=active)
|
||||
|
||||
def clean_profile_object(self, profile):
    """ Clean a profile object to have human readable form of:
    {
        profile_name: STR,
        profile_description: STR,
        policies: ARR<POLICIES>
    }
    """
    # resolve the profile's policy list via a follow-up API query
    return dict(
        profile_name=profile.get('name'),
        profile_description=profile.get('description'),
        policies=self.query_profile_policies(profile['id']))
|
||||
|
||||
def profiles_to_update(self, profiles, action):
    """ Create a list of policy profiles we need to update in ManageIQ.

    Returns:
        The subset of ``profiles`` not yet in the desired state for the
        requested action ('assign' or 'unassign'), each enriched with
        its href.
    """
    # full names of the profiles currently attached to the resource,
    # e.g. {'openscap profile', ...}
    currently_assigned = set(
        p['profile_name'] for p in self.query_resource_profiles())

    to_post = []
    for wanted in profiles:
        is_assigned = wanted.get('name') in currently_assigned
        if action == 'unassign':
            needs_change = is_assigned
        else:
            needs_change = (action == 'assign') and not is_assigned

        if needs_change:
            # add/update the policy profile href field
            # {name: STR, ...} => {name: STR, href: STR}
            to_post.append(self.query_profile_href(wanted))

    return to_post
|
||||
|
||||
def assign_or_unassign_profiles(self, profiles, action):
    """ Perform assign/unassign action
    """
    # only post the profiles whose state actually differs
    pending = self.profiles_to_update(profiles, action)
    if not pending:
        return dict(
            changed=False,
            msg="Profiles {profiles} already {action}ed, nothing to do".format(
                action=action,
                profiles=profiles))

    # try to assign or unassign profiles to resource
    endpoint = '{resource_url}/policy_profiles'.format(resource_url=self.resource_url)
    try:
        response = self.client.post(endpoint, action=action, resources=pending)
    except Exception as e:
        self.module.fail_json(
            msg="Failed to {action} profile: {error}".format(
                action=action,
                error=e))

    # check all entities in result to be successful
    for outcome in response['results']:
        if not outcome['success']:
            self.module.fail_json(
                msg="Failed to {action}: {message}".format(
                    action=action,
                    message=outcome['message']))

    # successfully changed all needed profiles
    return dict(
        changed=True,
        msg="Successfully {action}ed profiles: {profiles}".format(
            action=action,
            profiles=profiles))
|
||||
|
||||
|
||||
class ManageIQTags(object):
    """
    Object to execute tags management operations of manageiq resources.
    """

    def __init__(self, manageiq, resource_type, resource_id):
        """
        Args:
            manageiq: connection wrapper exposing ``module``, ``api_url``
                and ``client`` attributes.
            resource_type: REST collection name of the resource (e.g. 'vms').
            resource_id: id of the resource within the collection.
        """
        self.manageiq = manageiq

        self.module = self.manageiq.module
        self.api_url = self.manageiq.api_url
        self.client = self.manageiq.client

        self.resource_type = resource_type
        self.resource_id = resource_id
        # base url of the resource, e.g. <api_url>/vms/7
        self.resource_url = '{api_url}/{resource_type}/{resource_id}'.format(
            api_url=self.api_url,
            resource_type=resource_type,
            resource_id=resource_id)

    def full_tag_name(self, tag):
        """ Returns the full tag name in manageiq,
        e.g. {'category': 'env', 'name': 'prod'} => '/managed/env/prod'
        """
        return '/managed/{tag_category}/{tag_name}'.format(
            tag_category=tag['category'],
            tag_name=tag['name'])

    def clean_tag_object(self, tag):
        """ Clean a tag object to have human readable form of:
        {
            full_name: STR,
            name: STR,
            display_name: STR,
            category: STR
        }
        """
        full_name = tag.get('name')
        categorization = tag.get('categorization', {})

        return dict(
            full_name=full_name,
            name=categorization.get('name'),
            display_name=categorization.get('display_name'),
            category=categorization.get('category', {}).get('name'))

    def query_resource_tags(self):
        """ Returns a set of the tag objects assigned to the resource
        """
        url = '{resource_url}/tags?expand=resources&attributes=categorization'
        try:
            response = self.client.get(url.format(resource_url=self.resource_url))
        except Exception as e:
            msg = "Failed to query {resource_type} tags: {error}".format(
                resource_type=self.resource_type,
                error=e)
            self.module.fail_json(msg=msg)

        resources = response.get('resources', [])

        # clean the returned rest api tag object to look like:
        # {full_name: STR, name: STR, display_name: STR, category: STR}
        tags = [self.clean_tag_object(tag) for tag in resources]

        return tags

    def tags_to_update(self, tags, action):
        """ Create a list of tags we need to update in ManageIQ.

        Returns:
            The subset of ``tags`` not yet in the desired state for the
            requested action ('assign' or 'unassign').
        """
        tags_to_post = []
        assigned_tags = self.query_resource_tags()

        # make a list of assigned full tag names strings
        # e.g. ['/managed/environment/prod', ...]
        assigned_tags_set = set([tag['full_name'] for tag in assigned_tags])

        for tag in tags:
            assigned = self.full_tag_name(tag) in assigned_tags_set

            if assigned and action == 'unassign':
                tags_to_post.append(tag)
            elif (not assigned) and action == 'assign':
                tags_to_post.append(tag)

        return tags_to_post

    def assign_or_unassign_tags(self, tags, action):
        """ Perform assign/unassign action
        """
        # get a list of tags needed to be changed
        tags_to_post = self.tags_to_update(tags, action)
        if not tags_to_post:
            return dict(
                changed=False,
                msg="Tags already {action}ed, nothing to do".format(action=action))

        # try to assign or unassign tags to resource
        url = '{resource_url}/tags'.format(resource_url=self.resource_url)
        try:
            # BUGFIX: post only the tags that actually need changing
            # (tags_to_post).  Previously the full requested list (tags)
            # was posted, re-sending tags already in the desired state --
            # wasteful, and inconsistent with assign_or_unassign_profiles()
            # which posts only profiles_to_post.
            response = self.client.post(url, action=action, resources=tags_to_post)
        except Exception as e:
            msg = "Failed to {action} tag: {error}".format(
                action=action,
                error=e)
            self.module.fail_json(msg=msg)

        # check all entities in result to be successful
        for result in response['results']:
            if not result['success']:
                msg = "Failed to {action}: {message}".format(
                    action=action,
                    message=result['message'])
                self.module.fail_json(msg=msg)

        # successfully changed all needed tags
        return dict(
            changed=True,
            msg="Successfully {action}ed tags".format(action=action))
|
||||
|
||||
@@ -26,6 +26,7 @@ class Response(object):
|
||||
def __init__(self):
|
||||
self.content = None
|
||||
self.status_code = None
|
||||
self.stderr = None
|
||||
|
||||
def json(self):
|
||||
return json.loads(self.content)
|
||||
@@ -75,6 +76,10 @@ def memset_api_call(api_key, api_method, payload=None):
|
||||
msg = "Memset API returned a {0} response ({1}, {2})." . format(response.status_code, response.json()['error_type'], response.json()['error'])
|
||||
else:
|
||||
msg = "Memset API returned an error ({0}, {1})." . format(response.json()['error_type'], response.json()['error'])
|
||||
except urllib_error.URLError as e:
|
||||
has_failed = True
|
||||
msg = "An URLError occured ({0})." . format(type(e))
|
||||
response.stderr = "{0}" . format(e)
|
||||
|
||||
if msg is None:
|
||||
msg = response.json()
|
||||
|
||||
@@ -34,6 +34,10 @@ class ArgFormat(object):
|
||||
|
||||
def __init__(self, name, fmt=None, style=FORMAT, stars=0):
|
||||
"""
|
||||
THIS CLASS IS BEING DEPRECATED.
|
||||
It was never meant to be used outside the scope of CmdMixin, and CmdMixin is being deprecated.
|
||||
See the deprecation notice in ``CmdMixin.__init__()`` below.
|
||||
|
||||
Creates a CLI-formatter for one specific argument. The argument may be a module parameter or just a named parameter for
|
||||
the CLI command execution.
|
||||
:param name: Name of the argument to be formatted
|
||||
@@ -88,6 +92,9 @@ class ArgFormat(object):
|
||||
|
||||
class CmdMixin(object):
|
||||
"""
|
||||
THIS CLASS IS BEING DEPRECATED.
|
||||
See the deprecation notice in ``CmdMixin.__init__()`` below.
|
||||
|
||||
Mixin for mapping module options to running a CLI command with its arguments.
|
||||
"""
|
||||
command = None
|
||||
@@ -110,6 +117,15 @@ class CmdMixin(object):
|
||||
result[param] = ArgFormat(param, **fmt_spec)
|
||||
return result
|
||||
|
||||
def __init__(self, *args, **kwargs):
|
||||
super(CmdMixin, self).__init__(*args, **kwargs)
|
||||
self.module.deprecate(
|
||||
'The CmdMixin used in classes CmdModuleHelper and CmdStateModuleHelper is being deprecated. '
|
||||
'Modules should use community.general.plugins.module_utils.cmd_runner.CmdRunner instead.',
|
||||
version='8.0.0',
|
||||
collection_name='community.general',
|
||||
)
|
||||
|
||||
def _calculate_args(self, extra_params=None, params=None):
|
||||
def add_arg_formatted_param(_cmd_args, arg_format, _value):
|
||||
args = list(arg_format.to_text(_value))
|
||||
|
||||
@@ -9,7 +9,8 @@ __metaclass__ = type
|
||||
|
||||
from ansible.module_utils.common.dict_transformations import dict_merge
|
||||
|
||||
from ansible_collections.community.general.plugins.module_utils.mh.base import ModuleHelperBase, AnsibleModule
|
||||
# (TODO: remove AnsibleModule!) pylint: disable-next=unused-import
|
||||
from ansible_collections.community.general.plugins.module_utils.mh.base import ModuleHelperBase, AnsibleModule # noqa: F401
|
||||
from ansible_collections.community.general.plugins.module_utils.mh.mixins.cmd import CmdMixin
|
||||
from ansible_collections.community.general.plugins.module_utils.mh.mixins.state import StateMixin
|
||||
from ansible_collections.community.general.plugins.module_utils.mh.mixins.deps import DependencyMixin
|
||||
@@ -72,7 +73,7 @@ class ModuleHelper(DeprecateAttrsMixin, VarsMixin, DependencyMixin, ModuleHelper
|
||||
vars_diff = self.vars.diff() or {}
|
||||
result['diff'] = dict_merge(dict(diff), vars_diff)
|
||||
|
||||
for varname in result:
|
||||
for varname in list(result):
|
||||
if varname in self._output_conflict_list:
|
||||
result["_" + varname] = result[varname]
|
||||
del result[varname]
|
||||
@@ -84,8 +85,16 @@ class StateModuleHelper(StateMixin, ModuleHelper):
|
||||
|
||||
|
||||
class CmdModuleHelper(CmdMixin, ModuleHelper):
|
||||
"""
|
||||
THIS CLASS IS BEING DEPRECATED.
|
||||
See the deprecation notice in ``CmdMixin.__init__()``.
|
||||
"""
|
||||
pass
|
||||
|
||||
|
||||
class CmdStateModuleHelper(CmdMixin, StateMixin, ModuleHelper):
|
||||
"""
|
||||
THIS CLASS IS BEING DEPRECATED.
|
||||
See the deprecation notice in ``CmdMixin.__init__()``.
|
||||
"""
|
||||
pass
|
||||
|
||||
@@ -8,12 +8,13 @@ from __future__ import absolute_import, division, print_function
|
||||
__metaclass__ = type
|
||||
|
||||
|
||||
from ansible_collections.community.general.plugins.module_utils.mh.module_helper import (
|
||||
from ansible_collections.community.general.plugins.module_utils.mh.module_helper import ( # noqa: F401, pylint: disable=unused-import
|
||||
ModuleHelper, StateModuleHelper, CmdModuleHelper, CmdStateModuleHelper, AnsibleModule
|
||||
)
|
||||
from ansible_collections.community.general.plugins.module_utils.mh.mixins.cmd import CmdMixin, ArgFormat
|
||||
from ansible_collections.community.general.plugins.module_utils.mh.mixins.state import StateMixin
|
||||
from ansible_collections.community.general.plugins.module_utils.mh.mixins.deps import DependencyCtxMgr
|
||||
from ansible_collections.community.general.plugins.module_utils.mh.exceptions import ModuleHelperException
|
||||
from ansible_collections.community.general.plugins.module_utils.mh.deco import cause_changes, module_fails_on_exception
|
||||
from ansible_collections.community.general.plugins.module_utils.mh.mixins.vars import VarMeta, VarDict
|
||||
from ansible_collections.community.general.plugins.module_utils.mh.mixins.cmd import CmdMixin, ArgFormat # noqa: F401, pylint: disable=unused-import
|
||||
from ansible_collections.community.general.plugins.module_utils.mh.mixins.state import StateMixin # noqa: F401, pylint: disable=unused-import
|
||||
from ansible_collections.community.general.plugins.module_utils.mh.mixins.deps import DependencyCtxMgr # noqa: F401, pylint: disable=unused-import
|
||||
from ansible_collections.community.general.plugins.module_utils.mh.exceptions import ModuleHelperException # noqa: F401, pylint: disable=unused-import
|
||||
# pylint: disable-next=unused-import
|
||||
from ansible_collections.community.general.plugins.module_utils.mh.deco import cause_changes, module_fails_on_exception # noqa: F401
|
||||
from ansible_collections.community.general.plugins.module_utils.mh.mixins.vars import VarMeta, VarDict # noqa: F401, pylint: disable=unused-import
|
||||
|
||||
@@ -16,7 +16,8 @@ __metaclass__ = type
|
||||
import abc
|
||||
import collections
|
||||
import json
|
||||
import os
|
||||
# (TODO: remove next line!)
|
||||
import os # noqa: F401, pylint: disable=unused-import
|
||||
import traceback
|
||||
|
||||
HPE_ONEVIEW_IMP_ERR = None
|
||||
|
||||
@@ -10,13 +10,14 @@ import logging
|
||||
import logging.config
|
||||
import os
|
||||
import tempfile
|
||||
from datetime import datetime
|
||||
# (TODO: remove next line!)
|
||||
from datetime import datetime # noqa: F401, pylint: disable=unused-import
|
||||
from operator import eq
|
||||
|
||||
import time
|
||||
|
||||
try:
|
||||
import yaml
|
||||
import yaml # noqa: F401, pylint: disable=unused-import
|
||||
|
||||
import oci
|
||||
from oci.constants import HEADER_NEXT_PAGE
|
||||
|
||||
@@ -6,7 +6,6 @@
|
||||
from __future__ import absolute_import, division, print_function
|
||||
__metaclass__ = type
|
||||
|
||||
from ansible.module_utils.parsing.convert_bool import boolean
|
||||
from ansible_collections.community.general.plugins.module_utils.cmd_runner import CmdRunner, cmd_runner_fmt as fmt
|
||||
|
||||
|
||||
|
||||
@@ -7,9 +7,12 @@
|
||||
from __future__ import absolute_import, division, print_function
|
||||
__metaclass__ = type
|
||||
|
||||
import atexit
|
||||
import time
|
||||
import re
|
||||
# (TODO: remove next line!)
|
||||
import atexit # noqa: F401, pylint: disable=unused-import
|
||||
# (TODO: remove next line!)
|
||||
import time # noqa: F401, pylint: disable=unused-import
|
||||
# (TODO: remove next line!)
|
||||
import re # noqa: F401, pylint: disable=unused-import
|
||||
import traceback
|
||||
|
||||
PROXMOXER_IMP_ERR = None
|
||||
@@ -22,7 +25,8 @@ except ImportError:
|
||||
|
||||
|
||||
from ansible.module_utils.basic import env_fallback, missing_required_lib
|
||||
from ansible.module_utils.common.text.converters import to_native
|
||||
# (TODO: remove next line!)
|
||||
from ansible.module_utils.common.text.converters import to_native # noqa: F401, pylint: disable=unused-import
|
||||
from ansible_collections.community.general.plugins.module_utils.version import LooseVersion
|
||||
|
||||
|
||||
@@ -137,3 +141,7 @@ class ProxmoxAnsible(object):
|
||||
return None
|
||||
|
||||
self.module.fail_json(msg='VM with vmid %s does not exist in cluster' % vmid)
|
||||
|
||||
def api_task_ok(self, node, taskid):
|
||||
status = self.proxmox_api.nodes(node).tasks(taskid).status.get()
|
||||
return status['status'] == 'stopped' and status['exitstatus'] == 'OK'
|
||||
|
||||
@@ -21,13 +21,15 @@ except ImportError:
|
||||
|
||||
HAS_PURITY_FB = True
|
||||
try:
|
||||
from purity_fb import PurityFb, FileSystem, FileSystemSnapshot, SnapshotSuffix, rest
|
||||
from purity_fb import PurityFb, FileSystem, FileSystemSnapshot, SnapshotSuffix, rest # noqa: F401, pylint: disable=unused-import
|
||||
except ImportError:
|
||||
HAS_PURITY_FB = False
|
||||
|
||||
from functools import wraps
|
||||
# (TODO: remove next line!)
|
||||
from functools import wraps # noqa: F401, pylint: disable=unused-import
|
||||
from os import environ
|
||||
from os import path
|
||||
# (TODO: remove next line!)
|
||||
from os import path # noqa: F401, pylint: disable=unused-import
|
||||
import platform
|
||||
|
||||
VERSION = 1.2
|
||||
|
||||
@@ -36,6 +36,8 @@ class RedfishUtils(object):
|
||||
self.timeout = timeout
|
||||
self.module = module
|
||||
self.service_root = '/redfish/v1/'
|
||||
self.session_service_uri = '/redfish/v1/SessionService'
|
||||
self.sessions_uri = '/redfish/v1/SessionService/Sessions'
|
||||
self.resource_id = resource_id
|
||||
self.data_modification = data_modification
|
||||
self.strip_etag_quotes = strip_etag_quotes
|
||||
@@ -66,6 +68,10 @@ class RedfishUtils(object):
|
||||
req_headers = dict(GET_HEADERS)
|
||||
username, password, basic_auth = self._auth_params(req_headers)
|
||||
try:
|
||||
# Service root is an unauthenticated resource; remove credentials
|
||||
# in case the caller will be using sessions later.
|
||||
if uri == (self.root_uri + self.service_root):
|
||||
basic_auth = False
|
||||
resp = open_url(uri, method="GET", headers=req_headers,
|
||||
url_username=username, url_password=password,
|
||||
force_basic_auth=basic_auth, validate_certs=False,
|
||||
@@ -92,6 +98,11 @@ class RedfishUtils(object):
|
||||
req_headers = dict(POST_HEADERS)
|
||||
username, password, basic_auth = self._auth_params(req_headers)
|
||||
try:
|
||||
# When performing a POST to the session collection, credentials are
|
||||
# provided in the request body. Do not provide the basic auth
|
||||
# header since this can cause conflicts with some services
|
||||
if self.sessions_uri is not None and uri == (self.root_uri + self.sessions_uri):
|
||||
basic_auth = False
|
||||
resp = open_url(uri, data=json.dumps(pyld),
|
||||
headers=req_headers, method="POST",
|
||||
url_username=username, url_password=password,
|
||||
@@ -232,22 +243,23 @@ class RedfishUtils(object):
|
||||
return {'ret': True}
|
||||
|
||||
def _find_sessionservice_resource(self):
|
||||
# Get the service root
|
||||
response = self.get_request(self.root_uri + self.service_root)
|
||||
if response['ret'] is False:
|
||||
return response
|
||||
data = response['data']
|
||||
if 'SessionService' not in data:
|
||||
|
||||
# Check for the session service and session collection. Well-known
|
||||
# defaults are provided in the constructor, but services that predate
|
||||
# Redfish 1.6.0 might contain different values.
|
||||
self.session_service_uri = data.get('SessionService', {}).get('@odata.id')
|
||||
self.sessions_uri = data.get('Links', {}).get('Sessions', {}).get('@odata.id')
|
||||
|
||||
# If one isn't found, return an error
|
||||
if self.session_service_uri is None:
|
||||
return {'ret': False, 'msg': "SessionService resource not found"}
|
||||
else:
|
||||
session_service = data["SessionService"]["@odata.id"]
|
||||
response = self.get_request(self.root_uri + session_service)
|
||||
if response['ret'] is False:
|
||||
return response
|
||||
data = response['data']
|
||||
sessions = data['Sessions']['@odata.id']
|
||||
if sessions[-1:] == '/':
|
||||
sessions = sessions[:-1]
|
||||
self.sessions_uri = sessions
|
||||
if self.sessions_uri is None:
|
||||
return {'ret': False, 'msg': "SessionCollection resource not found"}
|
||||
return {'ret': True}
|
||||
|
||||
def _get_resource_uri_by_id(self, uris, id_prop):
|
||||
@@ -3081,3 +3093,60 @@ class RedfishUtils(object):
|
||||
|
||||
def get_multi_manager_inventory(self):
|
||||
return self.aggregate_managers(self.get_manager_inventory)
|
||||
|
||||
def set_session_service(self, sessions_config):
|
||||
result = {}
|
||||
response = self.get_request(self.root_uri + self.session_service_uri)
|
||||
if response['ret'] is False:
|
||||
return response
|
||||
current_sessions_config = response['data']
|
||||
payload = {}
|
||||
for property, value in sessions_config.items():
|
||||
value = sessions_config[property]
|
||||
if property not in current_sessions_config:
|
||||
return {'ret': False, 'msg': "Property %s in sessions_config is invalid" % property}
|
||||
if isinstance(value, dict):
|
||||
if isinstance(current_sessions_config[property], dict):
|
||||
payload[property] = value
|
||||
elif isinstance(current_sessions_config[property], list):
|
||||
payload[property] = [value]
|
||||
else:
|
||||
return {'ret': False, 'msg': "Value of property %s in sessions_config is invalid" % property}
|
||||
else:
|
||||
payload[property] = value
|
||||
|
||||
need_change = False
|
||||
for property, set_value in payload.items():
|
||||
cur_value = current_sessions_config[property]
|
||||
if not isinstance(set_value, (dict, list)):
|
||||
if set_value != cur_value:
|
||||
need_change = True
|
||||
if isinstance(set_value, dict):
|
||||
for subprop in set_value.keys():
|
||||
if subprop not in current_sessions_config[property]:
|
||||
need_change = True
|
||||
break
|
||||
sub_set_value = set_value[subprop]
|
||||
sub_cur_value = current_sessions_config[property][subprop]
|
||||
if sub_set_value != sub_cur_value:
|
||||
need_change = True
|
||||
if isinstance(set_value, list):
|
||||
if len(set_value) != len(cur_value):
|
||||
need_change = True
|
||||
continue
|
||||
for i in range(len(set_value)):
|
||||
for subprop in set_value[i].keys():
|
||||
if subprop not in current_sessions_config[property][i]:
|
||||
need_change = True
|
||||
break
|
||||
sub_set_value = set_value[i][subprop]
|
||||
sub_cur_value = current_sessions_config[property][i][subprop]
|
||||
if sub_set_value != sub_cur_value:
|
||||
need_change = True
|
||||
if not need_change:
|
||||
return {'ret': True, 'changed': False, 'msg': "SessionService already configured"}
|
||||
|
||||
response = self.patch_request(self.root_uri + self.session_service_uri, payload)
|
||||
if response['ret'] is False:
|
||||
return response
|
||||
return {'ret': True, 'changed': True, 'msg': "Modified SessionService"}
|
||||
|
||||
@@ -9,6 +9,8 @@ __metaclass__ = type
|
||||
import json
|
||||
import re
|
||||
import sys
|
||||
import datetime
|
||||
import time
|
||||
|
||||
from ansible.module_utils.basic import env_fallback
|
||||
from ansible.module_utils.urls import fetch_url
|
||||
@@ -26,6 +28,14 @@ def scaleway_argument_spec():
|
||||
)
|
||||
|
||||
|
||||
def scaleway_waitable_resource_argument_spec():
|
||||
return dict(
|
||||
wait=dict(type="bool", default=True),
|
||||
wait_timeout=dict(type="int", default=300),
|
||||
wait_sleep_time=dict(type="int", default=3),
|
||||
)
|
||||
|
||||
|
||||
def payload_from_object(scw_object):
|
||||
return dict(
|
||||
(k, v)
|
||||
@@ -63,6 +73,25 @@ def parse_pagination_link(header):
|
||||
return parsed_relations
|
||||
|
||||
|
||||
def filter_sensitive_attributes(container, attributes):
|
||||
for attr in attributes:
|
||||
container[attr] = "SENSITIVE_VALUE"
|
||||
|
||||
return container
|
||||
|
||||
|
||||
def resource_attributes_should_be_changed(target, wished, verifiable_mutable_attributes, mutable_attributes):
|
||||
diff = dict()
|
||||
for attr in verifiable_mutable_attributes:
|
||||
if wished[attr] is not None and target[attr] != wished[attr]:
|
||||
diff[attr] = wished[attr]
|
||||
|
||||
if diff:
|
||||
return dict((attr, wished[attr]) for attr in mutable_attributes)
|
||||
else:
|
||||
return diff
|
||||
|
||||
|
||||
class Response(object):
|
||||
|
||||
def __init__(self, resp, info):
|
||||
@@ -169,6 +198,78 @@ class Scaleway(object):
|
||||
def warn(self, x):
|
||||
self.module.warn(str(x))
|
||||
|
||||
def fetch_state(self, resource):
|
||||
self.module.debug("fetch_state of resource: %s" % resource["id"])
|
||||
response = self.get(path=self.api_path + "/%s" % resource["id"])
|
||||
|
||||
if response.status_code == 404:
|
||||
return "absent"
|
||||
|
||||
if not response.ok:
|
||||
msg = 'Error during state fetching: (%s) %s' % (response.status_code, response.json)
|
||||
self.module.fail_json(msg=msg)
|
||||
|
||||
try:
|
||||
self.module.debug("Resource %s in state: %s" % (resource["id"], response.json["status"]))
|
||||
return response.json["status"]
|
||||
except KeyError:
|
||||
self.module.fail_json(msg="Could not fetch state in %s" % response.json)
|
||||
|
||||
def fetch_paginated_resources(self, resource_key, **pagination_kwargs):
|
||||
response = self.get(
|
||||
path=self.api_path,
|
||||
params=pagination_kwargs)
|
||||
|
||||
status_code = response.status_code
|
||||
if not response.ok:
|
||||
self.module.fail_json(msg='Error getting {0} [{1}: {2}]'.format(
|
||||
resource_key,
|
||||
response.status_code, response.json['message']))
|
||||
|
||||
return response.json[resource_key]
|
||||
|
||||
def fetch_all_resources(self, resource_key, **pagination_kwargs):
|
||||
resources = []
|
||||
|
||||
result = [None]
|
||||
while len(result) != 0:
|
||||
result = self.fetch_paginated_resources(resource_key, **pagination_kwargs)
|
||||
resources += result
|
||||
if 'page' in pagination_kwargs:
|
||||
pagination_kwargs['page'] += 1
|
||||
else:
|
||||
pagination_kwargs['page'] = 2
|
||||
|
||||
return resources
|
||||
|
||||
def wait_to_complete_state_transition(self, resource, stable_states, force_wait=False):
|
||||
wait = self.module.params["wait"]
|
||||
|
||||
if not (wait or force_wait):
|
||||
return
|
||||
|
||||
wait_timeout = self.module.params["wait_timeout"]
|
||||
wait_sleep_time = self.module.params["wait_sleep_time"]
|
||||
|
||||
# Prevent requesting the ressource status too soon
|
||||
time.sleep(wait_sleep_time)
|
||||
|
||||
start = datetime.datetime.utcnow()
|
||||
end = start + datetime.timedelta(seconds=wait_timeout)
|
||||
|
||||
while datetime.datetime.utcnow() < end:
|
||||
self.module.debug("We are going to wait for the resource to finish its transition")
|
||||
|
||||
state = self.fetch_state(resource)
|
||||
if state in stable_states:
|
||||
self.module.debug("It seems that the resource is not in transition anymore.")
|
||||
self.module.debug("load-balancer in state: %s" % self.fetch_state(resource))
|
||||
break
|
||||
|
||||
time.sleep(wait_sleep_time)
|
||||
else:
|
||||
self.module.fail_json(msg="Server takes too long to finish its transition")
|
||||
|
||||
|
||||
SCALEWAY_LOCATION = {
|
||||
'par1': {
|
||||
|
||||
@@ -13,10 +13,10 @@ __metaclass__ = type
|
||||
from ansible.module_utils.six import raise_from
|
||||
|
||||
try:
|
||||
from ansible.module_utils.compat.version import LooseVersion
|
||||
from ansible.module_utils.compat.version import LooseVersion # noqa: F401, pylint: disable=unused-import
|
||||
except ImportError:
|
||||
try:
|
||||
from distutils.version import LooseVersion
|
||||
from distutils.version import LooseVersion # noqa: F401, pylint: disable=unused-import
|
||||
except ImportError as exc:
|
||||
msg = 'To use this plugin or module with ansible-core 2.11, you need to use Python < 3.12 with distutils.version present'
|
||||
raise_from(ImportError(msg), exc)
|
||||
|
||||
@@ -27,7 +27,7 @@ __metaclass__ = type
|
||||
DOCUMENTATION = '''
|
||||
---
|
||||
module: ali_instance
|
||||
short_description: Create, Start, Stop, Restart or Terminate an Instance in ECS. Add or Remove Instance to/from a Security Group.
|
||||
short_description: Create, Start, Stop, Restart or Terminate an Instance in ECS; Add or Remove Instance to/from a Security Group
|
||||
description:
|
||||
- Create, start, stop, restart, modify or terminate ecs instances.
|
||||
- Add or remove ecs instances to/from security group.
|
||||
|
||||
@@ -27,7 +27,7 @@ __metaclass__ = type
|
||||
DOCUMENTATION = '''
|
||||
---
|
||||
module: ali_instance_info
|
||||
short_description: Gather information on instances of Alibaba Cloud ECS.
|
||||
short_description: Gather information on instances of Alibaba Cloud ECS
|
||||
description:
|
||||
- This module fetches data from the Open API in Alicloud.
|
||||
The module must be called from within the ECS instance itself.
|
||||
|
||||
@@ -61,6 +61,7 @@ options:
|
||||
- The values specified here will be used at installation time as --set arguments for atomic install.
|
||||
type: list
|
||||
elements: str
|
||||
default: []
|
||||
'''
|
||||
|
||||
EXAMPLES = r'''
|
||||
|
||||
@@ -11,7 +11,7 @@ __metaclass__ = type
|
||||
|
||||
DOCUMENTATION = '''
|
||||
module: clc_aa_policy
|
||||
short_description: Create or Delete Anti Affinity Policies at CenturyLink Cloud.
|
||||
short_description: Create or Delete Anti Affinity Policies at CenturyLink Cloud
|
||||
description:
|
||||
- An Ansible module to Create or Delete Anti Affinity Policies at CenturyLink Cloud.
|
||||
options:
|
||||
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user