mirror of
https://github.com/ansible-collections/community.general.git
synced 2026-04-28 09:26:44 +00:00
Compare commits
208 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
e67ba12211 | ||
|
|
2f04bd32d0 | ||
|
|
62613427af | ||
|
|
febf5f24ab | ||
|
|
6652261ef4 | ||
|
|
c4da880c46 | ||
|
|
1d87acef96 | ||
|
|
8bc51bb0ec | ||
|
|
504759ce92 | ||
|
|
a3704353c9 | ||
|
|
fef1a495e1 | ||
|
|
871829dd97 | ||
|
|
957ccd463e | ||
|
|
a0c7c10099 | ||
|
|
50e2794658 | ||
|
|
c88610305b | ||
|
|
9f0af5380c | ||
|
|
c512b789cb | ||
|
|
ec23171586 | ||
|
|
1704f947e3 | ||
|
|
80c7fc2d12 | ||
|
|
5b1bb61b9e | ||
|
|
56e00efcba | ||
|
|
ef42314714 | ||
|
|
9a6eb4e028 | ||
|
|
cf4b814c2d | ||
|
|
f47fced4ca | ||
|
|
370fa9814a | ||
|
|
7f60b1f2dd | ||
|
|
bbbc98a751 | ||
|
|
233743f2fe | ||
|
|
8539c534e3 | ||
|
|
eadf1320df | ||
|
|
1242dff77f | ||
|
|
2ce82ce1fa | ||
|
|
f3aab7a5b8 | ||
|
|
b3d79d728e | ||
|
|
ecd6bca049 | ||
|
|
84c883e854 | ||
|
|
91c8d6badc | ||
|
|
3cf6a67f74 | ||
|
|
3593d9c17c | ||
|
|
b0910d6a47 | ||
|
|
5c6053bf79 | ||
|
|
d789351195 | ||
|
|
a4739d8a36 | ||
|
|
986c0ab03a | ||
|
|
67279e7ca1 | ||
|
|
f34cd9ddb9 | ||
|
|
cbab5e887d | ||
|
|
c0cd4827da | ||
|
|
833c21a2bc | ||
|
|
cd333e6575 | ||
|
|
a70de88577 | ||
|
|
3ce83dcf6a | ||
|
|
6448372c04 | ||
|
|
be09373815 | ||
|
|
9a6f7c5c3f | ||
|
|
1f94bd4a17 | ||
|
|
b15a2c52e3 | ||
|
|
f764685c53 | ||
|
|
1d6552e005 | ||
|
|
770ae38aff | ||
|
|
4edae7afd0 | ||
|
|
40f87e25ff | ||
|
|
ebf8d9cde1 | ||
|
|
f0c5dd9cbc | ||
|
|
656a7f7087 | ||
|
|
c2303926af | ||
|
|
aa707d665e | ||
|
|
be2fd43243 | ||
|
|
b2eb0fb8f8 | ||
|
|
57053f8a32 | ||
|
|
583a7a75d3 | ||
|
|
065fdf990d | ||
|
|
2ed7e96372 | ||
|
|
985fbb321b | ||
|
|
037863b834 | ||
|
|
e19fda6cb0 | ||
|
|
4175c4c8fe | ||
|
|
e64d124e18 | ||
|
|
52c0a1565d | ||
|
|
df89012081 | ||
|
|
ee2d7cd21b | ||
|
|
22735bcc73 | ||
|
|
5100d972b3 | ||
|
|
7ce39c38c9 | ||
|
|
6316bd6e4d | ||
|
|
d3b9759ef1 | ||
|
|
2321a12d07 | ||
|
|
e8f9f21be1 | ||
|
|
f1fee975ba | ||
|
|
d4e831f31d | ||
|
|
07d0de5640 | ||
|
|
c1309ceb8b | ||
|
|
00efbe6ea2 | ||
|
|
d18092a128 | ||
|
|
b783d025df | ||
|
|
113764215d | ||
|
|
ef8fb888cd | ||
|
|
8385d2eb39 | ||
|
|
de38d23bdc | ||
|
|
3cb9b0fa91 | ||
|
|
551e5e4bd5 | ||
|
|
c75711167f | ||
|
|
b279694779 | ||
|
|
625d22391f | ||
|
|
1b488b53f5 | ||
|
|
51648d5328 | ||
|
|
87aedc7bd6 | ||
|
|
1a0c9eb5e6 | ||
|
|
b862c0db49 | ||
|
|
adba23c223 | ||
|
|
7fa84e8ec7 | ||
|
|
14a86ed0ad | ||
|
|
dcfd0f47e6 | ||
|
|
481570d0e3 | ||
|
|
9254110b8b | ||
|
|
17c8e274dc | ||
|
|
30289c7a03 | ||
|
|
e8861cafa6 | ||
|
|
c47888a5f9 | ||
|
|
58ba101990 | ||
|
|
bf54291500 | ||
|
|
8f27ef76f5 | ||
|
|
61e82c50e4 | ||
|
|
dfbde55aeb | ||
|
|
24b6441580 | ||
|
|
4381ac1bf3 | ||
|
|
e83bb285b2 | ||
|
|
edd4637b9f | ||
|
|
eefdf5b58e | ||
|
|
39c39e3de1 | ||
|
|
50284d1292 | ||
|
|
1590892a56 | ||
|
|
f6722c142d | ||
|
|
417db583e7 | ||
|
|
aa3b53fb87 | ||
|
|
ffca7eaf52 | ||
|
|
5b9b98340b | ||
|
|
4be9bb1118 | ||
|
|
d50476cdab | ||
|
|
363e8662b0 | ||
|
|
5365dcef3c | ||
|
|
89accbfa2b | ||
|
|
63210f4fc4 | ||
|
|
01864514c2 | ||
|
|
418589e346 | ||
|
|
88fab247ca | ||
|
|
56edbfc539 | ||
|
|
c94fa6132d | ||
|
|
2fa17c32a3 | ||
|
|
926f627128 | ||
|
|
7c6f286df2 | ||
|
|
b6ed6787b5 | ||
|
|
94a350e72b | ||
|
|
46d454eae0 | ||
|
|
adfd73d7ed | ||
|
|
aa2a5d9578 | ||
|
|
0f300bddb9 | ||
|
|
3785b656d6 | ||
|
|
16499072ff | ||
|
|
cad6b30036 | ||
|
|
2df1126d27 | ||
|
|
0d5ec37249 | ||
|
|
7c04aaa48f | ||
|
|
80113063ac | ||
|
|
1b09e8168a | ||
|
|
aadd48461c | ||
|
|
d565a20013 | ||
|
|
c69fb82ee0 | ||
|
|
cffc3dad11 | ||
|
|
a27025946b | ||
|
|
1825feb652 | ||
|
|
0c2d1eda44 | ||
|
|
d617f6919f | ||
|
|
b17cc09b07 | ||
|
|
ee7f44b09b | ||
|
|
a357944fb0 | ||
|
|
5d7d973f6d | ||
|
|
f3a516b79d | ||
|
|
d4eaef2d83 | ||
|
|
235e55fa9f | ||
|
|
c3baaa8cfa | ||
|
|
d68f6fcfff | ||
|
|
70e4ae440c | ||
|
|
8b66bb9a02 | ||
|
|
76fbb50270 | ||
|
|
93971b292a | ||
|
|
724bba79d5 | ||
|
|
e44f43b4d2 | ||
|
|
f82422502b | ||
|
|
5588ce3741 | ||
|
|
719ecc9e85 | ||
|
|
1a801323a8 | ||
|
|
7ebb301930 | ||
|
|
fb5047b605 | ||
|
|
b7977b8fa9 | ||
|
|
bae1440425 | ||
|
|
04f3dd2b56 | ||
|
|
99e3965ece | ||
|
|
14625a214a | ||
|
|
3c067aa2c3 | ||
|
|
01004bd27b | ||
|
|
f8265ecc4e | ||
|
|
2e355bef9f | ||
|
|
e6f65634fe | ||
|
|
61314898ca |
@@ -29,14 +29,14 @@ schedules:
|
|||||||
always: true
|
always: true
|
||||||
branches:
|
branches:
|
||||||
include:
|
include:
|
||||||
|
- stable-8
|
||||||
- stable-7
|
- stable-7
|
||||||
- stable-6
|
|
||||||
- cron: 0 11 * * 0
|
- cron: 0 11 * * 0
|
||||||
displayName: Weekly (old stable branches)
|
displayName: Weekly (old stable branches)
|
||||||
always: true
|
always: true
|
||||||
branches:
|
branches:
|
||||||
include:
|
include:
|
||||||
- stable-5
|
- stable-6
|
||||||
|
|
||||||
variables:
|
variables:
|
||||||
- name: checkoutPath
|
- name: checkoutPath
|
||||||
@@ -73,6 +73,19 @@ stages:
|
|||||||
- test: 3
|
- test: 3
|
||||||
- test: 4
|
- test: 4
|
||||||
- test: extra
|
- test: extra
|
||||||
|
- stage: Sanity_2_16
|
||||||
|
displayName: Sanity 2.16
|
||||||
|
dependsOn: []
|
||||||
|
jobs:
|
||||||
|
- template: templates/matrix.yml
|
||||||
|
parameters:
|
||||||
|
nameFormat: Test {0}
|
||||||
|
testFormat: 2.16/sanity/{0}
|
||||||
|
targets:
|
||||||
|
- test: 1
|
||||||
|
- test: 2
|
||||||
|
- test: 3
|
||||||
|
- test: 4
|
||||||
- stage: Sanity_2_15
|
- stage: Sanity_2_15
|
||||||
displayName: Sanity 2.15
|
displayName: Sanity 2.15
|
||||||
dependsOn: []
|
dependsOn: []
|
||||||
@@ -99,19 +112,6 @@ stages:
|
|||||||
- test: 2
|
- test: 2
|
||||||
- test: 3
|
- test: 3
|
||||||
- test: 4
|
- test: 4
|
||||||
- stage: Sanity_2_13
|
|
||||||
displayName: Sanity 2.13
|
|
||||||
dependsOn: []
|
|
||||||
jobs:
|
|
||||||
- template: templates/matrix.yml
|
|
||||||
parameters:
|
|
||||||
nameFormat: Test {0}
|
|
||||||
testFormat: 2.13/sanity/{0}
|
|
||||||
targets:
|
|
||||||
- test: 1
|
|
||||||
- test: 2
|
|
||||||
- test: 3
|
|
||||||
- test: 4
|
|
||||||
### Units
|
### Units
|
||||||
- stage: Units_devel
|
- stage: Units_devel
|
||||||
displayName: Units devel
|
displayName: Units devel
|
||||||
@@ -122,13 +122,24 @@ stages:
|
|||||||
nameFormat: Python {0}
|
nameFormat: Python {0}
|
||||||
testFormat: devel/units/{0}/1
|
testFormat: devel/units/{0}/1
|
||||||
targets:
|
targets:
|
||||||
- test: 2.7
|
|
||||||
- test: 3.6
|
|
||||||
- test: 3.7
|
- test: 3.7
|
||||||
- test: 3.8
|
- test: 3.8
|
||||||
- test: 3.9
|
- test: 3.9
|
||||||
- test: '3.10'
|
- test: '3.10'
|
||||||
- test: '3.11'
|
- test: '3.11'
|
||||||
|
- test: '3.12'
|
||||||
|
- stage: Units_2_16
|
||||||
|
displayName: Units 2.16
|
||||||
|
dependsOn: []
|
||||||
|
jobs:
|
||||||
|
- template: templates/matrix.yml
|
||||||
|
parameters:
|
||||||
|
nameFormat: Python {0}
|
||||||
|
testFormat: 2.16/units/{0}/1
|
||||||
|
targets:
|
||||||
|
- test: 2.7
|
||||||
|
- test: 3.6
|
||||||
|
- test: "3.11"
|
||||||
- stage: Units_2_15
|
- stage: Units_2_15
|
||||||
displayName: Units 2.15
|
displayName: Units 2.15
|
||||||
dependsOn: []
|
dependsOn: []
|
||||||
@@ -150,17 +161,6 @@ stages:
|
|||||||
testFormat: 2.14/units/{0}/1
|
testFormat: 2.14/units/{0}/1
|
||||||
targets:
|
targets:
|
||||||
- test: 3.9
|
- test: 3.9
|
||||||
- stage: Units_2_13
|
|
||||||
displayName: Units 2.13
|
|
||||||
dependsOn: []
|
|
||||||
jobs:
|
|
||||||
- template: templates/matrix.yml
|
|
||||||
parameters:
|
|
||||||
nameFormat: Python {0}
|
|
||||||
testFormat: 2.13/units/{0}/1
|
|
||||||
targets:
|
|
||||||
- test: 2.7
|
|
||||||
- test: 3.8
|
|
||||||
|
|
||||||
## Remote
|
## Remote
|
||||||
- stage: Remote_devel_extra_vms
|
- stage: Remote_devel_extra_vms
|
||||||
@@ -171,10 +171,10 @@ stages:
|
|||||||
parameters:
|
parameters:
|
||||||
testFormat: devel/{0}
|
testFormat: devel/{0}
|
||||||
targets:
|
targets:
|
||||||
- name: Alpine 3.17
|
- name: Alpine 3.18
|
||||||
test: alpine/3.17
|
test: alpine/3.18
|
||||||
# - name: Fedora 38
|
# - name: Fedora 39
|
||||||
# test: fedora/38
|
# test: fedora/39
|
||||||
- name: Ubuntu 22.04
|
- name: Ubuntu 22.04
|
||||||
test: ubuntu/22.04
|
test: ubuntu/22.04
|
||||||
groups:
|
groups:
|
||||||
@@ -189,12 +189,30 @@ stages:
|
|||||||
targets:
|
targets:
|
||||||
- name: macOS 13.2
|
- name: macOS 13.2
|
||||||
test: macos/13.2
|
test: macos/13.2
|
||||||
|
- name: RHEL 9.3
|
||||||
|
test: rhel/9.3
|
||||||
|
- name: FreeBSD 13.2
|
||||||
|
test: freebsd/13.2
|
||||||
|
groups:
|
||||||
|
- 1
|
||||||
|
- 2
|
||||||
|
- 3
|
||||||
|
- stage: Remote_2_16
|
||||||
|
displayName: Remote 2.16
|
||||||
|
dependsOn: []
|
||||||
|
jobs:
|
||||||
|
- template: templates/matrix.yml
|
||||||
|
parameters:
|
||||||
|
testFormat: 2.16/{0}
|
||||||
|
targets:
|
||||||
|
#- name: macOS 13.2
|
||||||
|
# test: macos/13.2
|
||||||
- name: RHEL 9.2
|
- name: RHEL 9.2
|
||||||
test: rhel/9.2
|
test: rhel/9.2
|
||||||
- name: RHEL 8.8
|
- name: RHEL 8.8
|
||||||
test: rhel/8.8
|
test: rhel/8.8
|
||||||
- name: FreeBSD 13.2
|
#- name: FreeBSD 13.2
|
||||||
test: freebsd/13.2
|
# test: freebsd/13.2
|
||||||
groups:
|
groups:
|
||||||
- 1
|
- 1
|
||||||
- 2
|
- 2
|
||||||
@@ -213,10 +231,10 @@ stages:
|
|||||||
test: rhel/8.7
|
test: rhel/8.7
|
||||||
- name: RHEL 7.9
|
- name: RHEL 7.9
|
||||||
test: rhel/7.9
|
test: rhel/7.9
|
||||||
- name: FreeBSD 13.1
|
# - name: FreeBSD 13.1
|
||||||
test: freebsd/13.1
|
# test: freebsd/13.1
|
||||||
- name: FreeBSD 12.4
|
# - name: FreeBSD 12.4
|
||||||
test: freebsd/12.4
|
# test: freebsd/12.4
|
||||||
groups:
|
groups:
|
||||||
- 1
|
- 1
|
||||||
- 2
|
- 2
|
||||||
@@ -231,26 +249,10 @@ stages:
|
|||||||
targets:
|
targets:
|
||||||
- name: RHEL 9.0
|
- name: RHEL 9.0
|
||||||
test: rhel/9.0
|
test: rhel/9.0
|
||||||
- name: FreeBSD 12.3
|
#- name: macOS 12.0
|
||||||
test: freebsd/12.3
|
# test: macos/12.0
|
||||||
groups:
|
#- name: FreeBSD 12.4
|
||||||
- 1
|
# test: freebsd/12.4
|
||||||
- 2
|
|
||||||
- 3
|
|
||||||
- stage: Remote_2_13
|
|
||||||
displayName: Remote 2.13
|
|
||||||
dependsOn: []
|
|
||||||
jobs:
|
|
||||||
- template: templates/matrix.yml
|
|
||||||
parameters:
|
|
||||||
testFormat: 2.13/{0}
|
|
||||||
targets:
|
|
||||||
- name: macOS 12.0
|
|
||||||
test: macos/12.0
|
|
||||||
- name: RHEL 8.5
|
|
||||||
test: rhel/8.5
|
|
||||||
- name: FreeBSD 13.0
|
|
||||||
test: freebsd/13.0
|
|
||||||
groups:
|
groups:
|
||||||
- 1
|
- 1
|
||||||
- 2
|
- 2
|
||||||
@@ -265,10 +267,8 @@ stages:
|
|||||||
parameters:
|
parameters:
|
||||||
testFormat: devel/linux/{0}
|
testFormat: devel/linux/{0}
|
||||||
targets:
|
targets:
|
||||||
- name: Fedora 38
|
- name: Fedora 39
|
||||||
test: fedora38
|
test: fedora39
|
||||||
- name: openSUSE 15
|
|
||||||
test: opensuse15
|
|
||||||
- name: Ubuntu 20.04
|
- name: Ubuntu 20.04
|
||||||
test: ubuntu2004
|
test: ubuntu2004
|
||||||
- name: Ubuntu 22.04
|
- name: Ubuntu 22.04
|
||||||
@@ -279,6 +279,22 @@ stages:
|
|||||||
- 1
|
- 1
|
||||||
- 2
|
- 2
|
||||||
- 3
|
- 3
|
||||||
|
- stage: Docker_2_16
|
||||||
|
displayName: Docker 2.16
|
||||||
|
dependsOn: []
|
||||||
|
jobs:
|
||||||
|
- template: templates/matrix.yml
|
||||||
|
parameters:
|
||||||
|
testFormat: 2.16/linux/{0}
|
||||||
|
targets:
|
||||||
|
- name: Fedora 38
|
||||||
|
test: fedora38
|
||||||
|
- name: openSUSE 15
|
||||||
|
test: opensuse15
|
||||||
|
groups:
|
||||||
|
- 1
|
||||||
|
- 2
|
||||||
|
- 3
|
||||||
- stage: Docker_2_15
|
- stage: Docker_2_15
|
||||||
displayName: Docker 2.15
|
displayName: Docker 2.15
|
||||||
dependsOn: []
|
dependsOn: []
|
||||||
@@ -303,24 +319,6 @@ stages:
|
|||||||
parameters:
|
parameters:
|
||||||
testFormat: 2.14/linux/{0}
|
testFormat: 2.14/linux/{0}
|
||||||
targets:
|
targets:
|
||||||
- name: Fedora 36
|
|
||||||
test: fedora36
|
|
||||||
groups:
|
|
||||||
- 1
|
|
||||||
- 2
|
|
||||||
- 3
|
|
||||||
- stage: Docker_2_13
|
|
||||||
displayName: Docker 2.13
|
|
||||||
dependsOn: []
|
|
||||||
jobs:
|
|
||||||
- template: templates/matrix.yml
|
|
||||||
parameters:
|
|
||||||
testFormat: 2.13/linux/{0}
|
|
||||||
targets:
|
|
||||||
- name: Fedora 35
|
|
||||||
test: fedora35
|
|
||||||
- name: openSUSE 15 py2
|
|
||||||
test: opensuse15py2
|
|
||||||
- name: Alpine 3
|
- name: Alpine 3
|
||||||
test: alpine3
|
test: alpine3
|
||||||
groups:
|
groups:
|
||||||
@@ -343,8 +341,6 @@ stages:
|
|||||||
test: debian-bookworm/3.11
|
test: debian-bookworm/3.11
|
||||||
- name: ArchLinux
|
- name: ArchLinux
|
||||||
test: archlinux/3.11
|
test: archlinux/3.11
|
||||||
- name: CentOS Stream 8
|
|
||||||
test: centos-stream8/3.9
|
|
||||||
groups:
|
groups:
|
||||||
- 1
|
- 1
|
||||||
- 2
|
- 2
|
||||||
@@ -360,7 +356,19 @@ stages:
|
|||||||
nameFormat: Python {0}
|
nameFormat: Python {0}
|
||||||
testFormat: devel/generic/{0}/1
|
testFormat: devel/generic/{0}/1
|
||||||
targets:
|
targets:
|
||||||
- test: 2.7
|
- test: '3.7'
|
||||||
|
- test: '3.12'
|
||||||
|
- stage: Generic_2_16
|
||||||
|
displayName: Generic 2.16
|
||||||
|
dependsOn: []
|
||||||
|
jobs:
|
||||||
|
- template: templates/matrix.yml
|
||||||
|
parameters:
|
||||||
|
nameFormat: Python {0}
|
||||||
|
testFormat: 2.16/generic/{0}/1
|
||||||
|
targets:
|
||||||
|
- test: '2.7'
|
||||||
|
- test: '3.6'
|
||||||
- test: '3.11'
|
- test: '3.11'
|
||||||
- stage: Generic_2_15
|
- stage: Generic_2_15
|
||||||
displayName: Generic 2.15
|
displayName: Generic 2.15
|
||||||
@@ -371,7 +379,7 @@ stages:
|
|||||||
nameFormat: Python {0}
|
nameFormat: Python {0}
|
||||||
testFormat: 2.15/generic/{0}/1
|
testFormat: 2.15/generic/{0}/1
|
||||||
targets:
|
targets:
|
||||||
- test: 3.9
|
- test: '3.9'
|
||||||
- stage: Generic_2_14
|
- stage: Generic_2_14
|
||||||
displayName: Generic 2.14
|
displayName: Generic 2.14
|
||||||
dependsOn: []
|
dependsOn: []
|
||||||
@@ -382,42 +390,32 @@ stages:
|
|||||||
testFormat: 2.14/generic/{0}/1
|
testFormat: 2.14/generic/{0}/1
|
||||||
targets:
|
targets:
|
||||||
- test: '3.10'
|
- test: '3.10'
|
||||||
- stage: Generic_2_13
|
|
||||||
displayName: Generic 2.13
|
|
||||||
dependsOn: []
|
|
||||||
jobs:
|
|
||||||
- template: templates/matrix.yml
|
|
||||||
parameters:
|
|
||||||
nameFormat: Python {0}
|
|
||||||
testFormat: 2.13/generic/{0}/1
|
|
||||||
targets:
|
|
||||||
- test: 3.9
|
|
||||||
|
|
||||||
- stage: Summary
|
- stage: Summary
|
||||||
condition: succeededOrFailed()
|
condition: succeededOrFailed()
|
||||||
dependsOn:
|
dependsOn:
|
||||||
- Sanity_devel
|
- Sanity_devel
|
||||||
- Sanity_2_13
|
- Sanity_2_16
|
||||||
- Sanity_2_14
|
|
||||||
- Sanity_2_15
|
- Sanity_2_15
|
||||||
|
- Sanity_2_14
|
||||||
- Units_devel
|
- Units_devel
|
||||||
- Units_2_13
|
- Units_2_16
|
||||||
- Units_2_14
|
|
||||||
- Units_2_15
|
- Units_2_15
|
||||||
|
- Units_2_14
|
||||||
- Remote_devel_extra_vms
|
- Remote_devel_extra_vms
|
||||||
- Remote_devel
|
- Remote_devel
|
||||||
- Remote_2_13
|
- Remote_2_16
|
||||||
- Remote_2_14
|
|
||||||
- Remote_2_15
|
- Remote_2_15
|
||||||
|
- Remote_2_14
|
||||||
- Docker_devel
|
- Docker_devel
|
||||||
- Docker_2_13
|
- Docker_2_16
|
||||||
- Docker_2_14
|
|
||||||
- Docker_2_15
|
- Docker_2_15
|
||||||
|
- Docker_2_14
|
||||||
- Docker_community_devel
|
- Docker_community_devel
|
||||||
# Right now all generic tests are disabled. Uncomment when at least one of them is re-enabled.
|
# Right now all generic tests are disabled. Uncomment when at least one of them is re-enabled.
|
||||||
# - Generic_devel
|
# - Generic_devel
|
||||||
# - Generic_2_13
|
# - Generic_2_16
|
||||||
# - Generic_2_14
|
|
||||||
# - Generic_2_15
|
# - Generic_2_15
|
||||||
|
# - Generic_2_14
|
||||||
jobs:
|
jobs:
|
||||||
- template: templates/coverage.yml
|
- template: templates/coverage.yml
|
||||||
|
|||||||
33
.github/BOTMETA.yml
vendored
33
.github/BOTMETA.yml
vendored
@@ -249,9 +249,11 @@ files:
|
|||||||
labels: onepassword
|
labels: onepassword
|
||||||
maintainers: samdoran
|
maintainers: samdoran
|
||||||
$lookups/onepassword.py:
|
$lookups/onepassword.py:
|
||||||
maintainers: azenk scottsb
|
ignore: scottsb
|
||||||
|
maintainers: azenk
|
||||||
$lookups/onepassword_raw.py:
|
$lookups/onepassword_raw.py:
|
||||||
maintainers: azenk scottsb
|
ignore: scottsb
|
||||||
|
maintainers: azenk
|
||||||
$lookups/passwordstore.py: {}
|
$lookups/passwordstore.py: {}
|
||||||
$lookups/random_pet.py:
|
$lookups/random_pet.py:
|
||||||
maintainers: Akasurde
|
maintainers: Akasurde
|
||||||
@@ -276,6 +278,8 @@ files:
|
|||||||
$module_utils/gconftool2.py:
|
$module_utils/gconftool2.py:
|
||||||
labels: gconftool2
|
labels: gconftool2
|
||||||
maintainers: russoz
|
maintainers: russoz
|
||||||
|
$module_utils/gio_mime.py:
|
||||||
|
maintainers: russoz
|
||||||
$module_utils/gitlab.py:
|
$module_utils/gitlab.py:
|
||||||
keywords: gitlab source_control
|
keywords: gitlab source_control
|
||||||
labels: gitlab
|
labels: gitlab
|
||||||
@@ -328,6 +332,9 @@ files:
|
|||||||
$module_utils/scaleway.py:
|
$module_utils/scaleway.py:
|
||||||
labels: cloud scaleway
|
labels: cloud scaleway
|
||||||
maintainers: $team_scaleway
|
maintainers: $team_scaleway
|
||||||
|
$module_utils/snap.py:
|
||||||
|
labels: snap
|
||||||
|
maintainers: russoz
|
||||||
$module_utils/ssh.py:
|
$module_utils/ssh.py:
|
||||||
maintainers: russoz
|
maintainers: russoz
|
||||||
$module_utils/storage/hpe3par/hpe3par.py:
|
$module_utils/storage/hpe3par/hpe3par.py:
|
||||||
@@ -512,6 +519,8 @@ files:
|
|||||||
$modules/gem.py:
|
$modules/gem.py:
|
||||||
labels: gem
|
labels: gem
|
||||||
maintainers: $team_ansible_core johanwiren
|
maintainers: $team_ansible_core johanwiren
|
||||||
|
$modules/gio_mime.py:
|
||||||
|
maintainers: russoz
|
||||||
$modules/git_config.py:
|
$modules/git_config.py:
|
||||||
maintainers: djmattyg007 mgedmin
|
maintainers: djmattyg007 mgedmin
|
||||||
$modules/github_:
|
$modules/github_:
|
||||||
@@ -633,6 +642,7 @@ files:
|
|||||||
maintainers: bregman-arie
|
maintainers: bregman-arie
|
||||||
$modules/ipa_:
|
$modules/ipa_:
|
||||||
maintainers: $team_ipa
|
maintainers: $team_ipa
|
||||||
|
ignore: fxfitz
|
||||||
$modules/ipbase_info.py:
|
$modules/ipbase_info.py:
|
||||||
maintainers: dominikkukacka
|
maintainers: dominikkukacka
|
||||||
$modules/ipa_pwpolicy.py:
|
$modules/ipa_pwpolicy.py:
|
||||||
@@ -669,7 +679,9 @@ files:
|
|||||||
labels: jboss
|
labels: jboss
|
||||||
maintainers: $team_jboss jhoekx
|
maintainers: $team_jboss jhoekx
|
||||||
$modules/jenkins_build.py:
|
$modules/jenkins_build.py:
|
||||||
maintainers: brettmilford unnecessary-username
|
maintainers: brettmilford unnecessary-username juanmcasanova
|
||||||
|
$modules/jenkins_build_info.py:
|
||||||
|
maintainers: juanmcasanova
|
||||||
$modules/jenkins_job.py:
|
$modules/jenkins_job.py:
|
||||||
maintainers: sermilrod
|
maintainers: sermilrod
|
||||||
$modules/jenkins_job_info.py:
|
$modules/jenkins_job_info.py:
|
||||||
@@ -696,6 +708,8 @@ files:
|
|||||||
maintainers: mattock
|
maintainers: mattock
|
||||||
$modules/keycloak_authz_permission.py:
|
$modules/keycloak_authz_permission.py:
|
||||||
maintainers: mattock
|
maintainers: mattock
|
||||||
|
$modules/keycloak_authz_custom_policy.py:
|
||||||
|
maintainers: mattock
|
||||||
$modules/keycloak_authz_permission_info.py:
|
$modules/keycloak_authz_permission_info.py:
|
||||||
maintainers: mattock
|
maintainers: mattock
|
||||||
$modules/keycloak_client_rolemapping.py:
|
$modules/keycloak_client_rolemapping.py:
|
||||||
@@ -716,6 +730,8 @@ files:
|
|||||||
maintainers: kris2kris
|
maintainers: kris2kris
|
||||||
$modules/keycloak_realm_info.py:
|
$modules/keycloak_realm_info.py:
|
||||||
maintainers: fynncfchen
|
maintainers: fynncfchen
|
||||||
|
$modules/keycloak_realm_key.py:
|
||||||
|
maintainers: mattock
|
||||||
$modules/keycloak_role.py:
|
$modules/keycloak_role.py:
|
||||||
maintainers: laurpaum
|
maintainers: laurpaum
|
||||||
$modules/keycloak_user.py:
|
$modules/keycloak_user.py:
|
||||||
@@ -937,7 +953,7 @@ files:
|
|||||||
labels: pagerduty
|
labels: pagerduty
|
||||||
maintainers: suprememoocow thaumos
|
maintainers: suprememoocow thaumos
|
||||||
$modules/pagerduty_alert.py:
|
$modules/pagerduty_alert.py:
|
||||||
maintainers: ApsOps
|
maintainers: ApsOps xshen1
|
||||||
$modules/pagerduty_change.py:
|
$modules/pagerduty_change.py:
|
||||||
maintainers: adamvaughan
|
maintainers: adamvaughan
|
||||||
$modules/pagerduty_user.py:
|
$modules/pagerduty_user.py:
|
||||||
@@ -980,6 +996,9 @@ files:
|
|||||||
maintainers: $team_solaris dermute
|
maintainers: $team_solaris dermute
|
||||||
$modules/pmem.py:
|
$modules/pmem.py:
|
||||||
maintainers: mizumm
|
maintainers: mizumm
|
||||||
|
$modules/pnpm.py:
|
||||||
|
ignore: chrishoffman
|
||||||
|
maintainers: aretrosen
|
||||||
$modules/portage.py:
|
$modules/portage.py:
|
||||||
ignore: sayap
|
ignore: sayap
|
||||||
labels: portage
|
labels: portage
|
||||||
@@ -1195,6 +1214,8 @@ files:
|
|||||||
ignore: ryansb
|
ignore: ryansb
|
||||||
$modules/shutdown.py:
|
$modules/shutdown.py:
|
||||||
maintainers: nitzmahone samdoran aminvakil
|
maintainers: nitzmahone samdoran aminvakil
|
||||||
|
$modules/simpleinit_msb.py:
|
||||||
|
maintainers: vaygr
|
||||||
$modules/sl_vm.py:
|
$modules/sl_vm.py:
|
||||||
maintainers: mcltn
|
maintainers: mcltn
|
||||||
$modules/slack.py:
|
$modules/slack.py:
|
||||||
@@ -1207,7 +1228,7 @@ files:
|
|||||||
maintainers: $team_solaris
|
maintainers: $team_solaris
|
||||||
$modules/snap.py:
|
$modules/snap.py:
|
||||||
labels: snap
|
labels: snap
|
||||||
maintainers: angristan vcarceler
|
maintainers: angristan vcarceler russoz
|
||||||
$modules/snap_alias.py:
|
$modules/snap_alias.py:
|
||||||
labels: snap
|
labels: snap
|
||||||
maintainers: russoz
|
maintainers: russoz
|
||||||
@@ -1413,7 +1434,7 @@ macros:
|
|||||||
team_gitlab: Lunik Shaps marwatk waheedi zanssa scodeman metanovii sh0shin nejch lgatellier suukit
|
team_gitlab: Lunik Shaps marwatk waheedi zanssa scodeman metanovii sh0shin nejch lgatellier suukit
|
||||||
team_hpux: bcoca davx8342
|
team_hpux: bcoca davx8342
|
||||||
team_huawei: QijunPan TommyLike edisonxiang freesky-edward hwDCN niuzhenguo xuxiaowei0512 yanzhangi zengchen1024 zhongjun2
|
team_huawei: QijunPan TommyLike edisonxiang freesky-edward hwDCN niuzhenguo xuxiaowei0512 yanzhangi zengchen1024 zhongjun2
|
||||||
team_ipa: Akasurde Nosmoht fxfitz justchris1
|
team_ipa: Akasurde Nosmoht justchris1
|
||||||
team_jboss: Wolfant jairojunior wbrefvem
|
team_jboss: Wolfant jairojunior wbrefvem
|
||||||
team_keycloak: eikef ndclt mattock
|
team_keycloak: eikef ndclt mattock
|
||||||
team_linode: InTheCloudDan decentral1se displague rmcintosh Charliekenney23 LBGarber
|
team_linode: InTheCloudDan decentral1se displague rmcintosh Charliekenney23 LBGarber
|
||||||
|
|||||||
47
.github/workflows/ansible-test.yml
vendored
47
.github/workflows/ansible-test.yml
vendored
@@ -31,6 +31,7 @@ jobs:
|
|||||||
ansible:
|
ansible:
|
||||||
- '2.11'
|
- '2.11'
|
||||||
- '2.12'
|
- '2.12'
|
||||||
|
- '2.13'
|
||||||
# Ansible-test on various stable branches does not yet work well with cgroups v2.
|
# Ansible-test on various stable branches does not yet work well with cgroups v2.
|
||||||
# Since ubuntu-latest now uses Ubuntu 22.04, we need to fall back to the ubuntu-20.04
|
# Since ubuntu-latest now uses Ubuntu 22.04, we need to fall back to the ubuntu-20.04
|
||||||
# image for these stable branches. The list of branches where this is necessary will
|
# image for these stable branches. The list of branches where this is necessary will
|
||||||
@@ -80,6 +81,10 @@ jobs:
|
|||||||
python: '2.6'
|
python: '2.6'
|
||||||
- ansible: '2.12'
|
- ansible: '2.12'
|
||||||
python: '3.8'
|
python: '3.8'
|
||||||
|
- ansible: '2.13'
|
||||||
|
python: '2.7'
|
||||||
|
- ansible: '2.13'
|
||||||
|
python: '3.8'
|
||||||
|
|
||||||
steps:
|
steps:
|
||||||
- name: >-
|
- name: >-
|
||||||
@@ -211,6 +216,48 @@ jobs:
|
|||||||
# docker: default
|
# docker: default
|
||||||
# python: '3.8'
|
# python: '3.8'
|
||||||
# target: azp/generic/1/
|
# target: azp/generic/1/
|
||||||
|
# 2.13
|
||||||
|
- ansible: '2.13'
|
||||||
|
docker: fedora35
|
||||||
|
python: ''
|
||||||
|
target: azp/posix/1/
|
||||||
|
- ansible: '2.13'
|
||||||
|
docker: fedora35
|
||||||
|
python: ''
|
||||||
|
target: azp/posix/2/
|
||||||
|
- ansible: '2.13'
|
||||||
|
docker: fedora35
|
||||||
|
python: ''
|
||||||
|
target: azp/posix/3/
|
||||||
|
- ansible: '2.13'
|
||||||
|
docker: opensuse15py2
|
||||||
|
python: ''
|
||||||
|
target: azp/posix/1/
|
||||||
|
- ansible: '2.13'
|
||||||
|
docker: opensuse15py2
|
||||||
|
python: ''
|
||||||
|
target: azp/posix/2/
|
||||||
|
- ansible: '2.13'
|
||||||
|
docker: opensuse15py2
|
||||||
|
python: ''
|
||||||
|
target: azp/posix/3/
|
||||||
|
- ansible: '2.13'
|
||||||
|
docker: alpine3
|
||||||
|
python: ''
|
||||||
|
target: azp/posix/1/
|
||||||
|
- ansible: '2.13'
|
||||||
|
docker: alpine3
|
||||||
|
python: ''
|
||||||
|
target: azp/posix/2/
|
||||||
|
- ansible: '2.13'
|
||||||
|
docker: alpine3
|
||||||
|
python: ''
|
||||||
|
target: azp/posix/3/
|
||||||
|
# Right now all generic tests are disabled. Uncomment when at least one of them is re-enabled.
|
||||||
|
# - ansible: '2.13'
|
||||||
|
# docker: default
|
||||||
|
# python: '3.9'
|
||||||
|
# target: azp/generic/1/
|
||||||
|
|
||||||
steps:
|
steps:
|
||||||
- name: >-
|
- name: >-
|
||||||
|
|||||||
6
.github/workflows/codeql-analysis.yml
vendored
6
.github/workflows/codeql-analysis.yml
vendored
@@ -24,13 +24,13 @@ jobs:
|
|||||||
|
|
||||||
steps:
|
steps:
|
||||||
- name: Checkout repository
|
- name: Checkout repository
|
||||||
uses: actions/checkout@v3
|
uses: actions/checkout@v4
|
||||||
|
|
||||||
# Initializes the CodeQL tools for scanning.
|
# Initializes the CodeQL tools for scanning.
|
||||||
- name: Initialize CodeQL
|
- name: Initialize CodeQL
|
||||||
uses: github/codeql-action/init@v2
|
uses: github/codeql-action/init@v3
|
||||||
with:
|
with:
|
||||||
languages: python
|
languages: python
|
||||||
|
|
||||||
- name: Perform CodeQL Analysis
|
- name: Perform CodeQL Analysis
|
||||||
uses: github/codeql-action/analyze@v2
|
uses: github/codeql-action/analyze@v3
|
||||||
|
|||||||
20
.github/workflows/import-galaxy.yml
vendored
Normal file
20
.github/workflows/import-galaxy.yml
vendored
Normal file
@@ -0,0 +1,20 @@
|
|||||||
|
---
|
||||||
|
# Copyright (c) Ansible Project
|
||||||
|
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||||
|
# SPDX-License-Identifier: GPL-3.0-or-later
|
||||||
|
|
||||||
|
name: import-galaxy
|
||||||
|
'on':
|
||||||
|
# Run CI against all pushes (direct commits, also merged PRs) to main, and all Pull Requests
|
||||||
|
push:
|
||||||
|
branches:
|
||||||
|
- main
|
||||||
|
- stable-*
|
||||||
|
pull_request:
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
import-galaxy:
|
||||||
|
permissions:
|
||||||
|
contents: read
|
||||||
|
name: Test to import built collection artifact with Galaxy importer
|
||||||
|
uses: ansible-community/github-action-test-galaxy-import/.github/workflows/test-galaxy-import.yml@main
|
||||||
11
.github/workflows/reuse.yml
vendored
11
.github/workflows/reuse.yml
vendored
@@ -22,14 +22,9 @@ jobs:
|
|||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
|
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@v3
|
- uses: actions/checkout@v4
|
||||||
with:
|
with:
|
||||||
ref: ${{ github.event.pull_request.head.sha || '' }}
|
ref: ${{ github.event.pull_request.head.sha || '' }}
|
||||||
|
|
||||||
- name: Install dependencies
|
- name: REUSE Compliance Check
|
||||||
run: |
|
uses: fsfe/reuse-action@v2
|
||||||
pip install reuse
|
|
||||||
|
|
||||||
- name: Check REUSE compliance
|
|
||||||
run: |
|
|
||||||
reuse lint
|
|
||||||
|
|||||||
245
CHANGELOG.rst
245
CHANGELOG.rst
@@ -6,6 +6,249 @@ Community General Release Notes
|
|||||||
|
|
||||||
This changelog describes changes after version 6.0.0.
|
This changelog describes changes after version 6.0.0.
|
||||||
|
|
||||||
|
v7.5.4
|
||||||
|
======
|
||||||
|
|
||||||
|
Release Summary
|
||||||
|
---------------
|
||||||
|
|
||||||
|
Regular bugfix release.
|
||||||
|
|
||||||
|
Bugfixes
|
||||||
|
--------
|
||||||
|
|
||||||
|
- homebrew - detect already installed formulae and casks using JSON output from ``brew info`` (https://github.com/ansible-collections/community.general/issues/864).
|
||||||
|
- ipa_otptoken - the module expect ``ipatokendisabled`` as string but the ``ipatokendisabled`` value is returned as a boolean (https://github.com/ansible-collections/community.general/pull/7795).
|
||||||
|
- ldap - previously the order number (if present) was expected to follow an equals sign in the DN. This makes it so the order number string is identified correctly anywhere within the DN (https://github.com/ansible-collections/community.general/issues/7646).
|
||||||
|
- mssql_script - make the module work with Python 2 (https://github.com/ansible-collections/community.general/issues/7818, https://github.com/ansible-collections/community.general/pull/7821).
|
||||||
|
- nmcli - fix ``connection.slave-type`` wired to ``bond`` and not with parameter ``slave_type`` in case of connection type ``wifi`` (https://github.com/ansible-collections/community.general/issues/7389).
|
||||||
|
|
||||||
|
v7.5.3
|
||||||
|
======
|
||||||
|
|
||||||
|
Release Summary
|
||||||
|
---------------
|
||||||
|
|
||||||
|
Regular bugfix release.
|
||||||
|
|
||||||
|
Bugfixes
|
||||||
|
--------
|
||||||
|
|
||||||
|
- keycloak_identity_provider - ``mappers`` processing was not idempotent if the mappers configuration list had not been sorted by name (in ascending order). Fix resolves the issue by sorting mappers in the desired state using the same key which is used for obtaining existing state (https://github.com/ansible-collections/community.general/pull/7418).
|
||||||
|
- keycloak_identity_provider - it was not possible to reconfigure (add, remove) ``mappers`` once they were created initially. Removal was ignored, adding new ones resulted in dropping the pre-existing unmodified mappers. Fix resolves the issue by supplying correct input to the internal update call (https://github.com/ansible-collections/community.general/pull/7418).
|
||||||
|
- keycloak_user - when ``force`` is set, but user does not exist, do not try to delete it (https://github.com/ansible-collections/community.general/pull/7696).
|
||||||
|
- statusio_maintenance - fix error caused by incorrectly formed API data payload. Was raising "Failed to create maintenance HTTP Error 400 Bad Request" caused by bad data type for date/time and deprecated dict keys (https://github.com/ansible-collections/community.general/pull/7754).
|
||||||
|
|
||||||
|
v7.5.2
|
||||||
|
======
|
||||||
|
|
||||||
|
Release Summary
|
||||||
|
---------------
|
||||||
|
|
||||||
|
Regular bugfix release.
|
||||||
|
|
||||||
|
Minor Changes
|
||||||
|
-------------
|
||||||
|
|
||||||
|
- elastic callback plugin - close elastic client to not leak resources (https://github.com/ansible-collections/community.general/pull/7517).
|
||||||
|
|
||||||
|
Bugfixes
|
||||||
|
--------
|
||||||
|
|
||||||
|
- cloudflare_dns - fix Cloudflare lookup of SHFP records (https://github.com/ansible-collections/community.general/issues/7652).
|
||||||
|
- interface_files - also consider ``address_family`` when changing ``option=method`` (https://github.com/ansible-collections/community.general/issues/7610, https://github.com/ansible-collections/community.general/pull/7612).
|
||||||
|
- irc - replace ``ssl.wrap_socket`` that was removed from Python 3.12 with code for creating a proper SSL context (https://github.com/ansible-collections/community.general/pull/7542).
|
||||||
|
- keycloak_* - fix Keycloak API client to quote ``/`` properly (https://github.com/ansible-collections/community.general/pull/7641).
|
||||||
|
- keycloak_authz_permission - resource payload variable for scope-based permission was constructed as a string, when it needs to be a list, even for a single item (https://github.com/ansible-collections/community.general/issues/7151).
|
||||||
|
- log_entries callback plugin - replace ``ssl.wrap_socket`` that was removed from Python 3.12 with code for creating a proper SSL context (https://github.com/ansible-collections/community.general/pull/7542).
|
||||||
|
- lvol - test for output messages in both ``stdout`` and ``stderr`` (https://github.com/ansible-collections/community.general/pull/7601, https://github.com/ansible-collections/community.general/issues/7182).
|
||||||
|
- ocapi_utils, oci_utils, redfish_utils module utils - replace ``type()`` calls with ``isinstance()`` calls (https://github.com/ansible-collections/community.general/pull/7501).
|
||||||
|
- onepassword lookup plugin - field and section titles are now case insensitive when using op CLI version two or later. This matches the behavior of version one (https://github.com/ansible-collections/community.general/pull/7564).
|
||||||
|
- pipx module utils - change the CLI argument formatter for the ``pip_args`` parameter (https://github.com/ansible-collections/community.general/issues/7497, https://github.com/ansible-collections/community.general/pull/7506).
|
||||||
|
- redhat_subscription - use the D-Bus registration on RHEL 7 only on 7.4 and
|
||||||
|
greater; older versions of RHEL 7 do not have it
|
||||||
|
(https://github.com/ansible-collections/community.general/issues/7622,
|
||||||
|
https://github.com/ansible-collections/community.general/pull/7624).
|
||||||
|
- terraform - fix multiline string handling in complex variables (https://github.com/ansible-collections/community.general/pull/7535).
|
||||||
|
|
||||||
|
v7.5.1
|
||||||
|
======
|
||||||
|
|
||||||
|
Release Summary
|
||||||
|
---------------
|
||||||
|
|
||||||
|
Regular bugfix release.
|
||||||
|
|
||||||
|
Bugfixes
|
||||||
|
--------
|
||||||
|
|
||||||
|
- composer - fix impossible to run ``working_dir`` dependent commands. The module was throwing an error when trying to run a ``working_dir`` dependent command, because it tried to get the command help without passing the ``working_dir`` (https://github.com/ansible-collections/community.general/issues/3787).
|
||||||
|
- github_deploy_key - fix pagination behaviour causing a crash when only a single page of deploy keys exist (https://github.com/ansible-collections/community.general/pull/7375).
|
||||||
|
- gitlab_group_members - fix gitlab constants call in ``gitlab_group_members`` module (https://github.com/ansible-collections/community.general/issues/7467).
|
||||||
|
- gitlab_project_members - fix gitlab constants call in ``gitlab_project_members`` module (https://github.com/ansible-collections/community.general/issues/7467).
|
||||||
|
- gitlab_protected_branches - fix gitlab constants call in ``gitlab_protected_branches`` module (https://github.com/ansible-collections/community.general/issues/7467).
|
||||||
|
- gitlab_user - fix gitlab constants call in ``gitlab_user`` module (https://github.com/ansible-collections/community.general/issues/7467).
|
||||||
|
- kernel_blacklist - simplified the mechanism to update the file, fixing the error (https://github.com/ansible-collections/community.general/pull/7382, https://github.com/ansible-collections/community.general/issues/7362).
|
||||||
|
- memset module utils - make compatible with ansible-core 2.17 (https://github.com/ansible-collections/community.general/pull/7379).
|
||||||
|
- proxmox_pool_member - absent state for type VM did not delete VMs from the pools (https://github.com/ansible-collections/community.general/pull/7464).
|
||||||
|
- redfish_command - fix usage of message parsing in ``SimpleUpdate`` and ``MultipartHTTPPushUpdate`` commands to treat the lack of a ``MessageId`` as no message (https://github.com/ansible-collections/community.general/issues/7465, https://github.com/ansible-collections/community.general/pull/7471).
|
||||||
|
- redhat_subscription - use the right D-Bus options for the consumer type when
|
||||||
|
registering a RHEL system older than 9 or a RHEL 9 system older than 9.2
|
||||||
|
and using ``consumer_type``
|
||||||
|
(https://github.com/ansible-collections/community.general/pull/7378).
|
||||||
|
- selective callback plugin - fix length of task name lines in output always being 3 characters longer than desired (https://github.com/ansible-collections/community.general/pull/7374).
|
||||||
|
|
||||||
|
v7.5.0
|
||||||
|
======
|
||||||
|
|
||||||
|
Release Summary
|
||||||
|
---------------
|
||||||
|
|
||||||
|
Regular bugfix and feature release.
|
||||||
|
|
||||||
|
Please note that this is the last minor 7.x.0 release. Further releases
|
||||||
|
with major version 7 will be bugfix releases 7.5.y.
|
||||||
|
|
||||||
|
|
||||||
|
Minor Changes
|
||||||
|
-------------
|
||||||
|
|
||||||
|
- cargo - add option ``executable``, which allows user to specify path to the cargo binary (https://github.com/ansible-collections/community.general/pull/7352).
|
||||||
|
- cargo - add option ``locked`` which allows user to specify install the locked version of dependency instead of latest compatible version (https://github.com/ansible-collections/community.general/pull/6134).
|
||||||
|
- dig lookup plugin - add TCP option to enable the use of TCP connection during DNS lookup (https://github.com/ansible-collections/community.general/pull/7343).
|
||||||
|
- gitlab_group - add option ``force_delete`` (default: false) which allows delete group even if projects exists in it (https://github.com/ansible-collections/community.general/pull/7364).
|
||||||
|
- ini_file - add ``ignore_spaces`` option (https://github.com/ansible-collections/community.general/pull/7273).
|
||||||
|
- newrelic_deployment - add option ``app_name_exact_match``, which filters results for the exact app_name provided (https://github.com/ansible-collections/community.general/pull/7355).
|
||||||
|
- onepassword lookup plugin - introduce ``account_id`` option which allows specifying which account to use (https://github.com/ansible-collections/community.general/pull/7308).
|
||||||
|
- onepassword_raw lookup plugin - introduce ``account_id`` option which allows specifying which account to use (https://github.com/ansible-collections/community.general/pull/7308).
|
||||||
|
- parted - on resize, use ``--fix`` option if available (https://github.com/ansible-collections/community.general/pull/7304).
|
||||||
|
- pnpm - set correct version when state is latest or version is not mentioned. Resolves previous idempotency problem (https://github.com/ansible-collections/community.general/pull/7339).
|
||||||
|
- proxmox - add ``vmid`` (and ``taskid`` when possible) to return values (https://github.com/ansible-collections/community.general/pull/7263).
|
||||||
|
- random_string - added new ``ignore_similar_chars`` and ``similar_chars`` option to ignore certain chars (https://github.com/ansible-collections/community.general/pull/7242).
|
||||||
|
- redfish_command - add new option ``update_oem_params`` for the ``MultipartHTTPPushUpdate`` command (https://github.com/ansible-collections/community.general/issues/7331).
|
||||||
|
- redfish_config - add ``CreateVolume`` command to allow creation of volumes on servers (https://github.com/ansible-collections/community.general/pull/6813).
|
||||||
|
- redfish_config - adding ``SetSecureBoot`` command (https://github.com/ansible-collections/community.general/pull/7129).
|
||||||
|
- redfish_info - add support for ``GetBiosRegistries`` command (https://github.com/ansible-collections/community.general/pull/7144).
|
||||||
|
- redfish_info - adds ``LinkStatus`` to NIC inventory (https://github.com/ansible-collections/community.general/pull/7318).
|
||||||
|
- redis_info - refactor the redis_info module to use the redis module_utils enabling to pass TLS parameters to the Redis client (https://github.com/ansible-collections/community.general/pull/7267).
|
||||||
|
- supervisorctl - allow to stop matching running processes before removing them with ``stop_before_removing=true`` (https://github.com/ansible-collections/community.general/pull/7284).
|
||||||
|
|
||||||
|
Deprecated Features
|
||||||
|
-------------------
|
||||||
|
|
||||||
|
- The next major release, community.general 8.0.0, will drop support for ansible-core 2.11 and 2.12, which have been End of Life for some time now. This means that this collection no longer supports Python 2.6 on the target. Individual content might still work with unsupported ansible-core versions, but that can change at any time. Also please note that from now on, for every new major community.general release, we will drop support for all ansible-core versions that have been End of Life for more than a few weeks on the date of the major release (https://github.com/ansible-community/community-topics/discussions/271, https://github.com/ansible-collections/community.general/pull/7259).
|
||||||
|
- redfish_info, redfish_config, redfish_command - the default value ``10`` for the ``timeout`` option is deprecated and will change to ``60`` in community.general 9.0.0 (https://github.com/ansible-collections/community.general/pull/7295).
|
||||||
|
|
||||||
|
Bugfixes
|
||||||
|
--------
|
||||||
|
|
||||||
|
- gitlab_group_variable - deleted all variables when used with ``purge=true`` due to missing ``raw`` property in KNOWN attributes (https://github.com/ansible-collections/community.general/issues/7250).
|
||||||
|
- gitlab_project_variable - deleted all variables when used with ``purge=true`` due to missing ``raw`` property in KNOWN attributes (https://github.com/ansible-collections/community.general/issues/7250).
|
||||||
|
- ldap_search - fix string normalization and the ``base64_attributes`` option on Python 3 (https://github.com/ansible-collections/community.general/issues/5704, https://github.com/ansible-collections/community.general/pull/7264).
|
||||||
|
- lxc connection plugin - properly evaluate options (https://github.com/ansible-collections/community.general/pull/7369).
|
||||||
|
- mail - skip headers containing equals characters due to missing ``maxsplit`` on header key/value parsing (https://github.com/ansible-collections/community.general/pull/7303).
|
||||||
|
- nmap inventory plugin - fix ``get_option`` calls (https://github.com/ansible-collections/community.general/pull/7323).
|
||||||
|
- onepassword - fix KeyError exception when trying to access value of a field that is not filled out in OnePassword item (https://github.com/ansible-collections/community.general/pull/7241).
|
||||||
|
- snap - change the change detection mechanism from "parsing installation" to "comparing end state with initial state" (https://github.com/ansible-collections/community.general/pull/7340, https://github.com/ansible-collections/community.general/issues/7265).
|
||||||
|
- terraform - prevents ``-backend-config`` option double encapsulating with ``shlex_quote`` function. (https://github.com/ansible-collections/community.general/pull/7301).
|
||||||
|
|
||||||
|
New Modules
|
||||||
|
-----------
|
||||||
|
|
||||||
|
- consul_role - Manipulate Consul roles
|
||||||
|
- gio_mime - Set default handler for MIME type, for applications using Gnome GIO
|
||||||
|
- keycloak_authz_custom_policy - Allows administration of Keycloak client custom Javascript policies via Keycloak API
|
||||||
|
- keycloak_realm_key - Allows administration of Keycloak realm keys via Keycloak API
|
||||||
|
- simpleinit_msb - Manage services on Source Mage GNU/Linux
|
||||||
|
|
||||||
|
v7.4.0
|
||||||
|
======
|
||||||
|
|
||||||
|
Release Summary
|
||||||
|
---------------
|
||||||
|
|
||||||
|
Bugfix and feature release.
|
||||||
|
|
||||||
|
Minor Changes
|
||||||
|
-------------
|
||||||
|
|
||||||
|
- cobbler inventory plugin - add ``exclude_mgmt_classes`` and ``include_mgmt_classes`` options to exclude or include hosts based on management classes (https://github.com/ansible-collections/community.general/pull/7184).
|
||||||
|
- cpanm - minor refactor when creating the ``CmdRunner`` object (https://github.com/ansible-collections/community.general/pull/7231).
|
||||||
|
- gitlab_group_variable - add support for ``raw`` variables suboption (https://github.com/ansible-collections/community.general/pull/7132).
|
||||||
|
- gitlab_project_variable - add support for ``raw`` variables suboption (https://github.com/ansible-collections/community.general/pull/7132).
|
||||||
|
- jenkins_build - add new ``detach`` option, which allows the module to exit successfully as long as the build is created (default functionality is still waiting for the build to end before exiting) (https://github.com/ansible-collections/community.general/pull/7204).
|
||||||
|
- jenkins_build - add new ``time_between_checks`` option, which allows to configure the wait time between requests to the Jenkins server (https://github.com/ansible-collections/community.general/pull/7204).
|
||||||
|
- make - allows ``params`` to be used without value (https://github.com/ansible-collections/community.general/pull/7180).
|
||||||
|
- nmap inventory plugin - now has a ``use_arp_ping`` option to allow the user to disable the default ARP ping query for a more reliable form (https://github.com/ansible-collections/community.general/pull/7119).
|
||||||
|
- pagerduty - adds in option to use v2 API for creating pagerduty incidents (https://github.com/ansible-collections/community.general/issues/6151)
|
||||||
|
- pritunl module utils - ensure ``validate_certs`` parameter is honoured in all methods (https://github.com/ansible-collections/community.general/pull/7156).
|
||||||
|
- redfish_info - report ``Id`` in the output of ``GetManagerInventory`` (https://github.com/ansible-collections/community.general/pull/7140).
|
||||||
|
- redfish_utils module utils - support ``Volumes`` in response for ``GetDiskInventory`` (https://github.com/ansible-collections/community.general/pull/6819).
|
||||||
|
- unixy callback plugin - add support for ``check_mode_markers`` option (https://github.com/ansible-collections/community.general/pull/7179).
|
||||||
|
|
||||||
|
Bugfixes
|
||||||
|
--------
|
||||||
|
|
||||||
|
- CmdRunner module utils - does not attempt to resolve path if executable is a relative or absolute path (https://github.com/ansible-collections/community.general/pull/7200).
|
||||||
|
- nmap inventory plugin - now uses ``get_option`` in all cases to get its configuration information (https://github.com/ansible-collections/community.general/pull/7119).
|
||||||
|
- nsupdate - fix a possible ``list index out of range`` exception (https://github.com/ansible-collections/community.general/issues/836).
|
||||||
|
- oci_utils module util - fix inappropriate logical comparison expressions and makes them simpler. The previous checks had logical short circuits (https://github.com/ansible-collections/community.general/pull/7125).
|
||||||
|
- pritunl module utils - fix incorrect URL parameter for orgnization add method (https://github.com/ansible-collections/community.general/pull/7161).
|
||||||
|
- snap - an exception was being raised when snap list was empty (https://github.com/ansible-collections/community.general/pull/7124, https://github.com/ansible-collections/community.general/issues/7120).
|
||||||
|
|
||||||
|
New Modules
|
||||||
|
-----------
|
||||||
|
|
||||||
|
- jenkins_build_info - Get information about Jenkins builds
|
||||||
|
- pnpm - Manage node.js packages with pnpm
|
||||||
|
|
||||||
|
v7.3.0
|
||||||
|
======
|
||||||
|
|
||||||
|
Release Summary
|
||||||
|
---------------
|
||||||
|
|
||||||
|
Feature and bugfix release.
|
||||||
|
|
||||||
|
Minor Changes
|
||||||
|
-------------
|
||||||
|
|
||||||
|
- chroot connection plugin - add ``disable_root_check`` option (https://github.com/ansible-collections/community.general/pull/7099).
|
||||||
|
- ejabberd_user - module now using ``CmdRunner`` to execute external command (https://github.com/ansible-collections/community.general/pull/7075).
|
||||||
|
- ipa_config - add module parameters to manage FreeIPA user and group objectclasses (https://github.com/ansible-collections/community.general/pull/7019).
|
||||||
|
- ipa_config - adds ``idp`` choice to ``ipauserauthtype`` parameter's choices (https://github.com/ansible-collections/community.general/pull/7051).
|
||||||
|
- npm - module now using ``CmdRunner`` to execute external commands (https://github.com/ansible-collections/community.general/pull/6989).
|
||||||
|
- proxmox_kvm - enabled force restart of VM, bringing the ``force`` parameter functionality in line with what is described in the docs (https://github.com/ansible-collections/community.general/pull/6914).
|
||||||
|
- proxmox_vm_info - ``node`` parameter is no longer required. Information can be obtained for the whole cluster (https://github.com/ansible-collections/community.general/pull/6976).
|
||||||
|
- proxmox_vm_info - non-existing provided by name/vmid VM would return empty results instead of failing (https://github.com/ansible-collections/community.general/pull/7049).
|
||||||
|
- redfish_config - add ``DeleteAllVolumes`` command to allow deletion of all volumes on servers (https://github.com/ansible-collections/community.general/pull/6814).
|
||||||
|
- redfish_utils - use ``Controllers`` key in redfish data to obtain Storage controllers properties (https://github.com/ansible-collections/community.general/pull/7081).
|
||||||
|
- redfish_utils module utils - add support for ``PowerCycle`` reset type for ``redfish_command`` responses feature (https://github.com/ansible-collections/community.general/issues/7083).
|
||||||
|
- redfish_utils module utils - add support for following ``@odata.nextLink`` pagination in ``software_inventory`` responses feature (https://github.com/ansible-collections/community.general/pull/7020).
|
||||||
|
- shutdown - use ``shutdown -p ...`` with FreeBSD to halt and power off machine (https://github.com/ansible-collections/community.general/pull/7102).
|
||||||
|
- sorcery - add grimoire (repository) management support (https://github.com/ansible-collections/community.general/pull/7012).
|
||||||
|
|
||||||
|
Deprecated Features
|
||||||
|
-------------------
|
||||||
|
|
||||||
|
- ejabberd_user - deprecate the parameter ``logging`` in favour of producing more detailed information in the module output (https://github.com/ansible-collections/community.general/pull/7043).
|
||||||
|
|
||||||
|
Bugfixes
|
||||||
|
--------
|
||||||
|
|
||||||
|
- bitwarden lookup plugin - the plugin made assumptions about the structure of a Bitwarden JSON object which may have been broken by an update in the Bitwarden API. Remove assumptions, and allow queries for general fields such as ``notes`` (https://github.com/ansible-collections/community.general/pull/7061).
|
||||||
|
- ejabberd_user - module was failing to detect whether user was already created and/or password was changed (https://github.com/ansible-collections/community.general/pull/7033).
|
||||||
|
- keycloak module util - fix missing ``http_agent``, ``timeout``, and ``validate_certs`` ``open_url()`` parameters (https://github.com/ansible-collections/community.general/pull/7067).
|
||||||
|
- keycloak_client inventory plugin - fix missing client secret (https://github.com/ansible-collections/community.general/pull/6931).
|
||||||
|
- lvol - add support for percentage of origin size specification when creating snapshot volumes (https://github.com/ansible-collections/community.general/issues/1630, https://github.com/ansible-collections/community.general/pull/7053).
|
||||||
|
- lxc connection plugin - now handles ``remote_addr`` defaulting to ``inventory_hostname`` correctly (https://github.com/ansible-collections/community.general/pull/7104).
|
||||||
|
- oci_utils module utils - avoid direct type comparisons (https://github.com/ansible-collections/community.general/pull/7085).
|
||||||
|
- proxmox_user_info - avoid direct type comparisons (https://github.com/ansible-collections/community.general/pull/7085).
|
||||||
|
- snap - fix crash when multiple snaps are specified and one has ``---`` in its description (https://github.com/ansible-collections/community.general/pull/7046).
|
||||||
|
- sorcery - fix interruption of the multi-stage process (https://github.com/ansible-collections/community.general/pull/7012).
|
||||||
|
- sorcery - fix queue generation before the whole system rebuild (https://github.com/ansible-collections/community.general/pull/7012).
|
||||||
|
- sorcery - latest state no longer triggers update_cache (https://github.com/ansible-collections/community.general/pull/7012).
|
||||||
|
|
||||||
v7.2.1
|
v7.2.1
|
||||||
======
|
======
|
||||||
|
|
||||||
@@ -422,7 +665,7 @@ Deprecated Features
|
|||||||
we have not heard about anyone using them in those setups. Hence, these
|
we have not heard about anyone using them in those setups. Hence, these
|
||||||
modules are deprecated, and will be removed in community.general 10.0.0
|
modules are deprecated, and will be removed in community.general 10.0.0
|
||||||
in case there are no reports about being still useful, and potentially
|
in case there are no reports about being still useful, and potentially
|
||||||
noone that steps up to maintain them
|
no one that steps up to maintain them
|
||||||
(https://github.com/ansible-collections/community.general/pull/6493).
|
(https://github.com/ansible-collections/community.general/pull/6493).
|
||||||
|
|
||||||
Removed Features (previously deprecated)
|
Removed Features (previously deprecated)
|
||||||
|
|||||||
@@ -24,7 +24,7 @@ If you encounter abusive behavior violating the [Ansible Code of Conduct](https:
|
|||||||
|
|
||||||
## Tested with Ansible
|
## Tested with Ansible
|
||||||
|
|
||||||
Tested with the current ansible-core 2.11, ansible-core 2.12, ansible-core 2.13, ansible-core 2.14, ansible-core 2.15 releases and the current development version of ansible-core. Ansible-core versions before 2.11.0 are not supported. This includes all ansible-base 2.10 and Ansible 2.9 releases.
|
Tested with the current ansible-core 2.11, ansible-core 2.12, ansible-core 2.13, ansible-core 2.14, ansible-core 2.15, ansible-core 2.16 releases and the current development version of ansible-core. Ansible-core versions before 2.11.0 are not supported. This includes all ansible-base 2.10 and Ansible 2.9 releases.
|
||||||
|
|
||||||
Parts of this collection will not work with ansible-core 2.11 on Python 3.12+.
|
Parts of this collection will not work with ansible-core 2.11 on Python 3.12+.
|
||||||
|
|
||||||
@@ -34,13 +34,13 @@ Some modules and plugins require external libraries. Please check the requiremen
|
|||||||
|
|
||||||
## Included content
|
## Included content
|
||||||
|
|
||||||
Please check the included content on the [Ansible Galaxy page for this collection](https://galaxy.ansible.com/community/general) or the [documentation on the Ansible docs site](https://docs.ansible.com/ansible/latest/collections/community/general/).
|
Please check the included content on the [Ansible Galaxy page for this collection](https://galaxy.ansible.com/ui/repo/published/community/general/) or the [documentation on the Ansible docs site](https://docs.ansible.com/ansible/latest/collections/community/general/).
|
||||||
|
|
||||||
## Using this collection
|
## Using this collection
|
||||||
|
|
||||||
This collection is shipped with the Ansible package. So if you have it installed, no more action is required.
|
This collection is shipped with the Ansible package. So if you have it installed, no more action is required.
|
||||||
|
|
||||||
If you have a minimal installation (only Ansible Core installed) or you want to use the latest version of the collection along with the whole Ansible package, you need to install the collection from [Ansible Galaxy](https://galaxy.ansible.com/community/general) manually with the `ansible-galaxy` command-line tool:
|
If you have a minimal installation (only Ansible Core installed) or you want to use the latest version of the collection along with the whole Ansible package, you need to install the collection from [Ansible Galaxy](https://galaxy.ansible.com/ui/repo/published/community/general/) manually with the `ansible-galaxy` command-line tool:
|
||||||
|
|
||||||
ansible-galaxy collection install community.general
|
ansible-galaxy collection install community.general
|
||||||
|
|
||||||
@@ -57,7 +57,7 @@ Note that if you install the collection manually, it will not be upgraded automa
|
|||||||
ansible-galaxy collection install community.general --upgrade
|
ansible-galaxy collection install community.general --upgrade
|
||||||
```
|
```
|
||||||
|
|
||||||
You can also install a specific version of the collection, for example, if you need to downgrade when something is broken in the latest version (please report an issue in this repository). Use the following syntax where `X.Y.Z` can be any [available version](https://galaxy.ansible.com/community/general):
|
You can also install a specific version of the collection, for example, if you need to downgrade when something is broken in the latest version (please report an issue in this repository). Use the following syntax where `X.Y.Z` can be any [available version](https://galaxy.ansible.com/ui/repo/published/community/general/):
|
||||||
|
|
||||||
```bash
|
```bash
|
||||||
ansible-galaxy collection install community.general:==X.Y.Z
|
ansible-galaxy collection install community.general:==X.Y.Z
|
||||||
|
|||||||
@@ -331,7 +331,7 @@ releases:
|
|||||||
|
|
||||||
in case there are no reports about being still useful, and potentially
|
in case there are no reports about being still useful, and potentially
|
||||||
|
|
||||||
noone that steps up to maintain them
|
no one that steps up to maintain them
|
||||||
|
|
||||||
(https://github.com/ansible-collections/community.general/pull/6493).
|
(https://github.com/ansible-collections/community.general/pull/6493).
|
||||||
|
|
||||||
@@ -1295,3 +1295,416 @@ releases:
|
|||||||
- 6983-rundeck-fix-typerrror-on-404-api-response.yml
|
- 6983-rundeck-fix-typerrror-on-404-api-response.yml
|
||||||
- 7.2.1.yml
|
- 7.2.1.yml
|
||||||
release_date: '2023-07-31'
|
release_date: '2023-07-31'
|
||||||
|
7.3.0:
|
||||||
|
changes:
|
||||||
|
bugfixes:
|
||||||
|
- bitwarden lookup plugin - the plugin made assumptions about the structure
|
||||||
|
of a Bitwarden JSON object which may have been broken by an update in the
|
||||||
|
Bitwarden API. Remove assumptions, and allow queries for general fields such
|
||||||
|
as ``notes`` (https://github.com/ansible-collections/community.general/pull/7061).
|
||||||
|
- ejabberd_user - module was failing to detect whether user was already created
|
||||||
|
and/or password was changed (https://github.com/ansible-collections/community.general/pull/7033).
|
||||||
|
- keycloak module util - fix missing ``http_agent``, ``timeout``, and ``validate_certs``
|
||||||
|
``open_url()`` parameters (https://github.com/ansible-collections/community.general/pull/7067).
|
||||||
|
- keycloak_client inventory plugin - fix missing client secret (https://github.com/ansible-collections/community.general/pull/6931).
|
||||||
|
- lvol - add support for percentage of origin size specification when creating
|
||||||
|
snapshot volumes (https://github.com/ansible-collections/community.general/issues/1630,
|
||||||
|
https://github.com/ansible-collections/community.general/pull/7053).
|
||||||
|
- lxc connection plugin - now handles ``remote_addr`` defaulting to ``inventory_hostname``
|
||||||
|
correctly (https://github.com/ansible-collections/community.general/pull/7104).
|
||||||
|
- oci_utils module utils - avoid direct type comparisons (https://github.com/ansible-collections/community.general/pull/7085).
|
||||||
|
- proxmox_user_info - avoid direct type comparisons (https://github.com/ansible-collections/community.general/pull/7085).
|
||||||
|
- snap - fix crash when multiple snaps are specified and one has ``---`` in
|
||||||
|
its description (https://github.com/ansible-collections/community.general/pull/7046).
|
||||||
|
- sorcery - fix interruption of the multi-stage process (https://github.com/ansible-collections/community.general/pull/7012).
|
||||||
|
- sorcery - fix queue generation before the whole system rebuild (https://github.com/ansible-collections/community.general/pull/7012).
|
||||||
|
- sorcery - latest state no longer triggers update_cache (https://github.com/ansible-collections/community.general/pull/7012).
|
||||||
|
deprecated_features:
|
||||||
|
- ejabberd_user - deprecate the parameter ``logging`` in favour of producing
|
||||||
|
more detailed information in the module output (https://github.com/ansible-collections/community.general/pull/7043).
|
||||||
|
minor_changes:
|
||||||
|
- chroot connection plugin - add ``disable_root_check`` option (https://github.com/ansible-collections/community.general/pull/7099).
|
||||||
|
- ejabberd_user - module now using ``CmdRunner`` to execute external command
|
||||||
|
(https://github.com/ansible-collections/community.general/pull/7075).
|
||||||
|
- ipa_config - add module parameters to manage FreeIPA user and group objectclasses
|
||||||
|
(https://github.com/ansible-collections/community.general/pull/7019).
|
||||||
|
- ipa_config - adds ``idp`` choice to ``ipauserauthtype`` parameter's choices
|
||||||
|
(https://github.com/ansible-collections/community.general/pull/7051).
|
||||||
|
- npm - module now using ``CmdRunner`` to execute external commands (https://github.com/ansible-collections/community.general/pull/6989).
|
||||||
|
- proxmox_kvm - enabled force restart of VM, bringing the ``force`` parameter
|
||||||
|
functionality in line with what is described in the docs (https://github.com/ansible-collections/community.general/pull/6914).
|
||||||
|
- proxmox_vm_info - ``node`` parameter is no longer required. Information can
|
||||||
|
be obtained for the whole cluster (https://github.com/ansible-collections/community.general/pull/6976).
|
||||||
|
- proxmox_vm_info - non-existing provided by name/vmid VM would return empty
|
||||||
|
results instead of failing (https://github.com/ansible-collections/community.general/pull/7049).
|
||||||
|
- redfish_config - add ``DeleteAllVolumes`` command to allow deletion of all
|
||||||
|
volumes on servers (https://github.com/ansible-collections/community.general/pull/6814).
|
||||||
|
- redfish_utils - use ``Controllers`` key in redfish data to obtain Storage
|
||||||
|
controllers properties (https://github.com/ansible-collections/community.general/pull/7081).
|
||||||
|
- redfish_utils module utils - add support for ``PowerCycle`` reset type for
|
||||||
|
``redfish_command`` responses feature (https://github.com/ansible-collections/community.general/issues/7083).
|
||||||
|
- redfish_utils module utils - add support for following ``@odata.nextLink``
|
||||||
|
pagination in ``software_inventory`` responses feature (https://github.com/ansible-collections/community.general/pull/7020).
|
||||||
|
- shutdown - use ``shutdown -p ...`` with FreeBSD to halt and power off machine
|
||||||
|
(https://github.com/ansible-collections/community.general/pull/7102).
|
||||||
|
- sorcery - add grimoire (repository) management support (https://github.com/ansible-collections/community.general/pull/7012).
|
||||||
|
release_summary: Feature and bugfix release.
|
||||||
|
fragments:
|
||||||
|
- 6814-redfish-config-add-delete-all-volumes.yml
|
||||||
|
- 6914-proxmox_kvm-enable-force-restart.yml
|
||||||
|
- 6931-keycloak_client-inventory-bugfix.yml
|
||||||
|
- 6976-proxmox-vm-info-not-require-node.yml
|
||||||
|
- 6989-npm-cmdrunner.yml
|
||||||
|
- 7.3.0.yml
|
||||||
|
- 7012-sorcery-grimoire-mgmt.yml
|
||||||
|
- 7019-ipa_config-user-and-group-objectclasses.yml
|
||||||
|
- 7020-redfish-utils-pagination.yml
|
||||||
|
- 7033-ejabberd-user-bugs.yml
|
||||||
|
- 7043-ejabberd-user-deprecate-logging.yml
|
||||||
|
- 7046-snap-newline-before-separator.yml
|
||||||
|
- 7049-proxmox-vm-info-empty-results.yml
|
||||||
|
- 7051-ipa-config-new-choice-idp-to-ipauserauthtype.yml
|
||||||
|
- 7061-fix-bitwarden-get_field.yml
|
||||||
|
- 7067-keycloak-api-paramerter-fix.yml
|
||||||
|
- 7075-ejabberd-user-cmdrunner.yml
|
||||||
|
- 7081-redfish-utils-fix-for-storagecontrollers-deprecated-key.yaml
|
||||||
|
- 7085-sanity.yml
|
||||||
|
- 7099-chroot-disable-root-check-option.yml
|
||||||
|
- 7102-freebsd-shutdown-p.yml
|
||||||
|
- 7104_fix_lxc_remoteaddr_default.yml
|
||||||
|
- 7113-redfish-utils-power-cycle.yml
|
||||||
|
- lvol-pct-of-origin.yml
|
||||||
|
release_date: '2023-08-15'
|
||||||
|
7.4.0:
|
||||||
|
changes:
|
||||||
|
bugfixes:
|
||||||
|
- CmdRunner module utils - does not attempt to resolve path if executable is
|
||||||
|
a relative or absolute path (https://github.com/ansible-collections/community.general/pull/7200).
|
||||||
|
- nmap inventory plugin - now uses ``get_option`` in all cases to get its configuration
|
||||||
|
information (https://github.com/ansible-collections/community.general/pull/7119).
|
||||||
|
- nsupdate - fix a possible ``list index out of range`` exception (https://github.com/ansible-collections/community.general/issues/836).
|
||||||
|
- oci_utils module util - fix inappropriate logical comparison expressions and
|
||||||
|
makes them simpler. The previous checks had logical short circuits (https://github.com/ansible-collections/community.general/pull/7125).
|
||||||
|
- pritunl module utils - fix incorrect URL parameter for orgnization add method
|
||||||
|
(https://github.com/ansible-collections/community.general/pull/7161).
|
||||||
|
- snap - an exception was being raised when snap list was empty (https://github.com/ansible-collections/community.general/pull/7124,
|
||||||
|
https://github.com/ansible-collections/community.general/issues/7120).
|
||||||
|
minor_changes:
|
||||||
|
- cobbler inventory plugin - add ``exclude_mgmt_classes`` and ``include_mgmt_classes``
|
||||||
|
options to exclude or include hosts based on management classes (https://github.com/ansible-collections/community.general/pull/7184).
|
||||||
|
- cpanm - minor refactor when creating the ``CmdRunner`` object (https://github.com/ansible-collections/community.general/pull/7231).
|
||||||
|
- gitlab_group_variable - add support for ``raw`` variables suboption (https://github.com/ansible-collections/community.general/pull/7132).
|
||||||
|
- gitlab_project_variable - add support for ``raw`` variables suboption (https://github.com/ansible-collections/community.general/pull/7132).
|
||||||
|
- jenkins_build - add new ``detach`` option, which allows the module to exit
|
||||||
|
successfully as long as the build is created (default functionality is still
|
||||||
|
waiting for the build to end before exiting) (https://github.com/ansible-collections/community.general/pull/7204).
|
||||||
|
- jenkins_build - add new ``time_between_checks`` option, which allows to configure
|
||||||
|
the wait time between requests to the Jenkins server (https://github.com/ansible-collections/community.general/pull/7204).
|
||||||
|
- make - allows ``params`` to be used without value (https://github.com/ansible-collections/community.general/pull/7180).
|
||||||
|
- nmap inventory plugin - now has a ``use_arp_ping`` option to allow the user
|
||||||
|
to disable the default ARP ping query for a more reliable form (https://github.com/ansible-collections/community.general/pull/7119).
|
||||||
|
- pagerduty - adds in option to use v2 API for creating pagerduty incidents
|
||||||
|
(https://github.com/ansible-collections/community.general/issues/6151)
|
||||||
|
- pritunl module utils - ensure ``validate_certs`` parameter is honoured in
|
||||||
|
all methods (https://github.com/ansible-collections/community.general/pull/7156).
|
||||||
|
- redfish_info - report ``Id`` in the output of ``GetManagerInventory`` (https://github.com/ansible-collections/community.general/pull/7140).
|
||||||
|
- redfish_utils module utils - support ``Volumes`` in response for ``GetDiskInventory``
|
||||||
|
(https://github.com/ansible-collections/community.general/pull/6819).
|
||||||
|
- unixy callback plugin - add support for ``check_mode_markers`` option (https://github.com/ansible-collections/community.general/pull/7179).
|
||||||
|
release_summary: Bugfix and feature release.
|
||||||
|
fragments:
|
||||||
|
- 6819-redfish-utils-add-links-parameter-for-get_disk_inventory.yml
|
||||||
|
- 7.4.0.yml
|
||||||
|
- 7118-nmap_inv_plugin_no_arp_option.yml
|
||||||
|
- 7124-snap-empty-list.yml
|
||||||
|
- 7125-fix-inappropriate-comparison.yml
|
||||||
|
- 7132-gitlab-raw-variables.yml
|
||||||
|
- 7140-id-getmanagerinv-output.yml
|
||||||
|
- 7156-ensure-validate-certs-parameter-is-honoured.yml
|
||||||
|
- 7161-fix-incorrect-post-parameter.yml
|
||||||
|
- 7179-unixy-support-checkmode-markers.yml
|
||||||
|
- 7180-make_params_without_value.yml
|
||||||
|
- 7184-cobbler-mgmt-classes.yml
|
||||||
|
- 7200-cmd-runner-abs-path.yml
|
||||||
|
- 7219-fix-nsupdate-cname.yaml
|
||||||
|
- 7231-cpanm-adjustments.yml
|
||||||
|
- improvements-to-jenkins-build-module.yml
|
||||||
|
- update-v2-pagerduty-alert.yml
|
||||||
|
modules:
|
||||||
|
- description: Get information about Jenkins builds
|
||||||
|
name: jenkins_build_info
|
||||||
|
namespace: ''
|
||||||
|
- description: Manage node.js packages with pnpm
|
||||||
|
name: pnpm
|
||||||
|
namespace: ''
|
||||||
|
release_date: '2023-09-11'
|
||||||
|
7.5.0:
|
||||||
|
changes:
|
||||||
|
bugfixes:
|
||||||
|
- gitlab_group_variable - deleted all variables when used with ``purge=true``
|
||||||
|
due to missing ``raw`` property in KNOWN attributes (https://github.com/ansible-collections/community.general/issues/7250).
|
||||||
|
- gitlab_project_variable - deleted all variables when used with ``purge=true``
|
||||||
|
due to missing ``raw`` property in KNOWN attributes (https://github.com/ansible-collections/community.general/issues/7250).
|
||||||
|
- ldap_search - fix string normalization and the ``base64_attributes`` option
|
||||||
|
on Python 3 (https://github.com/ansible-collections/community.general/issues/5704,
|
||||||
|
https://github.com/ansible-collections/community.general/pull/7264).
|
||||||
|
- lxc connection plugin - properly evaluate options (https://github.com/ansible-collections/community.general/pull/7369).
|
||||||
|
- mail - skip headers containing equals characters due to missing ``maxsplit``
|
||||||
|
on header key/value parsing (https://github.com/ansible-collections/community.general/pull/7303).
|
||||||
|
- nmap inventory plugin - fix ``get_option`` calls (https://github.com/ansible-collections/community.general/pull/7323).
|
||||||
|
- onepassword - fix KeyError exception when trying to access value of a field
|
||||||
|
that is not filled out in OnePassword item (https://github.com/ansible-collections/community.general/pull/7241).
|
||||||
|
- snap - change the change detection mechanism from "parsing installation" to
|
||||||
|
"comparing end state with initial state" (https://github.com/ansible-collections/community.general/pull/7340,
|
||||||
|
https://github.com/ansible-collections/community.general/issues/7265).
|
||||||
|
- terraform - prevents ``-backend-config`` option double encapsulating with
|
||||||
|
``shlex_quote`` function. (https://github.com/ansible-collections/community.general/pull/7301).
|
||||||
|
deprecated_features:
|
||||||
|
- The next major release, community.general 8.0.0, will drop support for ansible-core
|
||||||
|
2.11 and 2.12, which have been End of Life for some time now. This means that
|
||||||
|
this collection no longer supports Python 2.6 on the target. Individual content
|
||||||
|
might still work with unsupported ansible-core versions, but that can change
|
||||||
|
at any time. Also please note that from now on, for every new major community.general
|
||||||
|
release, we will drop support for all ansible-core versions that have been
|
||||||
|
End of Life for more than a few weeks on the date of the major release (https://github.com/ansible-community/community-topics/discussions/271,
|
||||||
|
https://github.com/ansible-collections/community.general/pull/7259).
|
||||||
|
- redfish_info, redfish_config, redfish_command - the default value ``10`` for
|
||||||
|
the ``timeout`` option is deprecated and will change to ``60`` in community.general
|
||||||
|
9.0.0 (https://github.com/ansible-collections/community.general/pull/7295).
|
||||||
|
minor_changes:
|
||||||
|
- cargo - add option ``executable``, which allows user to specify path to the
|
||||||
|
cargo binary (https://github.com/ansible-collections/community.general/pull/7352).
|
||||||
|
- cargo - add option ``locked`` which allows user to specify install the locked
|
||||||
|
version of dependency instead of latest compatible version (https://github.com/ansible-collections/community.general/pull/6134).
|
||||||
|
- dig lookup plugin - add TCP option to enable the use of TCP connection during
|
||||||
|
DNS lookup (https://github.com/ansible-collections/community.general/pull/7343).
|
||||||
|
- 'gitlab_group - add option ``force_delete`` (default: false) which allows
|
||||||
|
delete group even if projects exists in it (https://github.com/ansible-collections/community.general/pull/7364).'
|
||||||
|
- ini_file - add ``ignore_spaces`` option (https://github.com/ansible-collections/community.general/pull/7273).
|
||||||
|
- newrelic_deployment - add option ``app_name_exact_match``, which filters results
|
||||||
|
for the exact app_name provided (https://github.com/ansible-collections/community.general/pull/7355).
|
||||||
|
- onepassword lookup plugin - introduce ``account_id`` option which allows specifying
|
||||||
|
which account to use (https://github.com/ansible-collections/community.general/pull/7308).
|
||||||
|
- onepassword_raw lookup plugin - introduce ``account_id`` option which allows
|
||||||
|
specifying which account to use (https://github.com/ansible-collections/community.general/pull/7308).
|
||||||
|
- parted - on resize, use ``--fix`` option if available (https://github.com/ansible-collections/community.general/pull/7304).
|
||||||
|
- pnpm - set correct version when state is latest or version is not mentioned.
|
||||||
|
Resolves previous idempotency problem (https://github.com/ansible-collections/community.general/pull/7339).
|
||||||
|
- proxmox - add ``vmid`` (and ``taskid`` when possible) to return values (https://github.com/ansible-collections/community.general/pull/7263).
|
||||||
|
- random_string - added new ``ignore_similar_chars`` and ``similar_chars`` option
|
||||||
|
to ignore certain chars (https://github.com/ansible-collections/community.general/pull/7242).
|
||||||
|
- redfish_command - add new option ``update_oem_params`` for the ``MultipartHTTPPushUpdate``
|
||||||
|
command (https://github.com/ansible-collections/community.general/issues/7331).
|
||||||
|
- redfish_config - add ``CreateVolume`` command to allow creation of volumes
|
||||||
|
on servers (https://github.com/ansible-collections/community.general/pull/6813).
|
||||||
|
- redfish_config - adding ``SetSecureBoot`` command (https://github.com/ansible-collections/community.general/pull/7129).
|
||||||
|
- redfish_info - add support for ``GetBiosRegistries`` command (https://github.com/ansible-collections/community.general/pull/7144).
|
||||||
|
- redfish_info - adds ``LinkStatus`` to NIC inventory (https://github.com/ansible-collections/community.general/pull/7318).
|
||||||
|
- redis_info - refactor the redis_info module to use the redis module_utils
|
||||||
|
enabling to pass TLS parameters to the Redis client (https://github.com/ansible-collections/community.general/pull/7267).
|
||||||
|
- supervisorctl - allow to stop matching running processes before removing them
|
||||||
|
with ``stop_before_removing=true`` (https://github.com/ansible-collections/community.general/pull/7284).
|
||||||
|
release_summary: 'Regular bugfix and feature release.
|
||||||
|
|
||||||
|
|
||||||
|
Please note that this is the last minor 7.x.0 release. Further releases
|
||||||
|
|
||||||
|
with major version 7 will be bugfix releases 7.5.y.
|
||||||
|
|
||||||
|
'
|
||||||
|
fragments:
|
||||||
|
- 6134-add-locked-option-for-cargo.yml
|
||||||
|
- 6813-redfish-config-add-create-volume.yml
|
||||||
|
- 7.5.0.yml
|
||||||
|
- 7129-adding_set_secure_boot_command_to_redfish_config.yml
|
||||||
|
- 7144-add-getbiosregistry-command-to-redfish-info.yml
|
||||||
|
- 7241-prevent-key-error-when-value-does-not-exist.yml
|
||||||
|
- 7242_ignore_similar_chars.yml
|
||||||
|
- 7251-gitlab-variables-deleteing-all-variables.yml
|
||||||
|
- 7263-proxmox-return-vmid-and-taskid.yaml
|
||||||
|
- 7264-ldap_search-strings.yml
|
||||||
|
- 7267-redis_info.yml
|
||||||
|
- 7273-ini_file_ignore_spaces.yml
|
||||||
|
- 7284-supervisorctl-stop-before-remove.yaml
|
||||||
|
- 7295-adding_deprecation_for_timeout_in_redfish_info_config_command.yml
|
||||||
|
- 7301-fix-backend-config-string-encapsulation.yml
|
||||||
|
- 7303-mail-incorrect-header-parsing.yml
|
||||||
|
- 7304-prevent-parted-warnings.yml
|
||||||
|
- 7308-onepassword-multi-acc.yml
|
||||||
|
- 7318-add-linkstatus-attribute-to-nic-inventory.yml
|
||||||
|
- 7323-nmap.yml
|
||||||
|
- 7330-redfish-utils-oem-params.yml
|
||||||
|
- 7339-pnpm-correct-version-when-state-latest.yml
|
||||||
|
- 7340-snap-fix.yml
|
||||||
|
- 7343-dig-tcp-option.yml
|
||||||
|
- 7352-add-executable-option-for-cargo.yml
|
||||||
|
- 7355-newrelic-deployment-add-exact-name.yml
|
||||||
|
- 7364-add-option-force-gitlab-group.yml
|
||||||
|
- 7369-fix-lxc-options.yml
|
||||||
|
- deprecate-ansible-core-2.11-2.12.yml
|
||||||
|
modules:
|
||||||
|
- description: Manipulate Consul roles
|
||||||
|
name: consul_role
|
||||||
|
namespace: ''
|
||||||
|
- description: Set default handler for MIME type, for applications using Gnome
|
||||||
|
GIO
|
||||||
|
name: gio_mime
|
||||||
|
namespace: ''
|
||||||
|
- description: Allows administration of Keycloak client custom Javascript policies
|
||||||
|
via Keycloak API
|
||||||
|
name: keycloak_authz_custom_policy
|
||||||
|
namespace: ''
|
||||||
|
- description: Allows administration of Keycloak realm keys via Keycloak API
|
||||||
|
name: keycloak_realm_key
|
||||||
|
namespace: ''
|
||||||
|
- description: Manage services on Source Mage GNU/Linux
|
||||||
|
name: simpleinit_msb
|
||||||
|
namespace: ''
|
||||||
|
release_date: '2023-10-09'
|
||||||
|
7.5.1:
|
||||||
|
changes:
|
||||||
|
bugfixes:
|
||||||
|
- composer - fix impossible to run ``working_dir`` dependent commands. The module
|
||||||
|
was throwing an error when trying to run a ``working_dir`` dependent command,
|
||||||
|
because it tried to get the command help without passing the ``working_dir``
|
||||||
|
(https://github.com/ansible-collections/community.general/issues/3787).
|
||||||
|
- github_deploy_key - fix pagination behaviour causing a crash when only a single
|
||||||
|
page of deploy keys exist (https://github.com/ansible-collections/community.general/pull/7375).
|
||||||
|
- gitlab_group_members - fix gitlab constants call in ``gitlab_group_members``
|
||||||
|
module (https://github.com/ansible-collections/community.general/issues/7467).
|
||||||
|
- gitlab_project_members - fix gitlab constants call in ``gitlab_project_members``
|
||||||
|
module (https://github.com/ansible-collections/community.general/issues/7467).
|
||||||
|
- gitlab_protected_branches - fix gitlab constants call in ``gitlab_protected_branches``
|
||||||
|
module (https://github.com/ansible-collections/community.general/issues/7467).
|
||||||
|
- gitlab_user - fix gitlab constants call in ``gitlab_user`` module (https://github.com/ansible-collections/community.general/issues/7467).
|
||||||
|
- kernel_blacklist - simplified the mechanism to update the file, fixing the
|
||||||
|
error (https://github.com/ansible-collections/community.general/pull/7382,
|
||||||
|
https://github.com/ansible-collections/community.general/issues/7362).
|
||||||
|
- memset module utils - make compatible with ansible-core 2.17 (https://github.com/ansible-collections/community.general/pull/7379).
|
||||||
|
- proxmox_pool_member - absent state for type VM did not delete VMs from the
|
||||||
|
pools (https://github.com/ansible-collections/community.general/pull/7464).
|
||||||
|
- redfish_command - fix usage of message parsing in ``SimpleUpdate`` and ``MultipartHTTPPushUpdate``
|
||||||
|
commands to treat the lack of a ``MessageId`` as no message (https://github.com/ansible-collections/community.general/issues/7465,
|
||||||
|
https://github.com/ansible-collections/community.general/pull/7471).
|
||||||
|
- 'redhat_subscription - use the right D-Bus options for the consumer type when
|
||||||
|
|
||||||
|
registering a RHEL system older than 9 or a RHEL 9 system older than 9.2
|
||||||
|
|
||||||
|
and using ``consumer_type``
|
||||||
|
|
||||||
|
(https://github.com/ansible-collections/community.general/pull/7378).
|
||||||
|
|
||||||
|
'
|
||||||
|
- selective callback plugin - fix length of task name lines in output always
|
||||||
|
being 3 characters longer than desired (https://github.com/ansible-collections/community.general/pull/7374).
|
||||||
|
release_summary: Regular bugfix release.
|
||||||
|
fragments:
|
||||||
|
- 3787-pass-composer-working-dir.yml
|
||||||
|
- 7.5.1.yml
|
||||||
|
- 7374-fix-selective-callback-taskname-length.yml
|
||||||
|
- 7375-fix-github-deploy-key-pagination.yml
|
||||||
|
- 7378-redhat_subscription-dbus-consumer-type.yaml
|
||||||
|
- 7379-url.yml
|
||||||
|
- 7382-kernel-blacklist-bugfix.yml
|
||||||
|
- 7464-fix-vm-removal-in-proxmox_pool_member.yml
|
||||||
|
- 7465-redfish-firmware-update-message-id-hardening.yml
|
||||||
|
- 7467-fix-gitlab-constants-calls.yml
|
||||||
|
release_date: '2023-11-06'
|
||||||
|
7.5.2:
|
||||||
|
changes:
|
||||||
|
bugfixes:
|
||||||
|
- cloudflare_dns - fix Cloudflare lookup of SHFP records (https://github.com/ansible-collections/community.general/issues/7652).
|
||||||
|
- interface_files - also consider ``address_family`` when changing ``option=method``
|
||||||
|
(https://github.com/ansible-collections/community.general/issues/7610, https://github.com/ansible-collections/community.general/pull/7612).
|
||||||
|
- irc - replace ``ssl.wrap_socket`` that was removed from Python 3.12 with code
|
||||||
|
for creating a proper SSL context (https://github.com/ansible-collections/community.general/pull/7542).
|
||||||
|
- keycloak_* - fix Keycloak API client to quote ``/`` properly (https://github.com/ansible-collections/community.general/pull/7641).
|
||||||
|
- keycloak_authz_permission - resource payload variable for scope-based permission
|
||||||
|
was constructed as a string, when it needs to be a list, even for a single
|
||||||
|
item (https://github.com/ansible-collections/community.general/issues/7151).
|
||||||
|
- log_entries callback plugin - replace ``ssl.wrap_socket`` that was removed
|
||||||
|
from Python 3.12 with code for creating a proper SSL context (https://github.com/ansible-collections/community.general/pull/7542).
|
||||||
|
- lvol - test for output messages in both ``stdout`` and ``stderr`` (https://github.com/ansible-collections/community.general/pull/7601,
|
||||||
|
https://github.com/ansible-collections/community.general/issues/7182).
|
||||||
|
- ocapi_utils, oci_utils, redfish_utils module utils - replace ``type()`` calls
|
||||||
|
with ``isinstance()`` calls (https://github.com/ansible-collections/community.general/pull/7501).
|
||||||
|
- onepassword lookup plugin - field and section titles are now case insensitive
|
||||||
|
when using op CLI version two or later. This matches the behavior of version
|
||||||
|
one (https://github.com/ansible-collections/community.general/pull/7564).
|
||||||
|
- pipx module utils - change the CLI argument formatter for the ``pip_args``
|
||||||
|
parameter (https://github.com/ansible-collections/community.general/issues/7497,
|
||||||
|
https://github.com/ansible-collections/community.general/pull/7506).
|
||||||
|
- 'redhat_subscription - use the D-Bus registration on RHEL 7 only on 7.4 and
|
||||||
|
|
||||||
|
greater; older versions of RHEL 7 do not have it
|
||||||
|
|
||||||
|
(https://github.com/ansible-collections/community.general/issues/7622,
|
||||||
|
|
||||||
|
https://github.com/ansible-collections/community.general/pull/7624).
|
||||||
|
|
||||||
|
'
|
||||||
|
- terraform - fix multiline string handling in complex variables (https://github.com/ansible-collections/community.general/pull/7535).
|
||||||
|
minor_changes:
|
||||||
|
- elastic callback plugin - close elastic client to not leak resources (https://github.com/ansible-collections/community.general/pull/7517).
|
||||||
|
release_summary: Regular bugfix release.
|
||||||
|
fragments:
|
||||||
|
- 000-redhat_subscription-dbus-on-7.4-plus.yaml
|
||||||
|
- 7.5.2.yml
|
||||||
|
- 7151-fix-keycloak_authz_permission-incorrect-resource-payload.yml
|
||||||
|
- 7501-type.yml
|
||||||
|
- 7506-pipx-pipargs.yml
|
||||||
|
- 7517-elastic-close-client.yaml
|
||||||
|
- 7535-terraform-fix-multiline-string-handling-in-complex-variables.yml
|
||||||
|
- 7542-irc-logentries-ssl.yml
|
||||||
|
- 7564-onepassword-lookup-case-insensitive.yaml
|
||||||
|
- 7601-lvol-fix.yml
|
||||||
|
- 7612-interface_file-method.yml
|
||||||
|
- 7641-fix-keycloak-api-client-to-quote-properly.yml
|
||||||
|
- 7653-fix-cloudflare-lookup.yml
|
||||||
|
release_date: '2023-12-04'
|
||||||
|
7.5.3:
|
||||||
|
changes:
|
||||||
|
bugfixes:
|
||||||
|
- keycloak_identity_provider - ``mappers`` processing was not idempotent if
|
||||||
|
the mappers configuration list had not been sorted by name (in ascending order).
|
||||||
|
Fix resolves the issue by sorting mappers in the desired state using the same
|
||||||
|
key which is used for obtaining existing state (https://github.com/ansible-collections/community.general/pull/7418).
|
||||||
|
- keycloak_identity_provider - it was not possible to reconfigure (add, remove)
|
||||||
|
``mappers`` once they were created initially. Removal was ignored, adding
|
||||||
|
new ones resulted in dropping the pre-existing unmodified mappers. Fix resolves
|
||||||
|
the issue by supplying correct input to the internal update call (https://github.com/ansible-collections/community.general/pull/7418).
|
||||||
|
- keycloak_user - when ``force`` is set, but user does not exist, do not try
|
||||||
|
to delete it (https://github.com/ansible-collections/community.general/pull/7696).
|
||||||
|
- statusio_maintenance - fix error caused by incorrectly formed API data payload.
|
||||||
|
Was raising "Failed to create maintenance HTTP Error 400 Bad Request" caused
|
||||||
|
by bad data type for date/time and deprecated dict keys (https://github.com/ansible-collections/community.general/pull/7754).
|
||||||
|
release_summary: Regular bugfix release.
|
||||||
|
fragments:
|
||||||
|
- 7.5.3.yml
|
||||||
|
- 7418-kc_identity_provider-mapper-reconfiguration-fixes.yml
|
||||||
|
- 7696-avoid-attempt-to-delete-non-existing-user.yml
|
||||||
|
- 7754-fixed-payload-format.yml
|
||||||
|
release_date: '2024-01-01'
|
||||||
|
7.5.4:
|
||||||
|
changes:
|
||||||
|
bugfixes:
|
||||||
|
- homebrew - detect already installed formulae and casks using JSON output from
|
||||||
|
``brew info`` (https://github.com/ansible-collections/community.general/issues/864).
|
||||||
|
- ipa_otptoken - the module expect ``ipatokendisabled`` as string but the ``ipatokendisabled``
|
||||||
|
value is returned as a boolean (https://github.com/ansible-collections/community.general/pull/7795).
|
||||||
|
- ldap - previously the order number (if present) was expected to follow an
|
||||||
|
equals sign in the DN. This makes it so the order number string is identified
|
||||||
|
correctly anywhere within the DN (https://github.com/ansible-collections/community.general/issues/7646).
|
||||||
|
- mssql_script - make the module work with Python 2 (https://github.com/ansible-collections/community.general/issues/7818,
|
||||||
|
https://github.com/ansible-collections/community.general/pull/7821).
|
||||||
|
- nmcli - fix ``connection.slave-type`` wired to ``bond`` and not with parameter
|
||||||
|
``slave_type`` in case of connection type ``wifi`` (https://github.com/ansible-collections/community.general/issues/7389).
|
||||||
|
release_summary: Regular bugfix release.
|
||||||
|
fragments:
|
||||||
|
- 7.5.4.yml
|
||||||
|
- 7389-nmcli-issue-with-creating-a-wifi-bridge-slave.yml
|
||||||
|
- 7646-fix-order-number-detection-in-dn.yml
|
||||||
|
- 7797-ipa-fix-otp-idempotency.yml
|
||||||
|
- 7821-mssql_script-py2.yml
|
||||||
|
- 7870-homebrew-cask-installed-detection.yml
|
||||||
|
release_date: '2024-01-29'
|
||||||
|
|||||||
@@ -6,7 +6,7 @@
|
|||||||
Counting elements in a sequence
|
Counting elements in a sequence
|
||||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||||
|
|
||||||
The ``community.general.counter`` filter plugin allows you to count (hashable) elements in a sequence. Elements are returned as dictionary keys and their counts are stored as dictionary values.
|
The :ansplugin:`community.general.counter filter plugin <community.general.counter#filter>` allows you to count (hashable) elements in a sequence. Elements are returned as dictionary keys and their counts are stored as dictionary values.
|
||||||
|
|
||||||
.. code-block:: yaml+jinja
|
.. code-block:: yaml+jinja
|
||||||
|
|
||||||
|
|||||||
@@ -6,7 +6,7 @@
|
|||||||
Dictionaries
|
Dictionaries
|
||||||
^^^^^^^^^^^^
|
^^^^^^^^^^^^
|
||||||
|
|
||||||
You can use the ``dict_kv`` filter to create a single-entry dictionary with ``value | community.general.dict_kv(key)``:
|
You can use the :ansplugin:`community.general.dict_kv filter <community.general.dict_kv#filter>` to create a single-entry dictionary with ``value | community.general.dict_kv(key)``:
|
||||||
|
|
||||||
.. code-block:: yaml+jinja
|
.. code-block:: yaml+jinja
|
||||||
|
|
||||||
@@ -58,7 +58,7 @@ This produces:
|
|||||||
|
|
||||||
.. versionadded:: 2.0.0
|
.. versionadded:: 2.0.0
|
||||||
|
|
||||||
If you need to convert a list of key-value pairs to a dictionary, you can use the ``dict`` function. Unfortunately, this function cannot be used with ``map``. For this, the ``community.general.dict`` filter can be used:
|
If you need to convert a list of key-value pairs to a dictionary, you can use the ``dict`` function. Unfortunately, this function cannot be used with ``map``. For this, the :ansplugin:`community.general.dict filter <community.general.dict#filter>` can be used:
|
||||||
|
|
||||||
.. code-block:: yaml+jinja
|
.. code-block:: yaml+jinja
|
||||||
|
|
||||||
|
|||||||
@@ -6,7 +6,7 @@
|
|||||||
Grouping
|
Grouping
|
||||||
^^^^^^^^
|
^^^^^^^^
|
||||||
|
|
||||||
If you have a list of dictionaries, the Jinja2 ``groupby`` filter allows to group the list by an attribute. This results in a list of ``(grouper, list)`` namedtuples, where ``list`` contains all dictionaries where the selected attribute equals ``grouper``. If you know that for every ``grouper``, there will be a most one entry in that list, you can use the ``community.general.groupby_as_dict`` filter to convert the original list into a dictionary which maps ``grouper`` to the corresponding dictionary.
|
If you have a list of dictionaries, the Jinja2 ``groupby`` filter allows to group the list by an attribute. This results in a list of ``(grouper, list)`` namedtuples, where ``list`` contains all dictionaries where the selected attribute equals ``grouper``. If you know that for every ``grouper``, there will be a most one entry in that list, you can use the :ansplugin:`community.general.groupby_as_dict filter <community.general.groupby_as_dict#filter>` to convert the original list into a dictionary which maps ``grouper`` to the corresponding dictionary.
|
||||||
|
|
||||||
One example is ``ansible_facts.mounts``, which is a list of dictionaries where each has one ``device`` element to indicate the device which is mounted. Therefore, ``ansible_facts.mounts | community.general.groupby_as_dict('device')`` is a dictionary mapping a device to the mount information:
|
One example is ``ansible_facts.mounts``, which is a list of dictionaries where each has one ``device`` element to indicate the device which is mounted. Therefore, ``ansible_facts.mounts | community.general.groupby_as_dict('device')`` is a dictionary mapping a device to the mount information:
|
||||||
|
|
||||||
|
|||||||
@@ -6,7 +6,7 @@
|
|||||||
Merging lists of dictionaries
|
Merging lists of dictionaries
|
||||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||||
|
|
||||||
If you have two or more lists of dictionaries and want to combine them into a list of merged dictionaries, where the dictionaries are merged by an attribute, you can use the ``lists_mergeby`` filter.
|
If you have two or more lists of dictionaries and want to combine them into a list of merged dictionaries, where the dictionaries are merged by an attribute, you can use the :ansplugin:`community.general.lists_mergeby filter <community.general.lists_mergeby#filter>`.
|
||||||
|
|
||||||
.. note:: The output of the examples in this section use the YAML callback plugin. Quoting: "Ansible output that can be quite a bit easier to read than the default JSON formatting." See :ref:`the documentation for the community.general.yaml callback plugin <ansible_collections.community.general.yaml_callback>`.
|
.. note:: The output of the examples in this section use the YAML callback plugin. Quoting: "Ansible output that can be quite a bit easier to read than the default JSON formatting." See :ref:`the documentation for the community.general.yaml callback plugin <ansible_collections.community.general.yaml_callback>`.
|
||||||
|
|
||||||
@@ -76,15 +76,15 @@ This produces the same result as in the previous example:
|
|||||||
name: meh
|
name: meh
|
||||||
|
|
||||||
|
|
||||||
The filter also accepts two optional parameters: ``recursive`` and ``list_merge``. These parameters are only supported when used with ansible-base 2.10 or ansible-core, but not with Ansible 2.9. This is available since community.general 4.4.0.
|
The filter also accepts two optional parameters: :ansopt:`community.general.lists_mergeby#filter:recursive` and :ansopt:`community.general.lists_mergeby#filter:list_merge`. This is available since community.general 4.4.0.
|
||||||
|
|
||||||
**recursive**
|
**recursive**
|
||||||
Is a boolean, default to ``False``. Should the ``community.general.lists_mergeby`` recursively merge nested hashes. Note: It does not depend on the value of the ``hash_behaviour`` setting in ``ansible.cfg``.
|
Is a boolean, default to ``false``. Should the :ansplugin:`community.general.lists_mergeby#filter` filter recursively merge nested hashes. Note: It does not depend on the value of the ``hash_behaviour`` setting in ``ansible.cfg``.
|
||||||
|
|
||||||
**list_merge**
|
**list_merge**
|
||||||
Is a string, its possible values are ``replace`` (default), ``keep``, ``append``, ``prepend``, ``append_rp`` or ``prepend_rp``. It modifies the behaviour of ``community.general.lists_mergeby`` when the hashes to merge contain arrays/lists.
|
Is a string, its possible values are :ansval:`replace` (default), :ansval:`keep`, :ansval:`append`, :ansval:`prepend`, :ansval:`append_rp` or :ansval:`prepend_rp`. It modifies the behaviour of :ansplugin:`community.general.lists_mergeby#filter` when the hashes to merge contain arrays/lists.
|
||||||
|
|
||||||
The examples below set ``recursive=true`` and display the differences among all six options of ``list_merge``. Functionality of the parameters is exactly the same as in the filter ``combine``. See :ref:`Combining hashes/dictionaries <combine_filter>` to learn details about these options.
|
The examples below set :ansopt:`community.general.lists_mergeby#filter:recursive=true` and display the differences among all six options of :ansopt:`community.general.lists_mergeby#filter:list_merge`. Functionality of the parameters is exactly the same as in the filter :ansplugin:`ansible.builtin.combine#filter`. See :ref:`Combining hashes/dictionaries <combine_filter>` to learn details about these options.
|
||||||
|
|
||||||
Let us use the lists below in the following examples
|
Let us use the lists below in the following examples
|
||||||
|
|
||||||
@@ -110,7 +110,7 @@ Let us use the lists below in the following examples
|
|||||||
- name: myname02
|
- name: myname02
|
||||||
param01: [3, 4, 4, {key: value}]
|
param01: [3, 4, 4, {key: value}]
|
||||||
|
|
||||||
Example ``list_merge=replace`` (default):
|
Example :ansopt:`community.general.lists_mergeby#filter:list_merge=replace` (default):
|
||||||
|
|
||||||
.. code-block:: yaml+jinja
|
.. code-block:: yaml+jinja
|
||||||
|
|
||||||
@@ -137,7 +137,7 @@ This produces:
|
|||||||
- 4
|
- 4
|
||||||
- key: value
|
- key: value
|
||||||
|
|
||||||
Example ``list_merge=keep``:
|
Example :ansopt:`community.general.lists_mergeby#filter:list_merge=keep`:
|
||||||
|
|
||||||
.. code-block:: yaml+jinja
|
.. code-block:: yaml+jinja
|
||||||
|
|
||||||
@@ -165,7 +165,7 @@ This produces:
|
|||||||
- 2
|
- 2
|
||||||
- 3
|
- 3
|
||||||
|
|
||||||
Example ``list_merge=append``:
|
Example :ansopt:`community.general.lists_mergeby#filter:list_merge=append`:
|
||||||
|
|
||||||
.. code-block:: yaml+jinja
|
.. code-block:: yaml+jinja
|
||||||
|
|
||||||
@@ -198,7 +198,7 @@ This produces:
|
|||||||
- 4
|
- 4
|
||||||
- key: value
|
- key: value
|
||||||
|
|
||||||
Example ``list_merge=prepend``:
|
Example :ansopt:`community.general.lists_mergeby#filter:list_merge=prepend`:
|
||||||
|
|
||||||
.. code-block:: yaml+jinja
|
.. code-block:: yaml+jinja
|
||||||
|
|
||||||
@@ -231,7 +231,7 @@ This produces:
|
|||||||
- 2
|
- 2
|
||||||
- 3
|
- 3
|
||||||
|
|
||||||
Example ``list_merge=append_rp``:
|
Example :ansopt:`community.general.lists_mergeby#filter:list_merge=append_rp`:
|
||||||
|
|
||||||
.. code-block:: yaml+jinja
|
.. code-block:: yaml+jinja
|
||||||
|
|
||||||
@@ -263,7 +263,7 @@ This produces:
|
|||||||
- 4
|
- 4
|
||||||
- key: value
|
- key: value
|
||||||
|
|
||||||
Example ``list_merge=prepend_rp``:
|
Example :ansopt:`community.general.lists_mergeby#filter:list_merge=prepend_rp`:
|
||||||
|
|
||||||
.. code-block:: yaml+jinja
|
.. code-block:: yaml+jinja
|
||||||
|
|
||||||
|
|||||||
@@ -9,7 +9,7 @@ Conversions
|
|||||||
Parsing CSV files
|
Parsing CSV files
|
||||||
^^^^^^^^^^^^^^^^^
|
^^^^^^^^^^^^^^^^^
|
||||||
|
|
||||||
Ansible offers the :ref:`community.general.read_csv module <ansible_collections.community.general.read_csv_module>` to read CSV files. Sometimes you need to convert strings to CSV files instead. For this, the ``from_csv`` filter exists.
|
Ansible offers the :ansplugin:`community.general.read_csv module <community.general.read_csv#module>` to read CSV files. Sometimes you need to convert strings to CSV files instead. For this, the :ansplugin:`community.general.from_csv filter <community.general.from_csv#filter>` exists.
|
||||||
|
|
||||||
.. code-block:: yaml+jinja
|
.. code-block:: yaml+jinja
|
||||||
|
|
||||||
@@ -42,7 +42,7 @@ This produces:
|
|||||||
]
|
]
|
||||||
}
|
}
|
||||||
|
|
||||||
The ``from_csv`` filter has several keyword arguments to control its behavior:
|
The :ansplugin:`community.general.from_csv filter <community.general.from_csv#filter>` has several keyword arguments to control its behavior:
|
||||||
|
|
||||||
:dialect: Dialect of the CSV file. Default is ``excel``. Other possible choices are ``excel-tab`` and ``unix``. If one of ``delimiter``, ``skipinitialspace`` or ``strict`` is specified, ``dialect`` is ignored.
|
:dialect: Dialect of the CSV file. Default is ``excel``. Other possible choices are ``excel-tab`` and ``unix``. If one of ``delimiter``, ``skipinitialspace`` or ``strict`` is specified, ``dialect`` is ignored.
|
||||||
:fieldnames: A set of column names to use. If not provided, the first line of the CSV is assumed to contain the column names.
|
:fieldnames: A set of column names to use. If not provided, the first line of the CSV is assumed to contain the column names.
|
||||||
@@ -55,7 +55,7 @@ The ``from_csv`` filter has several keyword arguments to control its behavior:
|
|||||||
Converting to JSON
|
Converting to JSON
|
||||||
^^^^^^^^^^^^^^^^^^
|
^^^^^^^^^^^^^^^^^^
|
||||||
|
|
||||||
`JC <https://pypi.org/project/jc/>`_ is a CLI tool and Python library which allows to interpret output of various CLI programs as JSON. It is also available as a filter in community.general. This filter needs the `jc Python library <https://pypi.org/project/jc/>`_ installed on the controller.
|
`JC <https://pypi.org/project/jc/>`_ is a CLI tool and Python library which allows to interpret output of various CLI programs as JSON. It is also available as a filter in community.general, called :ansplugin:`community.general.jc#filter`. This filter needs the `jc Python library <https://pypi.org/project/jc/>`_ installed on the controller.
|
||||||
|
|
||||||
.. code-block:: yaml+jinja
|
.. code-block:: yaml+jinja
|
||||||
|
|
||||||
|
|||||||
@@ -11,7 +11,7 @@ The following filters allow to create identifiers.
|
|||||||
Hashids
|
Hashids
|
||||||
^^^^^^^
|
^^^^^^^
|
||||||
|
|
||||||
`Hashids <https://hashids.org/>`_ allow to convert sequences of integers to short unique string identifiers. This filter needs the `hashids Python library <https://pypi.org/project/hashids/>`_ installed on the controller.
|
`Hashids <https://hashids.org/>`_ allow to convert sequences of integers to short unique string identifiers. The :ansplugin:`community.general.hashids_encode#filter` and :ansplugin:`community.general.hashids_decode#filter` filters need the `hashids Python library <https://pypi.org/project/hashids/>`_ installed on the controller.
|
||||||
|
|
||||||
.. code-block:: yaml+jinja
|
.. code-block:: yaml+jinja
|
||||||
|
|
||||||
@@ -52,7 +52,7 @@ The hashids filters accept keyword arguments to allow fine-tuning the hashids ge
|
|||||||
Random MACs
|
Random MACs
|
||||||
^^^^^^^^^^^
|
^^^^^^^^^^^
|
||||||
|
|
||||||
You can use the ``random_mac`` filter to complete a partial `MAC address <https://en.wikipedia.org/wiki/MAC_address>`_ to a random 6-byte MAC address.
|
You can use the :ansplugin:`community.general.random_mac filter <community.general.random_mac#filter>` to complete a partial `MAC address <https://en.wikipedia.org/wiki/MAC_address>`_ to a random 6-byte MAC address.
|
||||||
|
|
||||||
.. code-block:: yaml+jinja
|
.. code-block:: yaml+jinja
|
||||||
|
|
||||||
|
|||||||
@@ -6,14 +6,4 @@
|
|||||||
Paths
|
Paths
|
||||||
-----
|
-----
|
||||||
|
|
||||||
The ``path_join`` filter has been added in ansible-base 2.10. If you want to use this filter, but also need to support Ansible 2.9, you can use ``community.general``'s ``path_join`` shim, ``community.general.path_join``. This filter redirects to ``path_join`` for ansible-base 2.10 and ansible-core 2.11 or newer, and re-implements the filter for Ansible 2.9.
|
The :ansplugin:`ansible.builtin.path_join filter <ansible.builtin.path_join#filter>` has been added in ansible-base 2.10. Community.general 3.0.0 and newer contains an alias ``community.general.path_join`` for this filter that could be used on Ansible 2.9 as well. Since community.general no longer supports Ansible 2.9, this is now a simple redirect to :ansplugin:`ansible.builtin.path_join filter <ansible.builtin.path_join#filter>`.
|
||||||
|
|
||||||
.. code-block:: yaml+jinja
|
|
||||||
|
|
||||||
# ansible-base 2.10 or newer:
|
|
||||||
path: {{ ('/etc', path, 'subdir', file) | path_join }}
|
|
||||||
|
|
||||||
# Also works with Ansible 2.9:
|
|
||||||
path: {{ ('/etc', path, 'subdir', file) | community.general.path_join }}
|
|
||||||
|
|
||||||
.. versionadded:: 3.0.0
|
|
||||||
|
|||||||
@@ -8,7 +8,7 @@
|
|||||||
Selecting JSON data: JSON queries
|
Selecting JSON data: JSON queries
|
||||||
---------------------------------
|
---------------------------------
|
||||||
|
|
||||||
To select a single element or a data subset from a complex data structure in JSON format (for example, Ansible facts), use the ``json_query`` filter. The ``json_query`` filter lets you query a complex JSON structure and iterate over it using a loop structure.
|
To select a single element or a data subset from a complex data structure in JSON format (for example, Ansible facts), use the :ansplugin:`community.general.json_query filter <community.general.json_query#filter>`. The :ansplugin:`community.general.json_query#filter` filter lets you query a complex JSON structure and iterate over it using a loop structure.
|
||||||
|
|
||||||
.. note:: You must manually install the **jmespath** dependency on the Ansible controller before using this filter. This filter is built upon **jmespath**, and you can use the same syntax. For examples, see `jmespath examples <http://jmespath.org/examples.html>`_.
|
.. note:: You must manually install the **jmespath** dependency on the Ansible controller before using this filter. This filter is built upon **jmespath**, and you can use the same syntax. For examples, see `jmespath examples <http://jmespath.org/examples.html>`_.
|
||||||
|
|
||||||
@@ -146,4 +146,4 @@ To extract ports from all clusters with name containing 'server1':
|
|||||||
vars:
|
vars:
|
||||||
server_name_query: "domain.server[?contains(name,'server1')].port"
|
server_name_query: "domain.server[?contains(name,'server1')].port"
|
||||||
|
|
||||||
.. note:: while using ``starts_with`` and ``contains``, you have to use `` to_json | from_json `` filter for correct parsing of data structure.
|
.. note:: while using ``starts_with`` and ``contains``, you have to use ``to_json | from_json`` filter for correct parsing of data structure.
|
||||||
|
|||||||
@@ -6,9 +6,9 @@
|
|||||||
Working with times
|
Working with times
|
||||||
------------------
|
------------------
|
||||||
|
|
||||||
The ``to_time_unit`` filter allows to convert times from a human-readable string to a unit. For example, ``'4h 30min 12second' | community.general.to_time_unit('hour')`` gives the number of hours that correspond to 4 hours, 30 minutes and 12 seconds.
|
The :ansplugin:`community.general.to_time_unit filter <community.general.to_time_unit#filter>` allows to convert times from a human-readable string to a unit. For example, ``'4h 30min 12second' | community.general.to_time_unit('hour')`` gives the number of hours that correspond to 4 hours, 30 minutes and 12 seconds.
|
||||||
|
|
||||||
There are shorthands to directly convert to various units, like ``to_hours``, ``to_minutes``, ``to_seconds``, and so on. The following table lists all units that can be used:
|
There are shorthands to directly convert to various units, like :ansplugin:`community.general.to_hours#filter`, :ansplugin:`community.general.to_minutes#filter`, :ansplugin:`community.general.to_seconds#filter`, and so on. The following table lists all units that can be used:
|
||||||
|
|
||||||
.. list-table:: Units
|
.. list-table:: Units
|
||||||
:widths: 25 25 25 25
|
:widths: 25 25 25 25
|
||||||
@@ -21,37 +21,37 @@ There are shorthands to directly convert to various units, like ``to_hours``, ``
|
|||||||
* - Millisecond
|
* - Millisecond
|
||||||
- 1/1000 second
|
- 1/1000 second
|
||||||
- ``ms``, ``millisecond``, ``milliseconds``, ``msec``, ``msecs``, ``msecond``, ``mseconds``
|
- ``ms``, ``millisecond``, ``milliseconds``, ``msec``, ``msecs``, ``msecond``, ``mseconds``
|
||||||
- ``to_milliseconds``
|
- :ansplugin:`community.general.to_milliseconds#filter`
|
||||||
* - Second
|
* - Second
|
||||||
- 1 second
|
- 1 second
|
||||||
- ``s``, ``sec``, ``secs``, ``second``, ``seconds``
|
- ``s``, ``sec``, ``secs``, ``second``, ``seconds``
|
||||||
- ``to_seconds``
|
- :ansplugin:`community.general.to_seconds#filter`
|
||||||
* - Minute
|
* - Minute
|
||||||
- 60 seconds
|
- 60 seconds
|
||||||
- ``m``, ``min``, ``mins``, ``minute``, ``minutes``
|
- ``m``, ``min``, ``mins``, ``minute``, ``minutes``
|
||||||
- ``to_minutes``
|
- :ansplugin:`community.general.to_minutes#filter`
|
||||||
* - Hour
|
* - Hour
|
||||||
- 60*60 seconds
|
- 60*60 seconds
|
||||||
- ``h``, ``hour``, ``hours``
|
- ``h``, ``hour``, ``hours``
|
||||||
- ``to_hours``
|
- :ansplugin:`community.general.to_hours#filter`
|
||||||
* - Day
|
* - Day
|
||||||
- 24*60*60 seconds
|
- 24*60*60 seconds
|
||||||
- ``d``, ``day``, ``days``
|
- ``d``, ``day``, ``days``
|
||||||
- ``to_days``
|
- :ansplugin:`community.general.to_days#filter`
|
||||||
* - Week
|
* - Week
|
||||||
- 7*24*60*60 seconds
|
- 7*24*60*60 seconds
|
||||||
- ``w``, ``week``, ``weeks``
|
- ``w``, ``week``, ``weeks``
|
||||||
- ``to_weeks``
|
- :ansplugin:`community.general.to_weeks#filter`
|
||||||
* - Month
|
* - Month
|
||||||
- 30*24*60*60 seconds
|
- 30*24*60*60 seconds
|
||||||
- ``mo``, ``month``, ``months``
|
- ``mo``, ``month``, ``months``
|
||||||
- ``to_months``
|
- :ansplugin:`community.general.to_months#filter`
|
||||||
* - Year
|
* - Year
|
||||||
- 365*24*60*60 seconds
|
- 365*24*60*60 seconds
|
||||||
- ``y``, ``year``, ``years``
|
- ``y``, ``year``, ``years``
|
||||||
- ``to_years``
|
- :ansplugin:`community.general.to_years#filter`
|
||||||
|
|
||||||
Note that months and years are using a simplified representation: a month is 30 days, and a year is 365 days. If you need different definitions of months or years, you can pass them as keyword arguments. For example, if you want a year to be 365.25 days, and a month to be 30.5 days, you can write ``'11months 4' | community.general.to_years(year=365.25, month=30.5)``. These keyword arguments can be specified to ``to_time_unit`` and to all shorthand filters.
|
Note that months and years are using a simplified representation: a month is 30 days, and a year is 365 days. If you need different definitions of months or years, you can pass them as keyword arguments. For example, if you want a year to be 365.25 days, and a month to be 30.5 days, you can write ``'11months 4' | community.general.to_years(year=365.25, month=30.5)``. These keyword arguments can be specified to :ansplugin:`community.general.to_time_unit#filter` and to all shorthand filters.
|
||||||
|
|
||||||
.. code-block:: yaml+jinja
|
.. code-block:: yaml+jinja
|
||||||
|
|
||||||
|
|||||||
@@ -6,9 +6,9 @@
|
|||||||
Working with Unicode
|
Working with Unicode
|
||||||
---------------------
|
---------------------
|
||||||
|
|
||||||
`Unicode <https://unicode.org/main.html>`_ makes it possible to produce two strings which may be visually equivalent, but are comprised of distinctly different characters/character sequences. To address this ``Unicode`` defines `normalization forms <https://unicode.org/reports/tr15/>`_ which avoid these distinctions by choosing a unique character sequence for a given visual representation.
|
`Unicode <https://unicode.org/main.html>`_ makes it possible to produce two strings which may be visually equivalent, but are comprised of distinctly different characters/character sequences. To address this Unicode defines `normalization forms <https://unicode.org/reports/tr15/>`_ which avoid these distinctions by choosing a unique character sequence for a given visual representation.
|
||||||
|
|
||||||
You can use the ``community.general.unicode_normalize`` filter to normalize ``Unicode`` strings within your playbooks.
|
You can use the :ansplugin:`community.general.unicode_normalize filter <community.general.unicode_normalize#filter>` to normalize Unicode strings within your playbooks.
|
||||||
|
|
||||||
.. code-block:: yaml+jinja
|
.. code-block:: yaml+jinja
|
||||||
|
|
||||||
@@ -28,7 +28,7 @@ This produces:
|
|||||||
"msg": true
|
"msg": true
|
||||||
}
|
}
|
||||||
|
|
||||||
The ``community.general.unicode_normalize`` filter accepts a keyword argument to select the ``Unicode`` form used to normalize the input string.
|
The :ansplugin:`community.general.unicode_normalize filter <community.general.unicode_normalize#filter>` accepts a keyword argument :ansopt:`community.general.unicode_normalize#filter:form` to select the Unicode form used to normalize the input string.
|
||||||
|
|
||||||
:form: One of ``'NFC'`` (default), ``'NFD'``, ``'NFKC'``, or ``'NFKD'``. See the `Unicode reference <https://unicode.org/reports/tr15/>`_ for more information.
|
:form: One of ``'NFC'`` (default), ``'NFD'``, ``'NFKC'``, or ``'NFKD'``. See the `Unicode reference <https://unicode.org/reports/tr15/>`_ for more information.
|
||||||
|
|
||||||
|
|||||||
@@ -6,7 +6,7 @@
|
|||||||
Working with versions
|
Working with versions
|
||||||
---------------------
|
---------------------
|
||||||
|
|
||||||
If you need to sort a list of version numbers, the Jinja ``sort`` filter is problematic. Since it sorts lexicographically, ``2.10`` will come before ``2.9``. To treat version numbers correctly, you can use the ``version_sort`` filter:
|
If you need to sort a list of version numbers, the Jinja ``sort`` filter is problematic. Since it sorts lexicographically, ``2.10`` will come before ``2.9``. To treat version numbers correctly, you can use the :ansplugin:`community.general.version_sort filter <community.general.version_sort#filter>`:
|
||||||
|
|
||||||
.. code-block:: yaml+jinja
|
.. code-block:: yaml+jinja
|
||||||
|
|
||||||
|
|||||||
@@ -15,7 +15,7 @@ The :ref:`community.general collection <plugins_in_community.general>` offers cu
|
|||||||
Feature Tests
|
Feature Tests
|
||||||
-------------
|
-------------
|
||||||
|
|
||||||
The ``a_module`` test allows to check whether a given string refers to an existing module or action plugin. This can be useful in roles, which can use this to ensure that required modules are present ahead of time.
|
The :ansplugin:`community.general.a_module test <community.general.a_module#test>` allows to check whether a given string refers to an existing module or action plugin. This can be useful in roles, which can use this to ensure that required modules are present ahead of time.
|
||||||
|
|
||||||
.. code-block:: yaml+jinja
|
.. code-block:: yaml+jinja
|
||||||
|
|
||||||
|
|||||||
@@ -5,7 +5,7 @@
|
|||||||
|
|
||||||
namespace: community
|
namespace: community
|
||||||
name: general
|
name: general
|
||||||
version: 7.2.1
|
version: 7.5.4
|
||||||
readme: README.md
|
readme: README.md
|
||||||
authors:
|
authors:
|
||||||
- Ansible (https://github.com/ansible)
|
- Ansible (https://github.com/ansible)
|
||||||
|
|||||||
@@ -44,7 +44,7 @@ class ActionModule(ActionBase):
|
|||||||
|
|
||||||
def _async_result(self, async_status_args, task_vars, timeout):
|
def _async_result(self, async_status_args, task_vars, timeout):
|
||||||
'''
|
'''
|
||||||
Retrieve results of the asynchonous task, and display them in place of
|
Retrieve results of the asynchronous task, and display them in place of
|
||||||
the async wrapper results (those with the ansible_job_id key).
|
the async wrapper results (those with the ansible_job_id key).
|
||||||
'''
|
'''
|
||||||
async_status = self._task.copy()
|
async_status = self._task.copy()
|
||||||
|
|||||||
@@ -45,7 +45,7 @@ class ActionModule(ActionBase):
|
|||||||
SHUTDOWN_COMMAND_ARGS = {
|
SHUTDOWN_COMMAND_ARGS = {
|
||||||
'alpine': '',
|
'alpine': '',
|
||||||
'void': '-h +{delay_min} "{message}"',
|
'void': '-h +{delay_min} "{message}"',
|
||||||
'freebsd': '-h +{delay_sec}s "{message}"',
|
'freebsd': '-p +{delay_sec}s "{message}"',
|
||||||
'linux': DEFAULT_SHUTDOWN_COMMAND_ARGS,
|
'linux': DEFAULT_SHUTDOWN_COMMAND_ARGS,
|
||||||
'macosx': '-h +{delay_min} "{message}"',
|
'macosx': '-h +{delay_min} "{message}"',
|
||||||
'openbsd': '-h +{delay_min} "{message}"',
|
'openbsd': '-h +{delay_min} "{message}"',
|
||||||
|
|||||||
2
plugins/cache/redis.py
vendored
2
plugins/cache/redis.py
vendored
@@ -150,7 +150,7 @@ class CacheModule(BaseCacheModule):
|
|||||||
# format: "localhost:26379;localhost2:26379;0:changeme"
|
# format: "localhost:26379;localhost2:26379;0:changeme"
|
||||||
connections = uri.split(';')
|
connections = uri.split(';')
|
||||||
connection_args = connections.pop(-1)
|
connection_args = connections.pop(-1)
|
||||||
if len(connection_args) > 0: # hanle if no db nr is given
|
if len(connection_args) > 0: # handle if no db nr is given
|
||||||
connection_args = connection_args.split(':')
|
connection_args = connection_args.split(':')
|
||||||
kw['db'] = connection_args.pop(0)
|
kw['db'] = connection_args.pop(0)
|
||||||
try:
|
try:
|
||||||
|
|||||||
@@ -84,6 +84,7 @@ import time
|
|||||||
import uuid
|
import uuid
|
||||||
|
|
||||||
from collections import OrderedDict
|
from collections import OrderedDict
|
||||||
|
from contextlib import closing
|
||||||
from os.path import basename
|
from os.path import basename
|
||||||
|
|
||||||
from ansible.errors import AnsibleError, AnsibleRuntimeError
|
from ansible.errors import AnsibleError, AnsibleRuntimeError
|
||||||
@@ -201,24 +202,25 @@ class ElasticSource(object):
|
|||||||
|
|
||||||
apm_cli = self.init_apm_client(apm_server_url, apm_service_name, apm_verify_server_cert, apm_secret_token, apm_api_key)
|
apm_cli = self.init_apm_client(apm_server_url, apm_service_name, apm_verify_server_cert, apm_secret_token, apm_api_key)
|
||||||
if apm_cli:
|
if apm_cli:
|
||||||
instrument() # Only call this once, as early as possible.
|
with closing(apm_cli):
|
||||||
if traceparent:
|
instrument() # Only call this once, as early as possible.
|
||||||
parent = trace_parent_from_string(traceparent)
|
if traceparent:
|
||||||
apm_cli.begin_transaction("Session", trace_parent=parent, start=parent_start_time)
|
parent = trace_parent_from_string(traceparent)
|
||||||
else:
|
apm_cli.begin_transaction("Session", trace_parent=parent, start=parent_start_time)
|
||||||
apm_cli.begin_transaction("Session", start=parent_start_time)
|
else:
|
||||||
# Populate trace metadata attributes
|
apm_cli.begin_transaction("Session", start=parent_start_time)
|
||||||
if self.ansible_version is not None:
|
# Populate trace metadata attributes
|
||||||
label(ansible_version=self.ansible_version)
|
if self.ansible_version is not None:
|
||||||
label(ansible_session=self.session, ansible_host_name=self.host, ansible_host_user=self.user)
|
label(ansible_version=self.ansible_version)
|
||||||
if self.ip_address is not None:
|
label(ansible_session=self.session, ansible_host_name=self.host, ansible_host_user=self.user)
|
||||||
label(ansible_host_ip=self.ip_address)
|
if self.ip_address is not None:
|
||||||
|
label(ansible_host_ip=self.ip_address)
|
||||||
|
|
||||||
for task_data in tasks:
|
for task_data in tasks:
|
||||||
for host_uuid, host_data in task_data.host_data.items():
|
for host_uuid, host_data in task_data.host_data.items():
|
||||||
self.create_span_data(apm_cli, task_data, host_data)
|
self.create_span_data(apm_cli, task_data, host_data)
|
||||||
|
|
||||||
apm_cli.end_transaction(name=__name__, result=status, duration=end_time - parent_start_time)
|
apm_cli.end_transaction(name=__name__, result=status, duration=end_time - parent_start_time)
|
||||||
|
|
||||||
def create_span_data(self, apm_cli, task_data, host_data):
|
def create_span_data(self, apm_cli, task_data, host_data):
|
||||||
""" create the span with the given TaskData and HostData """
|
""" create the span with the given TaskData and HostData """
|
||||||
|
|||||||
@@ -18,7 +18,7 @@ DOCUMENTATION = '''
|
|||||||
requirements:
|
requirements:
|
||||||
- whitelisting in configuration
|
- whitelisting in configuration
|
||||||
- certifi (Python library)
|
- certifi (Python library)
|
||||||
- flatdict (Python library), if you want to use the 'flatten' option
|
- flatdict (Python library), if you want to use the O(flatten) option
|
||||||
options:
|
options:
|
||||||
api:
|
api:
|
||||||
description: URI to the Logentries API.
|
description: URI to the Logentries API.
|
||||||
@@ -90,9 +90,9 @@ examples: >
|
|||||||
api = data.logentries.com
|
api = data.logentries.com
|
||||||
port = 10000
|
port = 10000
|
||||||
tls_port = 20000
|
tls_port = 20000
|
||||||
use_tls = no
|
use_tls = true
|
||||||
token = dd21fc88-f00a-43ff-b977-e3a4233c53af
|
token = dd21fc88-f00a-43ff-b977-e3a4233c53af
|
||||||
flatten = False
|
flatten = false
|
||||||
'''
|
'''
|
||||||
|
|
||||||
import os
|
import os
|
||||||
@@ -196,15 +196,11 @@ else:
|
|||||||
class TLSSocketAppender(PlainTextSocketAppender):
|
class TLSSocketAppender(PlainTextSocketAppender):
|
||||||
def open_connection(self):
|
def open_connection(self):
|
||||||
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
|
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
|
||||||
sock = ssl.wrap_socket(
|
context = ssl.create_default_context(
|
||||||
|
purpose=ssl.Purpose.SERVER_AUTH,
|
||||||
|
cafile=certifi.where(), )
|
||||||
|
sock = context.wrap_socket(
|
||||||
sock=sock,
|
sock=sock,
|
||||||
keyfile=None,
|
|
||||||
certfile=None,
|
|
||||||
server_side=False,
|
|
||||||
cert_reqs=ssl.CERT_REQUIRED,
|
|
||||||
ssl_version=getattr(
|
|
||||||
ssl, 'PROTOCOL_TLSv1_2', ssl.PROTOCOL_TLSv1),
|
|
||||||
ca_certs=certifi.where(),
|
|
||||||
do_handshake_on_connect=True,
|
do_handshake_on_connect=True,
|
||||||
suppress_ragged_eofs=True, )
|
suppress_ragged_eofs=True, )
|
||||||
sock.connect((self.LE_API, self.LE_TLS_PORT))
|
sock.connect((self.LE_API, self.LE_TLS_PORT))
|
||||||
|
|||||||
@@ -14,7 +14,7 @@ DOCUMENTATION = '''
|
|||||||
short_description: Post task results to a Nagios server through nrdp
|
short_description: Post task results to a Nagios server through nrdp
|
||||||
description:
|
description:
|
||||||
- This callback send playbook result to Nagios.
|
- This callback send playbook result to Nagios.
|
||||||
- Nagios shall use NRDP to recive passive events.
|
- Nagios shall use NRDP to receive passive events.
|
||||||
- The passive check is sent to a dedicated host/service for Ansible.
|
- The passive check is sent to a dedicated host/service for Ansible.
|
||||||
options:
|
options:
|
||||||
url:
|
url:
|
||||||
|
|||||||
@@ -15,7 +15,7 @@ DOCUMENTATION = '''
|
|||||||
- set as main display callback
|
- set as main display callback
|
||||||
short_description: Don't display stuff to screen
|
short_description: Don't display stuff to screen
|
||||||
description:
|
description:
|
||||||
- This callback prevents outputing events to screen.
|
- This callback prevents outputting events to screen.
|
||||||
'''
|
'''
|
||||||
|
|
||||||
from ansible.plugins.callback import CallbackBase
|
from ansible.plugins.callback import CallbackBase
|
||||||
@@ -24,7 +24,7 @@ from ansible.plugins.callback import CallbackBase
|
|||||||
class CallbackModule(CallbackBase):
|
class CallbackModule(CallbackBase):
|
||||||
|
|
||||||
'''
|
'''
|
||||||
This callback wont print messages to stdout when new callback events are received.
|
This callback won't print messages to stdout when new callback events are received.
|
||||||
'''
|
'''
|
||||||
|
|
||||||
CALLBACK_VERSION = 2.0
|
CALLBACK_VERSION = 2.0
|
||||||
|
|||||||
@@ -115,8 +115,8 @@ class CallbackModule(CallbackBase):
|
|||||||
line_length = 120
|
line_length = 120
|
||||||
if self.last_skipped:
|
if self.last_skipped:
|
||||||
print()
|
print()
|
||||||
msg = colorize("# {0} {1}".format(task_name,
|
line = "# {0} ".format(task_name)
|
||||||
'*' * (line_length - len(task_name))), 'bold')
|
msg = colorize("{0}{1}".format(line, '*' * (line_length - len(line))), 'bold')
|
||||||
print(msg)
|
print(msg)
|
||||||
|
|
||||||
def _indent_text(self, text, indent_level):
|
def _indent_text(self, text, indent_level):
|
||||||
|
|||||||
@@ -1,5 +1,5 @@
|
|||||||
# -*- coding: utf-8 -*-
|
# -*- coding: utf-8 -*-
|
||||||
# Copyright (c) 2017, Allyson Bowles <@akatch>
|
# Copyright (c) 2023, Al Bowles <@akatch>
|
||||||
# Copyright (c) 2012-2014, Michael DeHaan <michael.dehaan@gmail.com>
|
# Copyright (c) 2012-2014, Michael DeHaan <michael.dehaan@gmail.com>
|
||||||
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
|
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||||
# SPDX-License-Identifier: GPL-3.0-or-later
|
# SPDX-License-Identifier: GPL-3.0-or-later
|
||||||
@@ -11,7 +11,7 @@ __metaclass__ = type
|
|||||||
DOCUMENTATION = '''
|
DOCUMENTATION = '''
|
||||||
name: unixy
|
name: unixy
|
||||||
type: stdout
|
type: stdout
|
||||||
author: Allyson Bowles (@akatch)
|
author: Al Bowles (@akatch)
|
||||||
short_description: condensed Ansible output
|
short_description: condensed Ansible output
|
||||||
description:
|
description:
|
||||||
- Consolidated Ansible output in the style of LINUX/UNIX startup logs.
|
- Consolidated Ansible output in the style of LINUX/UNIX startup logs.
|
||||||
@@ -40,7 +40,6 @@ class CallbackModule(CallbackModule_default):
|
|||||||
- Only display task names if the task runs on at least one host
|
- Only display task names if the task runs on at least one host
|
||||||
- Add option to display all hostnames on a single line in the appropriate result color (failures may have a separate line)
|
- Add option to display all hostnames on a single line in the appropriate result color (failures may have a separate line)
|
||||||
- Consolidate stats display
|
- Consolidate stats display
|
||||||
- Display whether run is in --check mode
|
|
||||||
- Don't show play name if no hosts found
|
- Don't show play name if no hosts found
|
||||||
'''
|
'''
|
||||||
|
|
||||||
@@ -92,19 +91,31 @@ class CallbackModule(CallbackModule_default):
|
|||||||
def v2_playbook_on_task_start(self, task, is_conditional):
|
def v2_playbook_on_task_start(self, task, is_conditional):
|
||||||
self._get_task_display_name(task)
|
self._get_task_display_name(task)
|
||||||
if self.task_display_name is not None:
|
if self.task_display_name is not None:
|
||||||
self._display.display("%s..." % self.task_display_name)
|
if task.check_mode and self.get_option('check_mode_markers'):
|
||||||
|
self._display.display("%s (check mode)..." % self.task_display_name)
|
||||||
|
else:
|
||||||
|
self._display.display("%s..." % self.task_display_name)
|
||||||
|
|
||||||
def v2_playbook_on_handler_task_start(self, task):
|
def v2_playbook_on_handler_task_start(self, task):
|
||||||
self._get_task_display_name(task)
|
self._get_task_display_name(task)
|
||||||
if self.task_display_name is not None:
|
if self.task_display_name is not None:
|
||||||
self._display.display("%s (via handler)... " % self.task_display_name)
|
if task.check_mode and self.get_option('check_mode_markers'):
|
||||||
|
self._display.display("%s (via handler in check mode)... " % self.task_display_name)
|
||||||
|
else:
|
||||||
|
self._display.display("%s (via handler)... " % self.task_display_name)
|
||||||
|
|
||||||
def v2_playbook_on_play_start(self, play):
|
def v2_playbook_on_play_start(self, play):
|
||||||
name = play.get_name().strip()
|
name = play.get_name().strip()
|
||||||
if name and play.hosts:
|
if play.check_mode and self.get_option('check_mode_markers'):
|
||||||
msg = u"\n- %s on hosts: %s -" % (name, ",".join(play.hosts))
|
if name and play.hosts:
|
||||||
|
msg = u"\n- %s (in check mode) on hosts: %s -" % (name, ",".join(play.hosts))
|
||||||
|
else:
|
||||||
|
msg = u"- check mode -"
|
||||||
else:
|
else:
|
||||||
msg = u"---"
|
if name and play.hosts:
|
||||||
|
msg = u"\n- %s on hosts: %s -" % (name, ",".join(play.hosts))
|
||||||
|
else:
|
||||||
|
msg = u"---"
|
||||||
|
|
||||||
self._display.display(msg)
|
self._display.display(msg)
|
||||||
|
|
||||||
@@ -227,8 +238,10 @@ class CallbackModule(CallbackModule_default):
|
|||||||
self._display.display(" Ran out of hosts!", color=C.COLOR_ERROR)
|
self._display.display(" Ran out of hosts!", color=C.COLOR_ERROR)
|
||||||
|
|
||||||
def v2_playbook_on_start(self, playbook):
|
def v2_playbook_on_start(self, playbook):
|
||||||
# TODO display whether this run is happening in check mode
|
if context.CLIARGS['check'] and self.get_option('check_mode_markers'):
|
||||||
self._display.display("Executing playbook %s" % basename(playbook._file_name))
|
self._display.display("Executing playbook %s in check mode" % basename(playbook._file_name))
|
||||||
|
else:
|
||||||
|
self._display.display("Executing playbook %s" % basename(playbook._file_name))
|
||||||
|
|
||||||
# show CLI arguments
|
# show CLI arguments
|
||||||
if self._display.verbosity > 3:
|
if self._display.verbosity > 3:
|
||||||
|
|||||||
@@ -46,11 +46,26 @@ DOCUMENTATION = '''
|
|||||||
vars:
|
vars:
|
||||||
- name: ansible_chroot_exe
|
- name: ansible_chroot_exe
|
||||||
default: chroot
|
default: chroot
|
||||||
|
disable_root_check:
|
||||||
|
description:
|
||||||
|
- Do not check that the user is not root.
|
||||||
|
ini:
|
||||||
|
- section: chroot_connection
|
||||||
|
key: disable_root_check
|
||||||
|
env:
|
||||||
|
- name: ANSIBLE_CHROOT_DISABLE_ROOT_CHECK
|
||||||
|
vars:
|
||||||
|
- name: ansible_chroot_disable_root_check
|
||||||
|
default: false
|
||||||
|
type: bool
|
||||||
|
version_added: 7.3.0
|
||||||
'''
|
'''
|
||||||
|
|
||||||
EXAMPLES = r"""
|
EXAMPLES = r"""
|
||||||
# Static inventory file
|
# Plugin requires root privileges for chroot, -E preserves your env (and location of ~/.ansible):
|
||||||
|
# sudo -E ansible-playbook ...
|
||||||
#
|
#
|
||||||
|
# Static inventory file
|
||||||
# [chroots]
|
# [chroots]
|
||||||
# /path/to/debootstrap
|
# /path/to/debootstrap
|
||||||
# /path/to/feboostrap
|
# /path/to/feboostrap
|
||||||
@@ -100,11 +115,7 @@ class Connection(ConnectionBase):
|
|||||||
|
|
||||||
self.chroot = self._play_context.remote_addr
|
self.chroot = self._play_context.remote_addr
|
||||||
|
|
||||||
if os.geteuid() != 0:
|
# do some trivial checks for ensuring 'host' is actually a chroot'able dir
|
||||||
raise AnsibleError("chroot connection requires running as root")
|
|
||||||
|
|
||||||
# we're running as root on the local system so do some
|
|
||||||
# trivial checks for ensuring 'host' is actually a chroot'able dir
|
|
||||||
if not os.path.isdir(self.chroot):
|
if not os.path.isdir(self.chroot):
|
||||||
raise AnsibleError("%s is not a directory" % self.chroot)
|
raise AnsibleError("%s is not a directory" % self.chroot)
|
||||||
|
|
||||||
@@ -118,6 +129,11 @@ class Connection(ConnectionBase):
|
|||||||
|
|
||||||
def _connect(self):
|
def _connect(self):
|
||||||
""" connect to the chroot """
|
""" connect to the chroot """
|
||||||
|
if not self.get_option('disable_root_check') and os.geteuid() != 0:
|
||||||
|
raise AnsibleError(
|
||||||
|
"chroot connection requires running as root. "
|
||||||
|
"You can override this check with the `disable_root_check` option.")
|
||||||
|
|
||||||
if os.path.isabs(self.get_option('chroot_exe')):
|
if os.path.isabs(self.get_option('chroot_exe')):
|
||||||
self.chroot_cmd = self.get_option('chroot_exe')
|
self.chroot_cmd = self.get_option('chroot_exe')
|
||||||
else:
|
else:
|
||||||
|
|||||||
@@ -70,7 +70,7 @@ class Connection(ConnectionBase):
|
|||||||
if in_data:
|
if in_data:
|
||||||
raise AnsibleError("Internal Error: this module does not support optimized module pipelining")
|
raise AnsibleError("Internal Error: this module does not support optimized module pipelining")
|
||||||
|
|
||||||
# totally ignores privlege escalation
|
# totally ignores privilege escalation
|
||||||
display.vvv("EXEC %s" % cmd, host=self.host)
|
display.vvv("EXEC %s" % cmd, host=self.host)
|
||||||
p = self.client.command.run(cmd)[self.host]
|
p = self.client.command.run(cmd)[self.host]
|
||||||
return p[0], p[1], p[2]
|
return p[0], p[1], p[2]
|
||||||
|
|||||||
@@ -19,6 +19,7 @@ DOCUMENTATION = '''
|
|||||||
- Container identifier
|
- Container identifier
|
||||||
default: inventory_hostname
|
default: inventory_hostname
|
||||||
vars:
|
vars:
|
||||||
|
- name: inventory_hostname
|
||||||
- name: ansible_host
|
- name: ansible_host
|
||||||
- name: ansible_lxc_host
|
- name: ansible_lxc_host
|
||||||
executable:
|
executable:
|
||||||
@@ -59,7 +60,7 @@ class Connection(ConnectionBase):
|
|||||||
def __init__(self, play_context, new_stdin, *args, **kwargs):
|
def __init__(self, play_context, new_stdin, *args, **kwargs):
|
||||||
super(Connection, self).__init__(play_context, new_stdin, *args, **kwargs)
|
super(Connection, self).__init__(play_context, new_stdin, *args, **kwargs)
|
||||||
|
|
||||||
self.container_name = self._play_context.remote_addr
|
self.container_name = None
|
||||||
self.container = None
|
self.container = None
|
||||||
|
|
||||||
def _connect(self):
|
def _connect(self):
|
||||||
@@ -67,12 +68,14 @@ class Connection(ConnectionBase):
|
|||||||
super(Connection, self)._connect()
|
super(Connection, self)._connect()
|
||||||
|
|
||||||
if not HAS_LIBLXC:
|
if not HAS_LIBLXC:
|
||||||
msg = "lxc bindings for python2 are not installed"
|
msg = "lxc python bindings are not installed"
|
||||||
raise errors.AnsibleError(msg)
|
raise errors.AnsibleError(msg)
|
||||||
|
|
||||||
if self.container:
|
if self.container:
|
||||||
return
|
return
|
||||||
|
|
||||||
|
self.container_name = self.get_option('remote_addr')
|
||||||
|
|
||||||
self._display.vvv("THIS IS A LOCAL LXC DIR", host=self.container_name)
|
self._display.vvv("THIS IS A LOCAL LXC DIR", host=self.container_name)
|
||||||
self.container = _lxc.Container(self.container_name)
|
self.container = _lxc.Container(self.container_name)
|
||||||
if self.container.state == "STOPPED":
|
if self.container.state == "STOPPED":
|
||||||
@@ -117,7 +120,7 @@ class Connection(ConnectionBase):
|
|||||||
super(Connection, self).exec_command(cmd, in_data=in_data, sudoable=sudoable)
|
super(Connection, self).exec_command(cmd, in_data=in_data, sudoable=sudoable)
|
||||||
|
|
||||||
# python2-lxc needs bytes. python3-lxc needs text.
|
# python2-lxc needs bytes. python3-lxc needs text.
|
||||||
executable = to_native(self._play_context.executable, errors='surrogate_or_strict')
|
executable = to_native(self.get_option('executable'), errors='surrogate_or_strict')
|
||||||
local_cmd = [executable, '-c', to_native(cmd, errors='surrogate_or_strict')]
|
local_cmd = [executable, '-c', to_native(cmd, errors='surrogate_or_strict')]
|
||||||
|
|
||||||
read_stdout, write_stdout = None, None
|
read_stdout, write_stdout = None, None
|
||||||
|
|||||||
@@ -10,9 +10,9 @@ __metaclass__ = type
|
|||||||
DOCUMENTATION = '''
|
DOCUMENTATION = '''
|
||||||
author: Matt Clay (@mattclay) <matt@mystile.com>
|
author: Matt Clay (@mattclay) <matt@mystile.com>
|
||||||
name: lxd
|
name: lxd
|
||||||
short_description: Run tasks in lxc containers via lxc CLI
|
short_description: Run tasks in LXD instances via C(lxc) CLI
|
||||||
description:
|
description:
|
||||||
- Run commands or put/fetch files to an existing lxc container using lxc CLI
|
- Run commands or put/fetch files to an existing instance using C(lxc) CLI.
|
||||||
options:
|
options:
|
||||||
remote_addr:
|
remote_addr:
|
||||||
description:
|
description:
|
||||||
@@ -24,7 +24,7 @@ DOCUMENTATION = '''
|
|||||||
- name: ansible_lxd_host
|
- name: ansible_lxd_host
|
||||||
executable:
|
executable:
|
||||||
description:
|
description:
|
||||||
- shell to use for execution inside container
|
- Shell to use for execution inside instance.
|
||||||
default: /bin/sh
|
default: /bin/sh
|
||||||
vars:
|
vars:
|
||||||
- name: ansible_executable
|
- name: ansible_executable
|
||||||
@@ -69,7 +69,7 @@ class Connection(ConnectionBase):
|
|||||||
raise AnsibleError("lxc command not found in PATH")
|
raise AnsibleError("lxc command not found in PATH")
|
||||||
|
|
||||||
if self._play_context.remote_user is not None and self._play_context.remote_user != 'root':
|
if self._play_context.remote_user is not None and self._play_context.remote_user != 'root':
|
||||||
self._display.warning('lxd does not support remote_user, using container default: root')
|
self._display.warning('lxd does not support remote_user, using default: root')
|
||||||
|
|
||||||
def _connect(self):
|
def _connect(self):
|
||||||
"""connect to lxd (nothing to do here) """
|
"""connect to lxd (nothing to do here) """
|
||||||
|
|||||||
@@ -47,6 +47,6 @@ options:
|
|||||||
type: str
|
type: str
|
||||||
token:
|
token:
|
||||||
description:
|
description:
|
||||||
- ACL token for authentification.
|
- ACL token for authentication.
|
||||||
type: str
|
type: str
|
||||||
'''
|
'''
|
||||||
|
|||||||
@@ -56,7 +56,7 @@ EXAMPLES = '''
|
|||||||
- name: Parse a CSV file's contents
|
- name: Parse a CSV file's contents
|
||||||
ansible.builtin.debug:
|
ansible.builtin.debug:
|
||||||
msg: >-
|
msg: >-
|
||||||
{{ csv_data | community.genera.from_csv(dialect='unix') }}
|
{{ csv_data | community.general.from_csv(dialect='unix') }}
|
||||||
vars:
|
vars:
|
||||||
csv_data: |
|
csv_data: |
|
||||||
Column 1,Value
|
Column 1,Value
|
||||||
|
|||||||
@@ -42,6 +42,12 @@ DOCUMENTATION = '''
|
|||||||
description: Fallback to cached results if connection to cobbler fails.
|
description: Fallback to cached results if connection to cobbler fails.
|
||||||
type: boolean
|
type: boolean
|
||||||
default: false
|
default: false
|
||||||
|
exclude_mgmt_classes:
|
||||||
|
description: Management classes to exclude from inventory.
|
||||||
|
type: list
|
||||||
|
default: []
|
||||||
|
elements: str
|
||||||
|
version_added: 7.4.0
|
||||||
exclude_profiles:
|
exclude_profiles:
|
||||||
description:
|
description:
|
||||||
- Profiles to exclude from inventory.
|
- Profiles to exclude from inventory.
|
||||||
@@ -49,6 +55,12 @@ DOCUMENTATION = '''
|
|||||||
type: list
|
type: list
|
||||||
default: []
|
default: []
|
||||||
elements: str
|
elements: str
|
||||||
|
include_mgmt_classes:
|
||||||
|
description: Management classes to include from inventory.
|
||||||
|
type: list
|
||||||
|
default: []
|
||||||
|
elements: str
|
||||||
|
version_added: 7.4.0
|
||||||
include_profiles:
|
include_profiles:
|
||||||
description:
|
description:
|
||||||
- Profiles to include from inventory.
|
- Profiles to include from inventory.
|
||||||
@@ -216,6 +228,8 @@ class InventoryModule(BaseInventoryPlugin, Cacheable):
|
|||||||
self.cache_key = self.get_cache_key(path)
|
self.cache_key = self.get_cache_key(path)
|
||||||
self.use_cache = cache and self.get_option('cache')
|
self.use_cache = cache and self.get_option('cache')
|
||||||
|
|
||||||
|
self.exclude_mgmt_classes = self.get_option('exclude_mgmt_classes')
|
||||||
|
self.include_mgmt_classes = self.get_option('include_mgmt_classes')
|
||||||
self.exclude_profiles = self.get_option('exclude_profiles')
|
self.exclude_profiles = self.get_option('exclude_profiles')
|
||||||
self.include_profiles = self.get_option('include_profiles')
|
self.include_profiles = self.get_option('include_profiles')
|
||||||
self.group_by = self.get_option('group_by')
|
self.group_by = self.get_option('group_by')
|
||||||
@@ -265,9 +279,16 @@ class InventoryModule(BaseInventoryPlugin, Cacheable):
|
|||||||
hostname = host['hostname'] # None
|
hostname = host['hostname'] # None
|
||||||
interfaces = host['interfaces']
|
interfaces = host['interfaces']
|
||||||
|
|
||||||
if self._exclude_profile(host['profile']):
|
if set(host['mgmt_classes']) & set(self.include_mgmt_classes):
|
||||||
self.display.vvvv('Excluding host %s in profile %s\n' % (host['name'], host['profile']))
|
self.display.vvvv('Including host %s in mgmt_classes %s\n' % (host['name'], host['mgmt_classes']))
|
||||||
continue
|
else:
|
||||||
|
if self._exclude_profile(host['profile']):
|
||||||
|
self.display.vvvv('Excluding host %s in profile %s\n' % (host['name'], host['profile']))
|
||||||
|
continue
|
||||||
|
|
||||||
|
if set(host['mgmt_classes']) & set(self.exclude_mgmt_classes):
|
||||||
|
self.display.vvvv('Excluding host %s in mgmt_classes %s\n' % (host['name'], host['mgmt_classes']))
|
||||||
|
continue
|
||||||
|
|
||||||
# hostname is often empty for non-static IP hosts
|
# hostname is often empty for non-static IP hosts
|
||||||
if hostname == '':
|
if hostname == '':
|
||||||
|
|||||||
@@ -47,7 +47,7 @@ DOCUMENTATION = r'''
|
|||||||
- You need to set this password on the lxd server before
|
- You need to set this password on the lxd server before
|
||||||
running this module using the following command
|
running this module using the following command
|
||||||
C(lxc config set core.trust_password <some random password>)
|
C(lxc config set core.trust_password <some random password>)
|
||||||
See U(https://www.stgraber.org/2016/04/18/lxd-api-direct-interaction/).
|
See U(https://documentation.ubuntu.com/lxd/en/latest/authentication/#adding-client-certificates-using-a-trust-password).
|
||||||
- If O(trust_password) is set, this module send a request for authentication before sending any requests.
|
- If O(trust_password) is set, this module send a request for authentication before sending any requests.
|
||||||
type: str
|
type: str
|
||||||
state:
|
state:
|
||||||
@@ -70,7 +70,7 @@ DOCUMENTATION = r'''
|
|||||||
version_added: 4.2.0
|
version_added: 4.2.0
|
||||||
prefered_instance_network_interface:
|
prefered_instance_network_interface:
|
||||||
description:
|
description:
|
||||||
- If an instance has multiple network interfaces, select which one is the prefered as pattern.
|
- If an instance has multiple network interfaces, select which one is the preferred as pattern.
|
||||||
- Combined with the first number that can be found e.g. 'eth' + 0.
|
- Combined with the first number that can be found e.g. 'eth' + 0.
|
||||||
- The option has been renamed from O(prefered_container_network_interface) to O(prefered_instance_network_interface)
|
- The option has been renamed from O(prefered_container_network_interface) to O(prefered_instance_network_interface)
|
||||||
in community.general 3.8.0. The old name still works as an alias.
|
in community.general 3.8.0. The old name still works as an alias.
|
||||||
@@ -80,7 +80,7 @@ DOCUMENTATION = r'''
|
|||||||
- prefered_container_network_interface
|
- prefered_container_network_interface
|
||||||
prefered_instance_network_family:
|
prefered_instance_network_family:
|
||||||
description:
|
description:
|
||||||
- If an instance has multiple network interfaces, which one is the prefered by family.
|
- If an instance has multiple network interfaces, which one is the preferred by family.
|
||||||
- Specify V(inet) for IPv4 and V(inet6) for IPv6.
|
- Specify V(inet) for IPv4 and V(inet6) for IPv6.
|
||||||
type: str
|
type: str
|
||||||
default: inet
|
default: inet
|
||||||
@@ -359,7 +359,7 @@ class InventoryModule(BaseInventoryPlugin):
|
|||||||
Kwargs:
|
Kwargs:
|
||||||
None
|
None
|
||||||
Source:
|
Source:
|
||||||
https://github.com/lxc/lxd/blob/master/doc/rest-api.md
|
https://documentation.ubuntu.com/lxd/en/latest/rest-api/
|
||||||
Raises:
|
Raises:
|
||||||
None
|
None
|
||||||
Returns:
|
Returns:
|
||||||
@@ -376,7 +376,7 @@ class InventoryModule(BaseInventoryPlugin):
|
|||||||
def get_instance_data(self, names):
|
def get_instance_data(self, names):
|
||||||
"""Create Inventory of the instance
|
"""Create Inventory of the instance
|
||||||
|
|
||||||
Iterate through the different branches of the instances and collect Informations.
|
Iterate through the different branches of the instances and collect Information.
|
||||||
|
|
||||||
Args:
|
Args:
|
||||||
list(names): List of instance names
|
list(names): List of instance names
|
||||||
@@ -398,7 +398,7 @@ class InventoryModule(BaseInventoryPlugin):
|
|||||||
def get_network_data(self, names):
|
def get_network_data(self, names):
|
||||||
"""Create Inventory of the instance
|
"""Create Inventory of the instance
|
||||||
|
|
||||||
Iterate through the different branches of the instances and collect Informations.
|
Iterate through the different branches of the instances and collect Information.
|
||||||
|
|
||||||
Args:
|
Args:
|
||||||
list(names): List of instance names
|
list(names): List of instance names
|
||||||
@@ -451,12 +451,12 @@ class InventoryModule(BaseInventoryPlugin):
|
|||||||
return network_configuration
|
return network_configuration
|
||||||
|
|
||||||
def get_prefered_instance_network_interface(self, instance_name):
|
def get_prefered_instance_network_interface(self, instance_name):
|
||||||
"""Helper to get the prefered interface of thr instance
|
"""Helper to get the preferred interface of thr instance
|
||||||
|
|
||||||
Helper to get the prefered interface provide by neme pattern from 'prefered_instance_network_interface'.
|
Helper to get the preferred interface provide by neme pattern from 'prefered_instance_network_interface'.
|
||||||
|
|
||||||
Args:
|
Args:
|
||||||
str(containe_name): name of instance
|
str(instance_name): name of instance
|
||||||
Kwargs:
|
Kwargs:
|
||||||
None
|
None
|
||||||
Raises:
|
Raises:
|
||||||
@@ -481,7 +481,7 @@ class InventoryModule(BaseInventoryPlugin):
|
|||||||
Helper to get the VLAN_ID from the instance
|
Helper to get the VLAN_ID from the instance
|
||||||
|
|
||||||
Args:
|
Args:
|
||||||
str(containe_name): name of instance
|
str(instance_name): name of instance
|
||||||
Kwargs:
|
Kwargs:
|
||||||
None
|
None
|
||||||
Raises:
|
Raises:
|
||||||
@@ -563,7 +563,7 @@ class InventoryModule(BaseInventoryPlugin):
|
|||||||
else:
|
else:
|
||||||
path[instance_name][key] = value
|
path[instance_name][key] = value
|
||||||
except KeyError as err:
|
except KeyError as err:
|
||||||
raise AnsibleParserError("Unable to store Informations: {0}".format(to_native(err)))
|
raise AnsibleParserError("Unable to store Information: {0}".format(to_native(err)))
|
||||||
|
|
||||||
def extract_information_from_instance_configs(self):
|
def extract_information_from_instance_configs(self):
|
||||||
"""Process configuration information
|
"""Process configuration information
|
||||||
@@ -683,7 +683,7 @@ class InventoryModule(BaseInventoryPlugin):
|
|||||||
continue
|
continue
|
||||||
# add instance
|
# add instance
|
||||||
self.inventory.add_host(instance_name)
|
self.inventory.add_host(instance_name)
|
||||||
# add network informations
|
# add network information
|
||||||
self.build_inventory_network(instance_name)
|
self.build_inventory_network(instance_name)
|
||||||
# add os
|
# add os
|
||||||
v = self._get_data_entry('inventory/{0}/os'.format(instance_name))
|
v = self._get_data_entry('inventory/{0}/os'.format(instance_name))
|
||||||
|
|||||||
@@ -85,6 +85,11 @@ DOCUMENTATION = '''
|
|||||||
type: boolean
|
type: boolean
|
||||||
default: false
|
default: false
|
||||||
version_added: 6.1.0
|
version_added: 6.1.0
|
||||||
|
use_arp_ping:
|
||||||
|
description: Whether to always (V(true)) use the quick ARP ping or (V(false)) a slower but more reliable method.
|
||||||
|
type: boolean
|
||||||
|
default: true
|
||||||
|
version_added: 7.4.0
|
||||||
notes:
|
notes:
|
||||||
- At least one of ipv4 or ipv6 is required to be True, both can be True, but they cannot both be False.
|
- At least one of ipv4 or ipv6 is required to be True, both can be True, but they cannot both be False.
|
||||||
- 'TODO: add OS fingerprinting'
|
- 'TODO: add OS fingerprinting'
|
||||||
@@ -196,40 +201,43 @@ class InventoryModule(BaseInventoryPlugin, Constructable, Cacheable):
|
|||||||
# setup command
|
# setup command
|
||||||
cmd = [self._nmap]
|
cmd = [self._nmap]
|
||||||
|
|
||||||
if self._options['sudo']:
|
if self.get_option('sudo'):
|
||||||
cmd.insert(0, 'sudo')
|
cmd.insert(0, 'sudo')
|
||||||
|
|
||||||
if self._options['port']:
|
if self.get_option('port'):
|
||||||
cmd.append('-p')
|
cmd.append('-p')
|
||||||
cmd.append(self._options['port'])
|
cmd.append(self.get_option('port'))
|
||||||
|
|
||||||
if not self._options['ports']:
|
if not self.get_option('ports'):
|
||||||
cmd.append('-sP')
|
cmd.append('-sP')
|
||||||
|
|
||||||
if self._options['ipv4'] and not self._options['ipv6']:
|
if self.get_option('ipv4') and not self.get_option('ipv6'):
|
||||||
cmd.append('-4')
|
cmd.append('-4')
|
||||||
elif self._options['ipv6'] and not self._options['ipv4']:
|
elif self.get_option('ipv6') and not self.get_option('ipv4'):
|
||||||
cmd.append('-6')
|
cmd.append('-6')
|
||||||
elif not self._options['ipv6'] and not self._options['ipv4']:
|
elif not self.get_option('ipv6') and not self.get_option('ipv4'):
|
||||||
raise AnsibleParserError('One of ipv4 or ipv6 must be enabled for this plugin')
|
raise AnsibleParserError('One of ipv4 or ipv6 must be enabled for this plugin')
|
||||||
|
|
||||||
if self._options['exclude']:
|
if self.get_option('exclude'):
|
||||||
cmd.append('--exclude')
|
cmd.append('--exclude')
|
||||||
cmd.append(','.join(self._options['exclude']))
|
cmd.append(','.join(self.get_option('exclude')))
|
||||||
|
|
||||||
if self._options['dns_resolve']:
|
if self.get_option('dns_resolve'):
|
||||||
cmd.append('-n')
|
cmd.append('-n')
|
||||||
|
|
||||||
if self._options['udp_scan']:
|
if self.get_option('udp_scan'):
|
||||||
cmd.append('-sU')
|
cmd.append('-sU')
|
||||||
|
|
||||||
if self._options['icmp_timestamp']:
|
if self.get_option('icmp_timestamp'):
|
||||||
cmd.append('-PP')
|
cmd.append('-PP')
|
||||||
|
|
||||||
if self._options['open']:
|
if self.get_option('open'):
|
||||||
cmd.append('--open')
|
cmd.append('--open')
|
||||||
|
|
||||||
cmd.append(self._options['address'])
|
if not self.get_option('use_arp_ping'):
|
||||||
|
cmd.append('--disable-arp-ping')
|
||||||
|
|
||||||
|
cmd.append(self.get_option('address'))
|
||||||
try:
|
try:
|
||||||
# execute
|
# execute
|
||||||
p = Popen(cmd, stdout=PIPE, stderr=PIPE)
|
p = Popen(cmd, stdout=PIPE, stderr=PIPE)
|
||||||
|
|||||||
@@ -102,7 +102,7 @@ DOCUMENTATION = '''
|
|||||||
type: bool
|
type: bool
|
||||||
qemu_extended_statuses:
|
qemu_extended_statuses:
|
||||||
description:
|
description:
|
||||||
- Requires O(want_facts) to be set to V(true) to function. This will allow you to differentiate betweend C(paused) and C(prelaunch)
|
- Requires O(want_facts) to be set to V(true) to function. This will allow you to differentiate between C(paused) and C(prelaunch)
|
||||||
statuses of the QEMU VMs.
|
statuses of the QEMU VMs.
|
||||||
- This introduces multiple groups [prefixed with O(group_prefix)] C(prelaunch) and C(paused).
|
- This introduces multiple groups [prefixed with O(group_prefix)] C(prelaunch) and C(paused).
|
||||||
default: false
|
default: false
|
||||||
|
|||||||
@@ -25,7 +25,10 @@ DOCUMENTATION = """
|
|||||||
type: list
|
type: list
|
||||||
elements: str
|
elements: str
|
||||||
search:
|
search:
|
||||||
description: Field to retrieve, for example V(name) or V(id).
|
description:
|
||||||
|
- Field to retrieve, for example V(name) or V(id).
|
||||||
|
- If set to V(id), only zero or one element can be returned.
|
||||||
|
Use the Jinja C(first) filter to get the only list element.
|
||||||
type: str
|
type: str
|
||||||
default: name
|
default: name
|
||||||
version_added: 5.7.0
|
version_added: 5.7.0
|
||||||
@@ -39,27 +42,27 @@ DOCUMENTATION = """
|
|||||||
"""
|
"""
|
||||||
|
|
||||||
EXAMPLES = """
|
EXAMPLES = """
|
||||||
- name: "Get 'password' from Bitwarden record named 'a_test'"
|
- name: "Get 'password' from all Bitwarden records named 'a_test'"
|
||||||
ansible.builtin.debug:
|
ansible.builtin.debug:
|
||||||
msg: >-
|
msg: >-
|
||||||
{{ lookup('community.general.bitwarden', 'a_test', field='password') }}
|
{{ lookup('community.general.bitwarden', 'a_test', field='password') }}
|
||||||
|
|
||||||
- name: "Get 'password' from Bitwarden record with id 'bafba515-af11-47e6-abe3-af1200cd18b2'"
|
- name: "Get 'password' from Bitwarden record with ID 'bafba515-af11-47e6-abe3-af1200cd18b2'"
|
||||||
ansible.builtin.debug:
|
ansible.builtin.debug:
|
||||||
msg: >-
|
msg: >-
|
||||||
{{ lookup('community.general.bitwarden', 'bafba515-af11-47e6-abe3-af1200cd18b2', search='id', field='password') }}
|
{{ lookup('community.general.bitwarden', 'bafba515-af11-47e6-abe3-af1200cd18b2', search='id', field='password') | first }}
|
||||||
|
|
||||||
- name: "Get 'password' from Bitwarden record named 'a_test' from collection"
|
- name: "Get 'password' from all Bitwarden records named 'a_test' from collection"
|
||||||
ansible.builtin.debug:
|
ansible.builtin.debug:
|
||||||
msg: >-
|
msg: >-
|
||||||
{{ lookup('community.general.bitwarden', 'a_test', field='password', collection_id='bafba515-af11-47e6-abe3-af1200cd18b2') }}
|
{{ lookup('community.general.bitwarden', 'a_test', field='password', collection_id='bafba515-af11-47e6-abe3-af1200cd18b2') }}
|
||||||
|
|
||||||
- name: "Get full Bitwarden record named 'a_test'"
|
- name: "Get list of all full Bitwarden records named 'a_test'"
|
||||||
ansible.builtin.debug:
|
ansible.builtin.debug:
|
||||||
msg: >-
|
msg: >-
|
||||||
{{ lookup('community.general.bitwarden', 'a_test') }}
|
{{ lookup('community.general.bitwarden', 'a_test') }}
|
||||||
|
|
||||||
- name: "Get custom field 'api_key' from Bitwarden record named 'a_test'"
|
- name: "Get custom field 'api_key' from all Bitwarden records named 'a_test'"
|
||||||
ansible.builtin.debug:
|
ansible.builtin.debug:
|
||||||
msg: >-
|
msg: >-
|
||||||
{{ lookup('community.general.bitwarden', 'a_test', field='api_key') }}
|
{{ lookup('community.general.bitwarden', 'a_test', field='api_key') }}
|
||||||
@@ -67,9 +70,12 @@ EXAMPLES = """
|
|||||||
|
|
||||||
RETURN = """
|
RETURN = """
|
||||||
_raw:
|
_raw:
|
||||||
description: List of requested field or JSON object of list of matches.
|
description:
|
||||||
|
- A one-element list that contains a list of requested fields or JSON objects of matches.
|
||||||
|
- If you use C(query), you get a list of lists. If you use C(lookup) without C(wantlist=true),
|
||||||
|
this always gets reduced to a list of field values or JSON objects.
|
||||||
type: list
|
type: list
|
||||||
elements: raw
|
elements: list
|
||||||
"""
|
"""
|
||||||
|
|
||||||
from subprocess import Popen, PIPE
|
from subprocess import Popen, PIPE
|
||||||
@@ -132,20 +138,29 @@ class Bitwarden(object):
|
|||||||
If field is None, return the whole record for each match.
|
If field is None, return the whole record for each match.
|
||||||
"""
|
"""
|
||||||
matches = self._get_matches(search_value, search_field, collection_id)
|
matches = self._get_matches(search_value, search_field, collection_id)
|
||||||
|
if not field:
|
||||||
if field in ['autofillOnPageLoad', 'password', 'passwordRevisionDate', 'totp', 'uris', 'username']:
|
|
||||||
return [match['login'][field] for match in matches]
|
|
||||||
elif not field:
|
|
||||||
return matches
|
return matches
|
||||||
else:
|
field_matches = []
|
||||||
custom_field_matches = []
|
for match in matches:
|
||||||
for match in matches:
|
# if there are no custom fields, then `match` has no key 'fields'
|
||||||
|
if 'fields' in match:
|
||||||
|
custom_field_found = False
|
||||||
for custom_field in match['fields']:
|
for custom_field in match['fields']:
|
||||||
if custom_field['name'] == field:
|
if field == custom_field['name']:
|
||||||
custom_field_matches.append(custom_field['value'])
|
field_matches.append(custom_field['value'])
|
||||||
if matches and not custom_field_matches:
|
custom_field_found = True
|
||||||
raise AnsibleError("Custom field {field} does not exist in {search_value}".format(field=field, search_value=search_value))
|
break
|
||||||
return custom_field_matches
|
if custom_field_found:
|
||||||
|
continue
|
||||||
|
if 'login' in match and field in match['login']:
|
||||||
|
field_matches.append(match['login'][field])
|
||||||
|
continue
|
||||||
|
if field in match:
|
||||||
|
field_matches.append(match[field])
|
||||||
|
continue
|
||||||
|
if matches and not field_matches:
|
||||||
|
raise AnsibleError("field {field} does not exist in {search_value}".format(field=field, search_value=search_value))
|
||||||
|
return field_matches
|
||||||
|
|
||||||
|
|
||||||
class LookupModule(LookupBase):
|
class LookupModule(LookupBase):
|
||||||
|
|||||||
@@ -70,6 +70,11 @@ DOCUMENTATION = '''
|
|||||||
- "Class."
|
- "Class."
|
||||||
type: str
|
type: str
|
||||||
default: 'IN'
|
default: 'IN'
|
||||||
|
tcp:
|
||||||
|
description: Use TCP to lookup DNS records.
|
||||||
|
default: false
|
||||||
|
type: bool
|
||||||
|
version_added: 7.5.0
|
||||||
notes:
|
notes:
|
||||||
- ALL is not a record per-se, merely the listed fields are available for any record results you retrieve in the form of a dictionary.
|
- ALL is not a record per-se, merely the listed fields are available for any record results you retrieve in the form of a dictionary.
|
||||||
- While the 'dig' lookup plugin supports anything which dnspython supports out of the box, only a subset can be converted into a dictionary.
|
- While the 'dig' lookup plugin supports anything which dnspython supports out of the box, only a subset can be converted into a dictionary.
|
||||||
@@ -329,6 +334,7 @@ class LookupModule(LookupBase):
|
|||||||
flat = self.get_option('flat')
|
flat = self.get_option('flat')
|
||||||
fail_on_error = self.get_option('fail_on_error')
|
fail_on_error = self.get_option('fail_on_error')
|
||||||
real_empty = self.get_option('real_empty')
|
real_empty = self.get_option('real_empty')
|
||||||
|
tcp = self.get_option('tcp')
|
||||||
try:
|
try:
|
||||||
rdclass = dns.rdataclass.from_text(self.get_option('class'))
|
rdclass = dns.rdataclass.from_text(self.get_option('class'))
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
@@ -375,6 +381,8 @@ class LookupModule(LookupBase):
|
|||||||
fail_on_error = boolean(arg)
|
fail_on_error = boolean(arg)
|
||||||
elif opt == 'real_empty':
|
elif opt == 'real_empty':
|
||||||
real_empty = boolean(arg)
|
real_empty = boolean(arg)
|
||||||
|
elif opt == 'tcp':
|
||||||
|
tcp = boolean(arg)
|
||||||
|
|
||||||
continue
|
continue
|
||||||
|
|
||||||
@@ -408,7 +416,7 @@ class LookupModule(LookupBase):
|
|||||||
|
|
||||||
for domain in domains:
|
for domain in domains:
|
||||||
try:
|
try:
|
||||||
answers = myres.query(domain, qtype, rdclass=rdclass)
|
answers = myres.query(domain, qtype, rdclass=rdclass, tcp=tcp)
|
||||||
for rdata in answers:
|
for rdata in answers:
|
||||||
s = rdata.to_text()
|
s = rdata.to_text()
|
||||||
if qtype.upper() == 'TXT':
|
if qtype.upper() == 'TXT':
|
||||||
|
|||||||
@@ -1,5 +1,5 @@
|
|||||||
# -*- coding: utf-8 -*-
|
# -*- coding: utf-8 -*-
|
||||||
# Copyright (c) 2018, Scott Buchanan <sbuchanan@ri.pn>
|
# Copyright (c) 2018, Scott Buchanan <scott@buchanan.works>
|
||||||
# Copyright (c) 2016, Andrew Zenk <azenk@umn.edu> (lastpass.py used as starting point)
|
# Copyright (c) 2016, Andrew Zenk <azenk@umn.edu> (lastpass.py used as starting point)
|
||||||
# Copyright (c) 2018, Ansible Project
|
# Copyright (c) 2018, Ansible Project
|
||||||
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
|
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||||
@@ -38,6 +38,10 @@ DOCUMENTATION = '''
|
|||||||
type: str
|
type: str
|
||||||
subdomain:
|
subdomain:
|
||||||
description: The 1Password subdomain to authenticate against.
|
description: The 1Password subdomain to authenticate against.
|
||||||
|
account_id:
|
||||||
|
description: The account ID to target.
|
||||||
|
type: str
|
||||||
|
version_added: 7.5.0
|
||||||
username:
|
username:
|
||||||
description: The username used to sign in.
|
description: The username used to sign in.
|
||||||
secret_key:
|
secret_key:
|
||||||
@@ -55,6 +59,7 @@ DOCUMENTATION = '''
|
|||||||
performed an initial sign in (meaning C(~/.op/config), C(~/.config/op/config) or C(~/.config/.op/config) exists), then only the
|
performed an initial sign in (meaning C(~/.op/config), C(~/.config/op/config) or C(~/.config/.op/config) exists), then only the
|
||||||
C(master_password) is required. You may optionally specify O(subdomain) in this scenario, otherwise the last used subdomain will be used by C(op).
|
C(master_password) is required. You may optionally specify O(subdomain) in this scenario, otherwise the last used subdomain will be used by C(op).
|
||||||
- This lookup can perform an initial login by providing O(subdomain), O(username), O(secret_key), and O(master_password).
|
- This lookup can perform an initial login by providing O(subdomain), O(username), O(secret_key), and O(master_password).
|
||||||
|
- Can target a specific account by providing the O(account_id).
|
||||||
- Due to the B(very) sensitive nature of these credentials, it is B(highly) recommended that you only pass in the minimal credentials
|
- Due to the B(very) sensitive nature of these credentials, it is B(highly) recommended that you only pass in the minimal credentials
|
||||||
needed at any given time. Also, store these credentials in an Ansible Vault using a key that is equal to or greater in strength
|
needed at any given time. Also, store these credentials in an Ansible Vault using a key that is equal to or greater in strength
|
||||||
to the 1Password master password.
|
to the 1Password master password.
|
||||||
@@ -93,6 +98,12 @@ EXAMPLES = """
|
|||||||
master_password=vault_master_password,
|
master_password=vault_master_password,
|
||||||
username='tweety@acme.com',
|
username='tweety@acme.com',
|
||||||
secret_key=vault_secret_key)
|
secret_key=vault_secret_key)
|
||||||
|
|
||||||
|
- name: Retrieve password from specific account
|
||||||
|
ansible.builtin.debug:
|
||||||
|
var: lookup('community.general.onepassword',
|
||||||
|
'HAL 9000',
|
||||||
|
account_id='abc123')
|
||||||
"""
|
"""
|
||||||
|
|
||||||
RETURN = """
|
RETURN = """
|
||||||
@@ -116,16 +127,34 @@ from ansible.module_utils.six import with_metaclass
|
|||||||
from ansible_collections.community.general.plugins.module_utils.onepassword import OnePasswordConfig
|
from ansible_collections.community.general.plugins.module_utils.onepassword import OnePasswordConfig
|
||||||
|
|
||||||
|
|
||||||
|
def _lower_if_possible(value):
|
||||||
|
"""Return the lower case version value, otherwise return the value"""
|
||||||
|
try:
|
||||||
|
return value.lower()
|
||||||
|
except AttributeError:
|
||||||
|
return value
|
||||||
|
|
||||||
|
|
||||||
class OnePassCLIBase(with_metaclass(abc.ABCMeta, object)):
|
class OnePassCLIBase(with_metaclass(abc.ABCMeta, object)):
|
||||||
bin = "op"
|
bin = "op"
|
||||||
|
|
||||||
def __init__(self, subdomain=None, domain="1password.com", username=None, secret_key=None, master_password=None, service_account_token=None):
|
def __init__(
|
||||||
|
self,
|
||||||
|
subdomain=None,
|
||||||
|
domain="1password.com",
|
||||||
|
username=None,
|
||||||
|
secret_key=None,
|
||||||
|
master_password=None,
|
||||||
|
service_account_token=None,
|
||||||
|
account_id=None,
|
||||||
|
):
|
||||||
self.subdomain = subdomain
|
self.subdomain = subdomain
|
||||||
self.domain = domain
|
self.domain = domain
|
||||||
self.username = username
|
self.username = username
|
||||||
self.master_password = master_password
|
self.master_password = master_password
|
||||||
self.secret_key = secret_key
|
self.secret_key = secret_key
|
||||||
self.service_account_token = service_account_token
|
self.service_account_token = service_account_token
|
||||||
|
self.account_id = account_id
|
||||||
|
|
||||||
self._path = None
|
self._path = None
|
||||||
self._version = None
|
self._version = None
|
||||||
@@ -293,7 +322,9 @@ class OnePassCLIv1(OnePassCLIBase):
|
|||||||
|
|
||||||
def assert_logged_in(self):
|
def assert_logged_in(self):
|
||||||
args = ["get", "account"]
|
args = ["get", "account"]
|
||||||
if self.subdomain:
|
if self.account_id:
|
||||||
|
args.extend(["--account", self.account_id])
|
||||||
|
elif self.subdomain:
|
||||||
account = "{subdomain}.{domain}".format(subdomain=self.subdomain, domain=self.domain)
|
account = "{subdomain}.{domain}".format(subdomain=self.subdomain, domain=self.domain)
|
||||||
args.extend(["--account", account])
|
args.extend(["--account", account])
|
||||||
|
|
||||||
@@ -326,6 +357,10 @@ class OnePassCLIv1(OnePassCLIBase):
|
|||||||
|
|
||||||
def get_raw(self, item_id, vault=None, token=None):
|
def get_raw(self, item_id, vault=None, token=None):
|
||||||
args = ["get", "item", item_id]
|
args = ["get", "item", item_id]
|
||||||
|
|
||||||
|
if self.account_id:
|
||||||
|
args.extend(["--account", self.account_id])
|
||||||
|
|
||||||
if vault is not None:
|
if vault is not None:
|
||||||
args += ["--vault={0}".format(vault)]
|
args += ["--vault={0}".format(vault)]
|
||||||
|
|
||||||
@@ -453,6 +488,7 @@ class OnePassCLIv2(OnePassCLIBase):
|
|||||||
}
|
}
|
||||||
"""
|
"""
|
||||||
data = json.loads(data_json)
|
data = json.loads(data_json)
|
||||||
|
field_name = _lower_if_possible(field_name)
|
||||||
for field in data.get("fields", []):
|
for field in data.get("fields", []):
|
||||||
if section_title is None:
|
if section_title is None:
|
||||||
# If the field name exists in the section, return that value
|
# If the field name exists in the section, return that value
|
||||||
@@ -461,24 +497,26 @@ class OnePassCLIv2(OnePassCLIBase):
|
|||||||
|
|
||||||
# If the field name doesn't exist in the section, match on the value of "label"
|
# If the field name doesn't exist in the section, match on the value of "label"
|
||||||
# then "id" and return "value"
|
# then "id" and return "value"
|
||||||
if field.get("label") == field_name:
|
if field.get("label", "").lower() == field_name:
|
||||||
return field["value"]
|
return field.get("value", "")
|
||||||
|
|
||||||
if field.get("id") == field_name:
|
if field.get("id", "").lower() == field_name:
|
||||||
return field["value"]
|
return field.get("value", "")
|
||||||
|
|
||||||
# Look at the section data and get an indentifier. The value of 'id' is either a unique ID
|
# Look at the section data and get an identifier. The value of 'id' is either a unique ID
|
||||||
# or a human-readable string. If a 'label' field exists, prefer that since
|
# or a human-readable string. If a 'label' field exists, prefer that since
|
||||||
# it is the value visible in the 1Password UI when both 'id' and 'label' exist.
|
# it is the value visible in the 1Password UI when both 'id' and 'label' exist.
|
||||||
section = field.get("section", {})
|
section = field.get("section", {})
|
||||||
current_section_title = section.get("label", section.get("id"))
|
section_title = _lower_if_possible(section_title)
|
||||||
|
|
||||||
|
current_section_title = section.get("label", section.get("id", "")).lower()
|
||||||
if section_title == current_section_title:
|
if section_title == current_section_title:
|
||||||
# In the correct section. Check "label" then "id" for the desired field_name
|
# In the correct section. Check "label" then "id" for the desired field_name
|
||||||
if field.get("label") == field_name:
|
if field.get("label") == field_name:
|
||||||
return field["value"]
|
return field.get("value", "")
|
||||||
|
|
||||||
if field.get("id") == field_name:
|
if field.get("id") == field_name:
|
||||||
return field["value"]
|
return field.get("value", "")
|
||||||
|
|
||||||
return ""
|
return ""
|
||||||
|
|
||||||
@@ -502,7 +540,9 @@ class OnePassCLIv2(OnePassCLIBase):
|
|||||||
# an interactive prompt. Only run 'op account get' after first listing accounts to see
|
# an interactive prompt. Only run 'op account get' after first listing accounts to see
|
||||||
# if there are any previously configured accounts.
|
# if there are any previously configured accounts.
|
||||||
args = ["account", "get"]
|
args = ["account", "get"]
|
||||||
if self.subdomain:
|
if self.account_id:
|
||||||
|
args.extend(["--account", self.account_id])
|
||||||
|
elif self.subdomain:
|
||||||
account = "{subdomain}.{domain}".format(subdomain=self.subdomain, domain=self.domain)
|
account = "{subdomain}.{domain}".format(subdomain=self.subdomain, domain=self.domain)
|
||||||
args.extend(["--account", account])
|
args.extend(["--account", account])
|
||||||
|
|
||||||
@@ -533,6 +573,10 @@ class OnePassCLIv2(OnePassCLIBase):
|
|||||||
|
|
||||||
def get_raw(self, item_id, vault=None, token=None):
|
def get_raw(self, item_id, vault=None, token=None):
|
||||||
args = ["item", "get", item_id, "--format", "json"]
|
args = ["item", "get", item_id, "--format", "json"]
|
||||||
|
|
||||||
|
if self.account_id:
|
||||||
|
args.extend(["--account", self.account_id])
|
||||||
|
|
||||||
if vault is not None:
|
if vault is not None:
|
||||||
args += ["--vault={0}".format(vault)]
|
args += ["--vault={0}".format(vault)]
|
||||||
|
|
||||||
@@ -559,13 +603,14 @@ class OnePassCLIv2(OnePassCLIBase):
|
|||||||
|
|
||||||
class OnePass(object):
|
class OnePass(object):
|
||||||
def __init__(self, subdomain=None, domain="1password.com", username=None, secret_key=None, master_password=None,
|
def __init__(self, subdomain=None, domain="1password.com", username=None, secret_key=None, master_password=None,
|
||||||
service_account_token=None):
|
service_account_token=None, account_id=None):
|
||||||
self.subdomain = subdomain
|
self.subdomain = subdomain
|
||||||
self.domain = domain
|
self.domain = domain
|
||||||
self.username = username
|
self.username = username
|
||||||
self.secret_key = secret_key
|
self.secret_key = secret_key
|
||||||
self.master_password = master_password
|
self.master_password = master_password
|
||||||
self.service_account_token = service_account_token
|
self.service_account_token = service_account_token
|
||||||
|
self.account_id = account_id
|
||||||
|
|
||||||
self.logged_in = False
|
self.logged_in = False
|
||||||
self.token = None
|
self.token = None
|
||||||
@@ -578,7 +623,7 @@ class OnePass(object):
|
|||||||
for cls in OnePassCLIBase.__subclasses__():
|
for cls in OnePassCLIBase.__subclasses__():
|
||||||
if cls.supports_version == version.split(".")[0]:
|
if cls.supports_version == version.split(".")[0]:
|
||||||
try:
|
try:
|
||||||
return cls(self.subdomain, self.domain, self.username, self.secret_key, self.master_password, self.service_account_token)
|
return cls(self.subdomain, self.domain, self.username, self.secret_key, self.master_password, self.service_account_token, self.account_id)
|
||||||
except TypeError as e:
|
except TypeError as e:
|
||||||
raise AnsibleLookupError(e)
|
raise AnsibleLookupError(e)
|
||||||
|
|
||||||
@@ -642,8 +687,9 @@ class LookupModule(LookupBase):
|
|||||||
secret_key = self.get_option("secret_key")
|
secret_key = self.get_option("secret_key")
|
||||||
master_password = self.get_option("master_password")
|
master_password = self.get_option("master_password")
|
||||||
service_account_token = self.get_option("service_account_token")
|
service_account_token = self.get_option("service_account_token")
|
||||||
|
account_id = self.get_option("account_id")
|
||||||
|
|
||||||
op = OnePass(subdomain, domain, username, secret_key, master_password, service_account_token)
|
op = OnePass(subdomain, domain, username, secret_key, master_password, service_account_token, account_id)
|
||||||
op.assert_logged_in()
|
op.assert_logged_in()
|
||||||
|
|
||||||
values = []
|
values = []
|
||||||
|
|||||||
@@ -35,6 +35,10 @@ DOCUMENTATION = '''
|
|||||||
version_added: 6.0.0
|
version_added: 6.0.0
|
||||||
default: '1password.com'
|
default: '1password.com'
|
||||||
type: str
|
type: str
|
||||||
|
account_id:
|
||||||
|
description: The account ID to target.
|
||||||
|
type: str
|
||||||
|
version_added: 7.5.0
|
||||||
username:
|
username:
|
||||||
description: The username used to sign in.
|
description: The username used to sign in.
|
||||||
secret_key:
|
secret_key:
|
||||||
@@ -52,6 +56,7 @@ DOCUMENTATION = '''
|
|||||||
performed an initial sign in (meaning C(~/.op/config exists)), then only the O(master_password) is required.
|
performed an initial sign in (meaning C(~/.op/config exists)), then only the O(master_password) is required.
|
||||||
You may optionally specify O(subdomain) in this scenario, otherwise the last used subdomain will be used by C(op).
|
You may optionally specify O(subdomain) in this scenario, otherwise the last used subdomain will be used by C(op).
|
||||||
- This lookup can perform an initial login by providing O(subdomain), O(username), O(secret_key), and O(master_password).
|
- This lookup can perform an initial login by providing O(subdomain), O(username), O(secret_key), and O(master_password).
|
||||||
|
- Can target a specific account by providing the O(account_id).
|
||||||
- Due to the B(very) sensitive nature of these credentials, it is B(highly) recommended that you only pass in the minimal credentials
|
- Due to the B(very) sensitive nature of these credentials, it is B(highly) recommended that you only pass in the minimal credentials
|
||||||
needed at any given time. Also, store these credentials in an Ansible Vault using a key that is equal to or greater in strength
|
needed at any given time. Also, store these credentials in an Ansible Vault using a key that is equal to or greater in strength
|
||||||
to the 1Password master password.
|
to the 1Password master password.
|
||||||
@@ -96,8 +101,9 @@ class LookupModule(LookupBase):
|
|||||||
secret_key = self.get_option("secret_key")
|
secret_key = self.get_option("secret_key")
|
||||||
master_password = self.get_option("master_password")
|
master_password = self.get_option("master_password")
|
||||||
service_account_token = self.get_option("service_account_token")
|
service_account_token = self.get_option("service_account_token")
|
||||||
|
account_id = self.get_option("account_id")
|
||||||
|
|
||||||
op = OnePass(subdomain, domain, username, secret_key, master_password, service_account_token)
|
op = OnePass(subdomain, domain, username, secret_key, master_password, service_account_token, account_id)
|
||||||
op.assert_logged_in()
|
op.assert_logged_in()
|
||||||
|
|
||||||
values = []
|
values = []
|
||||||
|
|||||||
@@ -72,7 +72,7 @@ DOCUMENTATION = r"""
|
|||||||
type: int
|
type: int
|
||||||
override_special:
|
override_special:
|
||||||
description:
|
description:
|
||||||
- Overide a list of special characters to use in the string.
|
- Override a list of special characters to use in the string.
|
||||||
- If set O(min_special) should be set to a non-default value.
|
- If set O(min_special) should be set to a non-default value.
|
||||||
type: str
|
type: str
|
||||||
override_all:
|
override_all:
|
||||||
@@ -80,6 +80,19 @@ DOCUMENTATION = r"""
|
|||||||
- Override all values of O(numbers), O(upper), O(lower), and O(special) with
|
- Override all values of O(numbers), O(upper), O(lower), and O(special) with
|
||||||
the given list of characters.
|
the given list of characters.
|
||||||
type: str
|
type: str
|
||||||
|
ignore_similar_chars:
|
||||||
|
description:
|
||||||
|
- Ignore similar characters, such as V(l) and V(1), or V(O) and V(0).
|
||||||
|
- These characters can be configured in O(similar_chars).
|
||||||
|
default: false
|
||||||
|
type: bool
|
||||||
|
version_added: 7.5.0
|
||||||
|
similar_chars:
|
||||||
|
description:
|
||||||
|
- Override a list of characters not to be use in the string.
|
||||||
|
default: "il1LoO0"
|
||||||
|
type: str
|
||||||
|
version_added: 7.5.0
|
||||||
base64:
|
base64:
|
||||||
description:
|
description:
|
||||||
- Returns base64 encoded string.
|
- Returns base64 encoded string.
|
||||||
@@ -103,7 +116,7 @@ EXAMPLES = r"""
|
|||||||
var: lookup('community.general.random_string', base64=True)
|
var: lookup('community.general.random_string', base64=True)
|
||||||
# Example result: ['NHZ6eWN5Qk0=']
|
# Example result: ['NHZ6eWN5Qk0=']
|
||||||
|
|
||||||
- name: Generate a random string with 1 lower, 1 upper, 1 number and 1 special char (atleast)
|
- name: Generate a random string with 1 lower, 1 upper, 1 number and 1 special char (at least)
|
||||||
ansible.builtin.debug:
|
ansible.builtin.debug:
|
||||||
var: lookup('community.general.random_string', min_lower=1, min_upper=1, min_special=1, min_numeric=1)
|
var: lookup('community.general.random_string', min_lower=1, min_upper=1, min_special=1, min_numeric=1)
|
||||||
# Example result: ['&Qw2|E[-']
|
# Example result: ['&Qw2|E[-']
|
||||||
@@ -173,9 +186,17 @@ class LookupModule(LookupBase):
|
|||||||
length = self.get_option("length")
|
length = self.get_option("length")
|
||||||
base64_flag = self.get_option("base64")
|
base64_flag = self.get_option("base64")
|
||||||
override_all = self.get_option("override_all")
|
override_all = self.get_option("override_all")
|
||||||
|
ignore_similar_chars = self.get_option("ignore_similar_chars")
|
||||||
|
similar_chars = self.get_option("similar_chars")
|
||||||
values = ""
|
values = ""
|
||||||
available_chars_set = ""
|
available_chars_set = ""
|
||||||
|
|
||||||
|
if ignore_similar_chars:
|
||||||
|
number_chars = "".join([sc for sc in number_chars if sc not in similar_chars])
|
||||||
|
lower_chars = "".join([sc for sc in lower_chars if sc not in similar_chars])
|
||||||
|
upper_chars = "".join([sc for sc in upper_chars if sc not in similar_chars])
|
||||||
|
special_chars = "".join([sc for sc in special_chars if sc not in similar_chars])
|
||||||
|
|
||||||
if override_all:
|
if override_all:
|
||||||
# Override all the values
|
# Override all the values
|
||||||
available_chars_set = override_all
|
available_chars_set = override_all
|
||||||
|
|||||||
@@ -6,6 +6,7 @@
|
|||||||
from __future__ import absolute_import, division, print_function
|
from __future__ import absolute_import, division, print_function
|
||||||
__metaclass__ = type
|
__metaclass__ = type
|
||||||
|
|
||||||
|
import os
|
||||||
from functools import wraps
|
from functools import wraps
|
||||||
|
|
||||||
from ansible.module_utils.common.collections import is_sequence
|
from ansible.module_utils.common.collections import is_sequence
|
||||||
@@ -204,12 +205,17 @@ class CmdRunner(object):
|
|||||||
environ_update = {}
|
environ_update = {}
|
||||||
self.environ_update = environ_update
|
self.environ_update = environ_update
|
||||||
|
|
||||||
self.command[0] = module.get_bin_path(self.command[0], opt_dirs=path_prefix, required=True)
|
_cmd = self.command[0]
|
||||||
|
self.command[0] = _cmd if (os.path.isabs(_cmd) or '/' in _cmd) else module.get_bin_path(_cmd, opt_dirs=path_prefix, required=True)
|
||||||
|
|
||||||
for mod_param_name, spec in iteritems(module.argument_spec):
|
for mod_param_name, spec in iteritems(module.argument_spec):
|
||||||
if mod_param_name not in self.arg_formats:
|
if mod_param_name not in self.arg_formats:
|
||||||
self.arg_formats[mod_param_name] = _Format.as_default_type(spec.get('type', 'str'), mod_param_name)
|
self.arg_formats[mod_param_name] = _Format.as_default_type(spec.get('type', 'str'), mod_param_name)
|
||||||
|
|
||||||
|
@property
|
||||||
|
def binary(self):
|
||||||
|
return self.command[0]
|
||||||
|
|
||||||
def __call__(self, args_order=None, output_process=None, ignore_value_none=True, check_mode_skip=False, check_mode_return=None, **kwargs):
|
def __call__(self, args_order=None, output_process=None, ignore_value_none=True, check_mode_skip=False, check_mode_return=None, **kwargs):
|
||||||
if output_process is None:
|
if output_process is None:
|
||||||
output_process = _process_as_is
|
output_process = _process_as_is
|
||||||
|
|||||||
29
plugins/module_utils/consul.py
Normal file
29
plugins/module_utils/consul.py
Normal file
@@ -0,0 +1,29 @@
|
|||||||
|
# -*- coding: utf-8 -*-
|
||||||
|
|
||||||
|
# Copyright (c) 2022, Håkon Lerring
|
||||||
|
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||||
|
# SPDX-License-Identifier: GPL-3.0-or-later
|
||||||
|
|
||||||
|
from __future__ import absolute_import, division, print_function
|
||||||
|
__metaclass__ = type
|
||||||
|
|
||||||
|
|
||||||
|
def get_consul_url(configuration):
|
||||||
|
return '%s://%s:%s/v1' % (configuration.scheme,
|
||||||
|
configuration.host, configuration.port)
|
||||||
|
|
||||||
|
|
||||||
|
def get_auth_headers(configuration):
|
||||||
|
if configuration.token is None:
|
||||||
|
return {}
|
||||||
|
else:
|
||||||
|
return {'X-Consul-Token': configuration.token}
|
||||||
|
|
||||||
|
|
||||||
|
class RequestError(Exception):
|
||||||
|
pass
|
||||||
|
|
||||||
|
|
||||||
|
def handle_consul_response_error(response):
|
||||||
|
if 400 <= response.status_code < 600:
|
||||||
|
raise RequestError('%d %s' % (response.status_code, response.content))
|
||||||
@@ -39,7 +39,7 @@ except ImportError:
|
|||||||
LIBCLOUD_IMP_ERR = traceback.format_exc()
|
LIBCLOUD_IMP_ERR = traceback.format_exc()
|
||||||
HAS_LIBCLOUD = False
|
HAS_LIBCLOUD = False
|
||||||
|
|
||||||
# MCP 2.x version patten for location (datacenter) names.
|
# MCP 2.x version pattern for location (datacenter) names.
|
||||||
#
|
#
|
||||||
# Note that this is not a totally reliable way of determining MCP version.
|
# Note that this is not a totally reliable way of determining MCP version.
|
||||||
# Unfortunately, libcloud's NodeLocation currently makes no provision for extended properties.
|
# Unfortunately, libcloud's NodeLocation currently makes no provision for extended properties.
|
||||||
|
|||||||
32
plugins/module_utils/gio_mime.py
Normal file
32
plugins/module_utils/gio_mime.py
Normal file
@@ -0,0 +1,32 @@
|
|||||||
|
# -*- coding: utf-8 -*-
|
||||||
|
# Copyright (c) 2022, Alexei Znamensky <russoz@gmail.com>
|
||||||
|
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||||
|
# SPDX-License-Identifier: GPL-3.0-or-later
|
||||||
|
|
||||||
|
from __future__ import absolute_import, division, print_function
|
||||||
|
__metaclass__ = type
|
||||||
|
|
||||||
|
from ansible_collections.community.general.plugins.module_utils.cmd_runner import CmdRunner, cmd_runner_fmt
|
||||||
|
|
||||||
|
|
||||||
|
def gio_mime_runner(module, **kwargs):
|
||||||
|
return CmdRunner(
|
||||||
|
module,
|
||||||
|
command=['gio', 'mime'],
|
||||||
|
arg_formats=dict(
|
||||||
|
mime_type=cmd_runner_fmt.as_list(),
|
||||||
|
handler=cmd_runner_fmt.as_list(),
|
||||||
|
),
|
||||||
|
**kwargs
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
def gio_mime_get(runner, mime_type):
|
||||||
|
def process(rc, out, err):
|
||||||
|
if err.startswith("No default applications for"):
|
||||||
|
return None
|
||||||
|
out = out.splitlines()[0]
|
||||||
|
return out.split()[-1]
|
||||||
|
|
||||||
|
with runner("mime_type", output_process=process) as ctx:
|
||||||
|
return ctx.run(mime_type=mime_type)
|
||||||
@@ -116,7 +116,7 @@ def gitlab_authentication(module):
|
|||||||
def filter_returned_variables(gitlab_variables):
|
def filter_returned_variables(gitlab_variables):
|
||||||
# pop properties we don't know
|
# pop properties we don't know
|
||||||
existing_variables = [dict(x.attributes) for x in gitlab_variables]
|
existing_variables = [dict(x.attributes) for x in gitlab_variables]
|
||||||
KNOWN = ['key', 'value', 'masked', 'protected', 'variable_type', 'environment_scope']
|
KNOWN = ['key', 'value', 'masked', 'protected', 'variable_type', 'environment_scope', 'raw']
|
||||||
for item in existing_variables:
|
for item in existing_variables:
|
||||||
for key in list(item.keys()):
|
for key in list(item.keys()):
|
||||||
if key not in KNOWN:
|
if key not in KNOWN:
|
||||||
@@ -135,6 +135,7 @@ def vars_to_variables(vars, module):
|
|||||||
"value": str(value),
|
"value": str(value),
|
||||||
"masked": False,
|
"masked": False,
|
||||||
"protected": False,
|
"protected": False,
|
||||||
|
"raw": False,
|
||||||
"variable_type": "env_var",
|
"variable_type": "env_var",
|
||||||
}
|
}
|
||||||
)
|
)
|
||||||
@@ -145,6 +146,7 @@ def vars_to_variables(vars, module):
|
|||||||
"value": value.get('value'),
|
"value": value.get('value'),
|
||||||
"masked": value.get('masked'),
|
"masked": value.get('masked'),
|
||||||
"protected": value.get('protected'),
|
"protected": value.get('protected'),
|
||||||
|
"raw": value.get('raw'),
|
||||||
"variable_type": value.get('variable_type'),
|
"variable_type": value.get('variable_type'),
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|||||||
@@ -203,7 +203,7 @@ class Config(object):
|
|||||||
|
|
||||||
if url == "":
|
if url == "":
|
||||||
raise HwcClientException(
|
raise HwcClientException(
|
||||||
0, "Can not find the enpoint for %s" % service_type)
|
0, "Cannot find the endpoint for %s" % service_type)
|
||||||
|
|
||||||
if url[-1] != "/":
|
if url[-1] != "/":
|
||||||
url += "/"
|
url += "/"
|
||||||
@@ -351,7 +351,7 @@ def wait_to_finish(target, pending, refresh, timeout, min_interval=1, delay=3):
|
|||||||
|
|
||||||
if pending and status not in pending:
|
if pending and status not in pending:
|
||||||
raise HwcModuleException(
|
raise HwcModuleException(
|
||||||
"unexpect status(%s) occurred" % status)
|
"unexpected status(%s) occurred" % status)
|
||||||
|
|
||||||
if not is_last_time:
|
if not is_last_time:
|
||||||
wait *= 2
|
wait *= 2
|
||||||
@@ -362,7 +362,7 @@ def wait_to_finish(target, pending, refresh, timeout, min_interval=1, delay=3):
|
|||||||
|
|
||||||
time.sleep(wait)
|
time.sleep(wait)
|
||||||
|
|
||||||
raise HwcModuleException("asycn wait timeout after %d seconds" % timeout)
|
raise HwcModuleException("async wait timeout after %d seconds" % timeout)
|
||||||
|
|
||||||
|
|
||||||
def navigate_value(data, index, array_index=None):
|
def navigate_value(data, index, array_index=None):
|
||||||
|
|||||||
@@ -116,6 +116,9 @@ URL_AUTHZ_PERMISSIONS = "{url}/admin/realms/{realm}/clients/{client_id}/authz/re
|
|||||||
|
|
||||||
URL_AUTHZ_RESOURCES = "{url}/admin/realms/{realm}/clients/{client_id}/authz/resource-server/resource"
|
URL_AUTHZ_RESOURCES = "{url}/admin/realms/{realm}/clients/{client_id}/authz/resource-server/resource"
|
||||||
|
|
||||||
|
URL_AUTHZ_CUSTOM_POLICY = "{url}/admin/realms/{realm}/clients/{client_id}/authz/resource-server/policy/{policy_type}"
|
||||||
|
URL_AUTHZ_CUSTOM_POLICIES = "{url}/admin/realms/{realm}/clients/{client_id}/authz/resource-server/policy"
|
||||||
|
|
||||||
|
|
||||||
def keycloak_argument_spec():
|
def keycloak_argument_spec():
|
||||||
"""
|
"""
|
||||||
@@ -541,7 +544,7 @@ class KeycloakAPI(object):
|
|||||||
return None
|
return None
|
||||||
|
|
||||||
def get_client_group_available_rolemappings(self, gid, cid, realm="master"):
|
def get_client_group_available_rolemappings(self, gid, cid, realm="master"):
|
||||||
""" Fetch the available role of a client in a specified goup on the Keycloak server.
|
""" Fetch the available role of a client in a specified group on the Keycloak server.
|
||||||
|
|
||||||
:param gid: ID of the group from which to obtain the rolemappings.
|
:param gid: ID of the group from which to obtain the rolemappings.
|
||||||
:param cid: ID of the client from which to obtain the rolemappings.
|
:param cid: ID of the client from which to obtain the rolemappings.
|
||||||
@@ -624,7 +627,7 @@ class KeycloakAPI(object):
|
|||||||
% (rid, realm, str(e)))
|
% (rid, realm, str(e)))
|
||||||
|
|
||||||
def add_group_rolemapping(self, gid, cid, role_rep, realm="master"):
|
def add_group_rolemapping(self, gid, cid, role_rep, realm="master"):
|
||||||
""" Fetch the composite role of a client in a specified goup on the Keycloak server.
|
""" Fetch the composite role of a client in a specified group on the Keycloak server.
|
||||||
|
|
||||||
:param gid: ID of the group from which to obtain the rolemappings.
|
:param gid: ID of the group from which to obtain the rolemappings.
|
||||||
:param cid: ID of the client from which to obtain the rolemappings.
|
:param cid: ID of the client from which to obtain the rolemappings.
|
||||||
@@ -777,7 +780,8 @@ class KeycloakAPI(object):
|
|||||||
users_url += '?username=%s&exact=true' % username
|
users_url += '?username=%s&exact=true' % username
|
||||||
try:
|
try:
|
||||||
userrep = None
|
userrep = None
|
||||||
users = json.loads(to_native(open_url(users_url, method='GET', headers=self.restheaders, timeout=self.connection_timeout,
|
users = json.loads(to_native(open_url(users_url, method='GET', http_agent=self.http_agent, headers=self.restheaders,
|
||||||
|
timeout=self.connection_timeout,
|
||||||
validate_certs=self.validate_certs).read()))
|
validate_certs=self.validate_certs).read()))
|
||||||
for user in users:
|
for user in users:
|
||||||
if user['username'] == username:
|
if user['username'] == username:
|
||||||
@@ -803,7 +807,8 @@ class KeycloakAPI(object):
|
|||||||
|
|
||||||
service_account_user_url = URL_CLIENT_SERVICE_ACCOUNT_USER.format(url=self.baseurl, realm=realm, id=cid)
|
service_account_user_url = URL_CLIENT_SERVICE_ACCOUNT_USER.format(url=self.baseurl, realm=realm, id=cid)
|
||||||
try:
|
try:
|
||||||
return json.loads(to_native(open_url(service_account_user_url, method='GET', headers=self.restheaders, timeout=self.connection_timeout,
|
return json.loads(to_native(open_url(service_account_user_url, method='GET', http_agent=self.http_agent, headers=self.restheaders,
|
||||||
|
timeout=self.connection_timeout,
|
||||||
validate_certs=self.validate_certs).read()))
|
validate_certs=self.validate_certs).read()))
|
||||||
except ValueError as e:
|
except ValueError as e:
|
||||||
self.module.fail_json(msg='API returned incorrect JSON when trying to obtain the service-account-user for realm %s and client_id %s: %s'
|
self.module.fail_json(msg='API returned incorrect JSON when trying to obtain the service-account-user for realm %s and client_id %s: %s'
|
||||||
@@ -1233,7 +1238,7 @@ class KeycloakAPI(object):
|
|||||||
|
|
||||||
:param realm: Realm in which the clientscope resides.
|
:param realm: Realm in which the clientscope resides.
|
||||||
:param client_id: The client in which the clientscope resides.
|
:param client_id: The client in which the clientscope resides.
|
||||||
:return The optinal clientscopes of this realm or client
|
:return The optional clientscopes of this realm or client
|
||||||
"""
|
"""
|
||||||
url = URL_OPTIONAL_CLIENTSCOPES if client_id is None else URL_CLIENT_OPTIONAL_CLIENTSCOPES
|
url = URL_OPTIONAL_CLIENTSCOPES if client_id is None else URL_CLIENT_OPTIONAL_CLIENTSCOPES
|
||||||
return self._get_clientscopes_of_type(realm, url, 'optional', client_id)
|
return self._get_clientscopes_of_type(realm, url, 'optional', client_id)
|
||||||
@@ -1246,7 +1251,7 @@ class KeycloakAPI(object):
|
|||||||
|
|
||||||
:param realm: Realm in which the clientscope resides.
|
:param realm: Realm in which the clientscope resides.
|
||||||
:param url_template the template for the right type
|
:param url_template the template for the right type
|
||||||
:param scope_type this can be either optinal or default
|
:param scope_type this can be either optional or default
|
||||||
:param client_id: The client in which the clientscope resides.
|
:param client_id: The client in which the clientscope resides.
|
||||||
:return The clientscopes of the specified type of this realm
|
:return The clientscopes of the specified type of this realm
|
||||||
"""
|
"""
|
||||||
@@ -1268,7 +1273,7 @@ class KeycloakAPI(object):
|
|||||||
|
|
||||||
def _decide_url_type_clientscope(self, client_id=None, scope_type="default"):
|
def _decide_url_type_clientscope(self, client_id=None, scope_type="default"):
|
||||||
"""Decides which url to use.
|
"""Decides which url to use.
|
||||||
:param scope_type this can be either optinal or default
|
:param scope_type this can be either optional or default
|
||||||
:param client_id: The client in which the clientscope resides.
|
:param client_id: The client in which the clientscope resides.
|
||||||
"""
|
"""
|
||||||
if client_id is None:
|
if client_id is None:
|
||||||
@@ -1347,7 +1352,8 @@ class KeycloakAPI(object):
|
|||||||
clientsecret_url = URL_CLIENTSECRET.format(url=self.baseurl, realm=realm, id=id)
|
clientsecret_url = URL_CLIENTSECRET.format(url=self.baseurl, realm=realm, id=id)
|
||||||
|
|
||||||
try:
|
try:
|
||||||
return json.loads(to_native(open_url(clientsecret_url, method='POST', headers=self.restheaders, timeout=self.connection_timeout,
|
return json.loads(to_native(open_url(clientsecret_url, method='POST', http_agent=self.http_agent, headers=self.restheaders,
|
||||||
|
timeout=self.connection_timeout,
|
||||||
validate_certs=self.validate_certs).read()))
|
validate_certs=self.validate_certs).read()))
|
||||||
|
|
||||||
except HTTPError as e:
|
except HTTPError as e:
|
||||||
@@ -1370,7 +1376,8 @@ class KeycloakAPI(object):
|
|||||||
clientsecret_url = URL_CLIENTSECRET.format(url=self.baseurl, realm=realm, id=id)
|
clientsecret_url = URL_CLIENTSECRET.format(url=self.baseurl, realm=realm, id=id)
|
||||||
|
|
||||||
try:
|
try:
|
||||||
return json.loads(to_native(open_url(clientsecret_url, method='GET', headers=self.restheaders, timeout=self.connection_timeout,
|
return json.loads(to_native(open_url(clientsecret_url, method='GET', http_agent=self.http_agent, headers=self.restheaders,
|
||||||
|
timeout=self.connection_timeout,
|
||||||
validate_certs=self.validate_certs).read()))
|
validate_certs=self.validate_certs).read()))
|
||||||
|
|
||||||
except HTTPError as e:
|
except HTTPError as e:
|
||||||
@@ -1513,7 +1520,7 @@ class KeycloakAPI(object):
|
|||||||
def get_subgroup_direct_parent(self, parents, realm="master", children_to_resolve=None):
|
def get_subgroup_direct_parent(self, parents, realm="master", children_to_resolve=None):
|
||||||
""" Get keycloak direct parent group API object for a given chain of parents.
|
""" Get keycloak direct parent group API object for a given chain of parents.
|
||||||
|
|
||||||
To succesfully work the API for subgroups we actually dont need
|
To successfully work the API for subgroups we actually don't need
|
||||||
to "walk the whole tree" for nested groups but only need to know
|
to "walk the whole tree" for nested groups but only need to know
|
||||||
the ID for the direct predecessor of current subgroup. This
|
the ID for the direct predecessor of current subgroup. This
|
||||||
method will guarantee us this information getting there with
|
method will guarantee us this information getting there with
|
||||||
@@ -1672,7 +1679,7 @@ class KeycloakAPI(object):
|
|||||||
:param name: Name of the role to fetch.
|
:param name: Name of the role to fetch.
|
||||||
:param realm: Realm in which the role resides; default 'master'.
|
:param realm: Realm in which the role resides; default 'master'.
|
||||||
"""
|
"""
|
||||||
role_url = URL_REALM_ROLE.format(url=self.baseurl, realm=realm, name=quote(name))
|
role_url = URL_REALM_ROLE.format(url=self.baseurl, realm=realm, name=quote(name, safe=''))
|
||||||
try:
|
try:
|
||||||
return json.loads(to_native(open_url(role_url, method="GET", http_agent=self.http_agent, headers=self.restheaders, timeout=self.connection_timeout,
|
return json.loads(to_native(open_url(role_url, method="GET", http_agent=self.http_agent, headers=self.restheaders, timeout=self.connection_timeout,
|
||||||
validate_certs=self.validate_certs).read()))
|
validate_certs=self.validate_certs).read()))
|
||||||
@@ -1709,7 +1716,7 @@ class KeycloakAPI(object):
|
|||||||
:param rolerep: A RoleRepresentation of the updated role.
|
:param rolerep: A RoleRepresentation of the updated role.
|
||||||
:return HTTPResponse object on success
|
:return HTTPResponse object on success
|
||||||
"""
|
"""
|
||||||
role_url = URL_REALM_ROLE.format(url=self.baseurl, realm=realm, name=quote(rolerep['name']))
|
role_url = URL_REALM_ROLE.format(url=self.baseurl, realm=realm, name=quote(rolerep['name']), safe='')
|
||||||
try:
|
try:
|
||||||
composites = None
|
composites = None
|
||||||
if "composites" in rolerep:
|
if "composites" in rolerep:
|
||||||
@@ -1730,9 +1737,9 @@ class KeycloakAPI(object):
|
|||||||
if clientid is not None:
|
if clientid is not None:
|
||||||
client = self.get_client_by_clientid(client_id=clientid, realm=realm)
|
client = self.get_client_by_clientid(client_id=clientid, realm=realm)
|
||||||
cid = client['id']
|
cid = client['id']
|
||||||
composite_url = URL_CLIENT_ROLE_COMPOSITES.format(url=self.baseurl, realm=realm, id=cid, name=quote(rolerep["name"]))
|
composite_url = URL_CLIENT_ROLE_COMPOSITES.format(url=self.baseurl, realm=realm, id=cid, name=quote(rolerep["name"], safe=''))
|
||||||
else:
|
else:
|
||||||
composite_url = URL_REALM_ROLE_COMPOSITES.format(url=self.baseurl, realm=realm, name=quote(rolerep["name"]))
|
composite_url = URL_REALM_ROLE_COMPOSITES.format(url=self.baseurl, realm=realm, name=quote(rolerep["name"], safe=''))
|
||||||
# Get existing composites
|
# Get existing composites
|
||||||
return json.loads(to_native(open_url(
|
return json.loads(to_native(open_url(
|
||||||
composite_url,
|
composite_url,
|
||||||
@@ -1751,9 +1758,9 @@ class KeycloakAPI(object):
|
|||||||
if clientid is not None:
|
if clientid is not None:
|
||||||
client = self.get_client_by_clientid(client_id=clientid, realm=realm)
|
client = self.get_client_by_clientid(client_id=clientid, realm=realm)
|
||||||
cid = client['id']
|
cid = client['id']
|
||||||
composite_url = URL_CLIENT_ROLE_COMPOSITES.format(url=self.baseurl, realm=realm, id=cid, name=quote(rolerep["name"]))
|
composite_url = URL_CLIENT_ROLE_COMPOSITES.format(url=self.baseurl, realm=realm, id=cid, name=quote(rolerep["name"], safe=''))
|
||||||
else:
|
else:
|
||||||
composite_url = URL_REALM_ROLE_COMPOSITES.format(url=self.baseurl, realm=realm, name=quote(rolerep["name"]))
|
composite_url = URL_REALM_ROLE_COMPOSITES.format(url=self.baseurl, realm=realm, name=quote(rolerep["name"], safe=''))
|
||||||
# Get existing composites
|
# Get existing composites
|
||||||
# create new composites
|
# create new composites
|
||||||
return open_url(composite_url, method='POST', http_agent=self.http_agent, headers=self.restheaders, timeout=self.connection_timeout,
|
return open_url(composite_url, method='POST', http_agent=self.http_agent, headers=self.restheaders, timeout=self.connection_timeout,
|
||||||
@@ -1768,9 +1775,9 @@ class KeycloakAPI(object):
|
|||||||
if clientid is not None:
|
if clientid is not None:
|
||||||
client = self.get_client_by_clientid(client_id=clientid, realm=realm)
|
client = self.get_client_by_clientid(client_id=clientid, realm=realm)
|
||||||
cid = client['id']
|
cid = client['id']
|
||||||
composite_url = URL_CLIENT_ROLE_COMPOSITES.format(url=self.baseurl, realm=realm, id=cid, name=quote(rolerep["name"]))
|
composite_url = URL_CLIENT_ROLE_COMPOSITES.format(url=self.baseurl, realm=realm, id=cid, name=quote(rolerep["name"], safe=''))
|
||||||
else:
|
else:
|
||||||
composite_url = URL_REALM_ROLE_COMPOSITES.format(url=self.baseurl, realm=realm, name=quote(rolerep["name"]))
|
composite_url = URL_REALM_ROLE_COMPOSITES.format(url=self.baseurl, realm=realm, name=quote(rolerep["name"], safe=''))
|
||||||
# Get existing composites
|
# Get existing composites
|
||||||
# create new composites
|
# create new composites
|
||||||
return open_url(composite_url, method='DELETE', http_agent=self.http_agent, headers=self.restheaders, timeout=self.connection_timeout,
|
return open_url(composite_url, method='DELETE', http_agent=self.http_agent, headers=self.restheaders, timeout=self.connection_timeout,
|
||||||
@@ -1835,7 +1842,7 @@ class KeycloakAPI(object):
|
|||||||
:param name: The name of the role.
|
:param name: The name of the role.
|
||||||
:param realm: The realm in which this role resides, default "master".
|
:param realm: The realm in which this role resides, default "master".
|
||||||
"""
|
"""
|
||||||
role_url = URL_REALM_ROLE.format(url=self.baseurl, realm=realm, name=quote(name))
|
role_url = URL_REALM_ROLE.format(url=self.baseurl, realm=realm, name=quote(name, safe=''))
|
||||||
try:
|
try:
|
||||||
return open_url(role_url, method='DELETE', http_agent=self.http_agent, headers=self.restheaders, timeout=self.connection_timeout,
|
return open_url(role_url, method='DELETE', http_agent=self.http_agent, headers=self.restheaders, timeout=self.connection_timeout,
|
||||||
validate_certs=self.validate_certs)
|
validate_certs=self.validate_certs)
|
||||||
@@ -1879,7 +1886,7 @@ class KeycloakAPI(object):
|
|||||||
if cid is None:
|
if cid is None:
|
||||||
self.module.fail_json(msg='Could not find client %s in realm %s'
|
self.module.fail_json(msg='Could not find client %s in realm %s'
|
||||||
% (clientid, realm))
|
% (clientid, realm))
|
||||||
role_url = URL_CLIENT_ROLE.format(url=self.baseurl, realm=realm, id=cid, name=quote(name))
|
role_url = URL_CLIENT_ROLE.format(url=self.baseurl, realm=realm, id=cid, name=quote(name, safe=''))
|
||||||
try:
|
try:
|
||||||
return json.loads(to_native(open_url(role_url, method="GET", http_agent=self.http_agent, headers=self.restheaders, timeout=self.connection_timeout,
|
return json.loads(to_native(open_url(role_url, method="GET", http_agent=self.http_agent, headers=self.restheaders, timeout=self.connection_timeout,
|
||||||
validate_certs=self.validate_certs).read()))
|
validate_certs=self.validate_certs).read()))
|
||||||
@@ -1943,7 +1950,7 @@ class KeycloakAPI(object):
|
|||||||
if cid is None:
|
if cid is None:
|
||||||
self.module.fail_json(msg='Could not find client %s in realm %s'
|
self.module.fail_json(msg='Could not find client %s in realm %s'
|
||||||
% (clientid, realm))
|
% (clientid, realm))
|
||||||
role_url = URL_CLIENT_ROLE.format(url=self.baseurl, realm=realm, id=cid, name=quote(rolerep['name']))
|
role_url = URL_CLIENT_ROLE.format(url=self.baseurl, realm=realm, id=cid, name=quote(rolerep['name'], safe=''))
|
||||||
try:
|
try:
|
||||||
composites = None
|
composites = None
|
||||||
if "composites" in rolerep:
|
if "composites" in rolerep:
|
||||||
@@ -1969,7 +1976,7 @@ class KeycloakAPI(object):
|
|||||||
if cid is None:
|
if cid is None:
|
||||||
self.module.fail_json(msg='Could not find client %s in realm %s'
|
self.module.fail_json(msg='Could not find client %s in realm %s'
|
||||||
% (clientid, realm))
|
% (clientid, realm))
|
||||||
role_url = URL_CLIENT_ROLE.format(url=self.baseurl, realm=realm, id=cid, name=quote(name))
|
role_url = URL_CLIENT_ROLE.format(url=self.baseurl, realm=realm, id=cid, name=quote(name, safe=''))
|
||||||
try:
|
try:
|
||||||
return open_url(role_url, method='DELETE', http_agent=self.http_agent, headers=self.restheaders, timeout=self.connection_timeout,
|
return open_url(role_url, method='DELETE', http_agent=self.http_agent, headers=self.restheaders, timeout=self.connection_timeout,
|
||||||
validate_certs=self.validate_certs)
|
validate_certs=self.validate_certs)
|
||||||
@@ -2029,7 +2036,7 @@ class KeycloakAPI(object):
|
|||||||
URL_AUTHENTICATION_FLOW_COPY.format(
|
URL_AUTHENTICATION_FLOW_COPY.format(
|
||||||
url=self.baseurl,
|
url=self.baseurl,
|
||||||
realm=realm,
|
realm=realm,
|
||||||
copyfrom=quote(config["copyFrom"])),
|
copyfrom=quote(config["copyFrom"], safe='')),
|
||||||
method='POST',
|
method='POST',
|
||||||
http_agent=self.http_agent, headers=self.restheaders,
|
http_agent=self.http_agent, headers=self.restheaders,
|
||||||
data=json.dumps(new_name),
|
data=json.dumps(new_name),
|
||||||
@@ -2103,7 +2110,7 @@ class KeycloakAPI(object):
|
|||||||
URL_AUTHENTICATION_FLOW_EXECUTIONS.format(
|
URL_AUTHENTICATION_FLOW_EXECUTIONS.format(
|
||||||
url=self.baseurl,
|
url=self.baseurl,
|
||||||
realm=realm,
|
realm=realm,
|
||||||
flowalias=quote(flowAlias)),
|
flowalias=quote(flowAlias, safe='')),
|
||||||
method='PUT',
|
method='PUT',
|
||||||
http_agent=self.http_agent, headers=self.restheaders,
|
http_agent=self.http_agent, headers=self.restheaders,
|
||||||
data=json.dumps(updatedExec),
|
data=json.dumps(updatedExec),
|
||||||
@@ -2152,7 +2159,7 @@ class KeycloakAPI(object):
|
|||||||
URL_AUTHENTICATION_FLOW_EXECUTIONS_FLOW.format(
|
URL_AUTHENTICATION_FLOW_EXECUTIONS_FLOW.format(
|
||||||
url=self.baseurl,
|
url=self.baseurl,
|
||||||
realm=realm,
|
realm=realm,
|
||||||
flowalias=quote(flowAlias)),
|
flowalias=quote(flowAlias, safe='')),
|
||||||
method='POST',
|
method='POST',
|
||||||
http_agent=self.http_agent, headers=self.restheaders,
|
http_agent=self.http_agent, headers=self.restheaders,
|
||||||
data=json.dumps(newSubFlow),
|
data=json.dumps(newSubFlow),
|
||||||
@@ -2176,7 +2183,7 @@ class KeycloakAPI(object):
|
|||||||
URL_AUTHENTICATION_FLOW_EXECUTIONS_EXECUTION.format(
|
URL_AUTHENTICATION_FLOW_EXECUTIONS_EXECUTION.format(
|
||||||
url=self.baseurl,
|
url=self.baseurl,
|
||||||
realm=realm,
|
realm=realm,
|
||||||
flowalias=quote(flowAlias)),
|
flowalias=quote(flowAlias, safe='')),
|
||||||
method='POST',
|
method='POST',
|
||||||
http_agent=self.http_agent, headers=self.restheaders,
|
http_agent=self.http_agent, headers=self.restheaders,
|
||||||
data=json.dumps(newExec),
|
data=json.dumps(newExec),
|
||||||
@@ -2236,7 +2243,7 @@ class KeycloakAPI(object):
|
|||||||
URL_AUTHENTICATION_FLOW_EXECUTIONS.format(
|
URL_AUTHENTICATION_FLOW_EXECUTIONS.format(
|
||||||
url=self.baseurl,
|
url=self.baseurl,
|
||||||
realm=realm,
|
realm=realm,
|
||||||
flowalias=quote(config["alias"])),
|
flowalias=quote(config["alias"], safe='')),
|
||||||
method='GET',
|
method='GET',
|
||||||
http_agent=self.http_agent, headers=self.restheaders,
|
http_agent=self.http_agent, headers=self.restheaders,
|
||||||
timeout=self.connection_timeout,
|
timeout=self.connection_timeout,
|
||||||
@@ -2329,7 +2336,7 @@ class KeycloakAPI(object):
|
|||||||
return open_url(
|
return open_url(
|
||||||
URL_AUTHENTICATION_REQUIRED_ACTIONS_ALIAS.format(
|
URL_AUTHENTICATION_REQUIRED_ACTIONS_ALIAS.format(
|
||||||
url=self.baseurl,
|
url=self.baseurl,
|
||||||
alias=quote(alias),
|
alias=quote(alias, safe=''),
|
||||||
realm=realm
|
realm=realm
|
||||||
),
|
),
|
||||||
method='PUT',
|
method='PUT',
|
||||||
@@ -2356,7 +2363,7 @@ class KeycloakAPI(object):
|
|||||||
return open_url(
|
return open_url(
|
||||||
URL_AUTHENTICATION_REQUIRED_ACTIONS_ALIAS.format(
|
URL_AUTHENTICATION_REQUIRED_ACTIONS_ALIAS.format(
|
||||||
url=self.baseurl,
|
url=self.baseurl,
|
||||||
alias=quote(alias),
|
alias=quote(alias, safe=''),
|
||||||
realm=realm
|
realm=realm
|
||||||
),
|
),
|
||||||
method='DELETE',
|
method='DELETE',
|
||||||
@@ -2623,7 +2630,7 @@ class KeycloakAPI(object):
|
|||||||
|
|
||||||
def get_authz_authorization_scope_by_name(self, name, client_id, realm):
|
def get_authz_authorization_scope_by_name(self, name, client_id, realm):
|
||||||
url = URL_AUTHZ_AUTHORIZATION_SCOPES.format(url=self.baseurl, client_id=client_id, realm=realm)
|
url = URL_AUTHZ_AUTHORIZATION_SCOPES.format(url=self.baseurl, client_id=client_id, realm=realm)
|
||||||
search_url = "%s/search?name=%s" % (url, quote(name))
|
search_url = "%s/search?name=%s" % (url, quote(name, safe=''))
|
||||||
|
|
||||||
try:
|
try:
|
||||||
return json.loads(to_native(open_url(search_url, method='GET', http_agent=self.http_agent, headers=self.restheaders,
|
return json.loads(to_native(open_url(search_url, method='GET', http_agent=self.http_agent, headers=self.restheaders,
|
||||||
@@ -2678,7 +2685,9 @@ class KeycloakAPI(object):
|
|||||||
open_url(
|
open_url(
|
||||||
user_url,
|
user_url,
|
||||||
method='GET',
|
method='GET',
|
||||||
headers=self.restheaders))
|
http_agent=self.http_agent, headers=self.restheaders,
|
||||||
|
timeout=self.connection_timeout,
|
||||||
|
validate_certs=self.validate_certs))
|
||||||
return userrep
|
return userrep
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
self.module.fail_json(msg='Could not get user %s in realm %s: %s'
|
self.module.fail_json(msg='Could not get user %s in realm %s: %s'
|
||||||
@@ -2700,8 +2709,10 @@ class KeycloakAPI(object):
|
|||||||
realm=realm)
|
realm=realm)
|
||||||
open_url(users_url,
|
open_url(users_url,
|
||||||
method='POST',
|
method='POST',
|
||||||
headers=self.restheaders,
|
http_agent=self.http_agent, headers=self.restheaders,
|
||||||
data=json.dumps(userrep))
|
data=json.dumps(userrep),
|
||||||
|
timeout=self.connection_timeout,
|
||||||
|
validate_certs=self.validate_certs)
|
||||||
created_user = self.get_user_by_username(
|
created_user = self.get_user_by_username(
|
||||||
username=userrep['username'],
|
username=userrep['username'],
|
||||||
realm=realm)
|
realm=realm)
|
||||||
@@ -2744,8 +2755,10 @@ class KeycloakAPI(object):
|
|||||||
open_url(
|
open_url(
|
||||||
user_url,
|
user_url,
|
||||||
method='PUT',
|
method='PUT',
|
||||||
headers=self.restheaders,
|
http_agent=self.http_agent, headers=self.restheaders,
|
||||||
data=json.dumps(userrep))
|
data=json.dumps(userrep),
|
||||||
|
timeout=self.connection_timeout,
|
||||||
|
validate_certs=self.validate_certs)
|
||||||
updated_user = self.get_user_by_id(
|
updated_user = self.get_user_by_id(
|
||||||
user_id=userrep['id'],
|
user_id=userrep['id'],
|
||||||
realm=realm)
|
realm=realm)
|
||||||
@@ -2769,7 +2782,9 @@ class KeycloakAPI(object):
|
|||||||
return open_url(
|
return open_url(
|
||||||
user_url,
|
user_url,
|
||||||
method='DELETE',
|
method='DELETE',
|
||||||
headers=self.restheaders)
|
http_agent=self.http_agent, headers=self.restheaders,
|
||||||
|
timeout=self.connection_timeout,
|
||||||
|
validate_certs=self.validate_certs)
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
self.module.fail_json(msg='Could not delete user %s in realm %s: %s'
|
self.module.fail_json(msg='Could not delete user %s in realm %s: %s'
|
||||||
% (user_id, realm, str(e)))
|
% (user_id, realm, str(e)))
|
||||||
@@ -2791,7 +2806,9 @@ class KeycloakAPI(object):
|
|||||||
open_url(
|
open_url(
|
||||||
user_groups_url,
|
user_groups_url,
|
||||||
method='GET',
|
method='GET',
|
||||||
headers=self.restheaders))
|
http_agent=self.http_agent, headers=self.restheaders,
|
||||||
|
timeout=self.connection_timeout,
|
||||||
|
validate_certs=self.validate_certs))
|
||||||
for user_group in user_groups:
|
for user_group in user_groups:
|
||||||
groups.append(user_group["name"])
|
groups.append(user_group["name"])
|
||||||
return groups
|
return groups
|
||||||
@@ -2816,7 +2833,9 @@ class KeycloakAPI(object):
|
|||||||
return open_url(
|
return open_url(
|
||||||
user_group_url,
|
user_group_url,
|
||||||
method='PUT',
|
method='PUT',
|
||||||
headers=self.restheaders)
|
http_agent=self.http_agent, headers=self.restheaders,
|
||||||
|
timeout=self.connection_timeout,
|
||||||
|
validate_certs=self.validate_certs)
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
self.module.fail_json(msg='Could not add user %s in group %s in realm %s: %s'
|
self.module.fail_json(msg='Could not add user %s in group %s in realm %s: %s'
|
||||||
% (user_id, group_id, realm, str(e)))
|
% (user_id, group_id, realm, str(e)))
|
||||||
@@ -2838,7 +2857,9 @@ class KeycloakAPI(object):
|
|||||||
return open_url(
|
return open_url(
|
||||||
user_group_url,
|
user_group_url,
|
||||||
method='DELETE',
|
method='DELETE',
|
||||||
headers=self.restheaders)
|
http_agent=self.http_agent, headers=self.restheaders,
|
||||||
|
timeout=self.connection_timeout,
|
||||||
|
validate_certs=self.validate_certs)
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
self.module.fail_json(msg='Could not remove user %s from group %s in realm %s: %s'
|
self.module.fail_json(msg='Could not remove user %s from group %s in realm %s: %s'
|
||||||
% (user_id, group_id, realm, str(e)))
|
% (user_id, group_id, realm, str(e)))
|
||||||
@@ -2858,7 +2879,7 @@ class KeycloakAPI(object):
|
|||||||
groups_to_add_and_remove = self.extract_groups_to_add_to_and_remove_from_user(groups)
|
groups_to_add_and_remove = self.extract_groups_to_add_to_and_remove_from_user(groups)
|
||||||
# If group membership need to be changed
|
# If group membership need to be changed
|
||||||
if not is_struct_included(groups_to_add_and_remove['add'], user_existing_groups):
|
if not is_struct_included(groups_to_add_and_remove['add'], user_existing_groups):
|
||||||
# Get available goups in the realm
|
# Get available groups in the realm
|
||||||
realm_groups = self.get_groups(realm=realm)
|
realm_groups = self.get_groups(realm=realm)
|
||||||
for realm_group in realm_groups:
|
for realm_group in realm_groups:
|
||||||
if "name" in realm_group and realm_group["name"] in groups_to_add_and_remove['add']:
|
if "name" in realm_group and realm_group["name"] in groups_to_add_and_remove['add']:
|
||||||
@@ -2904,6 +2925,27 @@ class KeycloakAPI(object):
|
|||||||
list_of_groups.append(group_dict)
|
list_of_groups.append(group_dict)
|
||||||
return list_of_groups
|
return list_of_groups
|
||||||
|
|
||||||
|
def create_authz_custom_policy(self, policy_type, payload, client_id, realm):
|
||||||
|
"""Create a custom policy for a Keycloak client"""
|
||||||
|
url = URL_AUTHZ_CUSTOM_POLICY.format(url=self.baseurl, policy_type=policy_type, client_id=client_id, realm=realm)
|
||||||
|
|
||||||
|
try:
|
||||||
|
return open_url(url, method='POST', http_agent=self.http_agent, headers=self.restheaders, timeout=self.connection_timeout,
|
||||||
|
data=json.dumps(payload), validate_certs=self.validate_certs)
|
||||||
|
except Exception as e:
|
||||||
|
self.module.fail_json(msg='Could not create permission %s for client %s in realm %s: %s' % (payload['name'], client_id, realm, str(e)))
|
||||||
|
|
||||||
|
def remove_authz_custom_policy(self, policy_id, client_id, realm):
|
||||||
|
"""Remove a custom policy from a Keycloak client"""
|
||||||
|
url = URL_AUTHZ_CUSTOM_POLICIES.format(url=self.baseurl, client_id=client_id, realm=realm)
|
||||||
|
delete_url = "%s/%s" % (url, policy_id)
|
||||||
|
|
||||||
|
try:
|
||||||
|
return open_url(delete_url, method='DELETE', http_agent=self.http_agent, headers=self.restheaders, timeout=self.connection_timeout,
|
||||||
|
validate_certs=self.validate_certs)
|
||||||
|
except Exception as e:
|
||||||
|
self.module.fail_json(msg='Could not delete custom policy %s for client %s in realm %s: %s' % (id, client_id, realm, str(e)))
|
||||||
|
|
||||||
def get_authz_permission_by_name(self, name, client_id, realm):
|
def get_authz_permission_by_name(self, name, client_id, realm):
|
||||||
"""Get authorization permission by name"""
|
"""Get authorization permission by name"""
|
||||||
url = URL_AUTHZ_POLICIES.format(url=self.baseurl, client_id=client_id, realm=realm)
|
url = URL_AUTHZ_POLICIES.format(url=self.baseurl, client_id=client_id, realm=realm)
|
||||||
|
|||||||
@@ -139,5 +139,7 @@ class LdapGeneric(object):
|
|||||||
|
|
||||||
def _xorder_dn(self):
|
def _xorder_dn(self):
|
||||||
# match X_ORDERed DNs
|
# match X_ORDERed DNs
|
||||||
regex = r"\w+=\{\d+\}.+"
|
regex = r".+\{\d+\}.+"
|
||||||
return re.match(regex, self.module.params['dn']) is not None
|
explode_dn = ldap.dn.explode_dn(self.module.params['dn'])
|
||||||
|
|
||||||
|
return re.match(regex, explode_dn[0]) is not None
|
||||||
|
|||||||
@@ -14,8 +14,9 @@ from __future__ import (absolute_import, division, print_function)
|
|||||||
__metaclass__ = type
|
__metaclass__ = type
|
||||||
|
|
||||||
from ansible.module_utils.six.moves.urllib.parse import urlencode
|
from ansible.module_utils.six.moves.urllib.parse import urlencode
|
||||||
from ansible.module_utils.urls import open_url, urllib_error
|
from ansible.module_utils.urls import open_url
|
||||||
from ansible.module_utils.basic import json
|
from ansible.module_utils.basic import json
|
||||||
|
import ansible.module_utils.six.moves.urllib.error as urllib_error
|
||||||
|
|
||||||
|
|
||||||
class Response(object):
|
class Response(object):
|
||||||
@@ -78,7 +79,7 @@ def memset_api_call(api_key, api_method, payload=None):
|
|||||||
msg = "Memset API returned an error ({0}, {1})." . format(response.json()['error_type'], response.json()['error'])
|
msg = "Memset API returned an error ({0}, {1})." . format(response.json()['error_type'], response.json()['error'])
|
||||||
except urllib_error.URLError as e:
|
except urllib_error.URLError as e:
|
||||||
has_failed = True
|
has_failed = True
|
||||||
msg = "An URLError occured ({0})." . format(type(e))
|
msg = "An URLError occurred ({0})." . format(type(e))
|
||||||
response.stderr = "{0}" . format(e)
|
response.stderr = "{0}" . format(e)
|
||||||
|
|
||||||
if msg is None:
|
if msg is None:
|
||||||
|
|||||||
@@ -79,7 +79,7 @@ def _post_pritunl_organization(
|
|||||||
api_secret=api_secret,
|
api_secret=api_secret,
|
||||||
base_url=base_url,
|
base_url=base_url,
|
||||||
method="POST",
|
method="POST",
|
||||||
path="/organization/%s",
|
path="/organization",
|
||||||
headers={"Content-Type": "application/json"},
|
headers={"Content-Type": "application/json"},
|
||||||
data=json.dumps(organization_data),
|
data=json.dumps(organization_data),
|
||||||
validate_certs=validate_certs,
|
validate_certs=validate_certs,
|
||||||
@@ -220,7 +220,7 @@ def post_pritunl_organization(
|
|||||||
api_secret=api_secret,
|
api_secret=api_secret,
|
||||||
base_url=base_url,
|
base_url=base_url,
|
||||||
organization_data={"name": organization_name},
|
organization_data={"name": organization_name},
|
||||||
validate_certs=True,
|
validate_certs=validate_certs,
|
||||||
)
|
)
|
||||||
|
|
||||||
if response.getcode() != 200:
|
if response.getcode() != 200:
|
||||||
@@ -248,7 +248,7 @@ def post_pritunl_user(
|
|||||||
base_url=base_url,
|
base_url=base_url,
|
||||||
organization_id=organization_id,
|
organization_id=organization_id,
|
||||||
user_data=user_data,
|
user_data=user_data,
|
||||||
validate_certs=True,
|
validate_certs=validate_certs,
|
||||||
)
|
)
|
||||||
|
|
||||||
if response.getcode() != 200:
|
if response.getcode() != 200:
|
||||||
@@ -267,7 +267,7 @@ def post_pritunl_user(
|
|||||||
organization_id=organization_id,
|
organization_id=organization_id,
|
||||||
user_data=user_data,
|
user_data=user_data,
|
||||||
user_id=user_id,
|
user_id=user_id,
|
||||||
validate_certs=True,
|
validate_certs=validate_certs,
|
||||||
)
|
)
|
||||||
|
|
||||||
if response.getcode() != 200:
|
if response.getcode() != 200:
|
||||||
@@ -287,7 +287,7 @@ def delete_pritunl_organization(
|
|||||||
api_secret=api_secret,
|
api_secret=api_secret,
|
||||||
base_url=base_url,
|
base_url=base_url,
|
||||||
organization_id=organization_id,
|
organization_id=organization_id,
|
||||||
validate_certs=True,
|
validate_certs=validate_certs,
|
||||||
)
|
)
|
||||||
|
|
||||||
if response.getcode() != 200:
|
if response.getcode() != 200:
|
||||||
@@ -307,7 +307,7 @@ def delete_pritunl_user(
|
|||||||
base_url=base_url,
|
base_url=base_url,
|
||||||
organization_id=organization_id,
|
organization_id=organization_id,
|
||||||
user_id=user_id,
|
user_id=user_id,
|
||||||
validate_certs=True,
|
validate_certs=validate_certs,
|
||||||
)
|
)
|
||||||
|
|
||||||
if response.getcode() != 200:
|
if response.getcode() != 200:
|
||||||
@@ -331,7 +331,7 @@ def pritunl_auth_request(
|
|||||||
):
|
):
|
||||||
"""
|
"""
|
||||||
Send an API call to a Pritunl server.
|
Send an API call to a Pritunl server.
|
||||||
Taken from https://pritunl.com/api and adaped work with Ansible open_url
|
Taken from https://pritunl.com/api and adapted to work with Ansible open_url
|
||||||
"""
|
"""
|
||||||
auth_timestamp = str(int(time.time()))
|
auth_timestamp = str(int(time.time()))
|
||||||
auth_nonce = uuid.uuid4().hex
|
auth_nonce = uuid.uuid4().hex
|
||||||
|
|||||||
@@ -432,7 +432,7 @@ class OcapiUtils(object):
|
|||||||
else:
|
else:
|
||||||
return response
|
return response
|
||||||
details = response["data"]["Status"].get("Details")
|
details = response["data"]["Status"].get("Details")
|
||||||
if type(details) is str:
|
if isinstance(details, str):
|
||||||
details = [details]
|
details = [details]
|
||||||
health_list = response["data"]["Status"]["Health"]
|
health_list = response["data"]["Status"]["Health"]
|
||||||
return_value = {
|
return_value = {
|
||||||
|
|||||||
@@ -434,7 +434,7 @@ def check_and_update_attributes(
|
|||||||
target_instance, attr_name, input_value, existing_value, changed
|
target_instance, attr_name, input_value, existing_value, changed
|
||||||
):
|
):
|
||||||
"""
|
"""
|
||||||
This function checks the difference between two resource attributes of literal types and sets the attrbute
|
This function checks the difference between two resource attributes of literal types and sets the attribute
|
||||||
value in the target instance type holding the attribute.
|
value in the target instance type holding the attribute.
|
||||||
:param target_instance: The instance which contains the attribute whose values to be compared
|
:param target_instance: The instance which contains the attribute whose values to be compared
|
||||||
:param attr_name: Name of the attribute whose value required to be compared
|
:param attr_name: Name of the attribute whose value required to be compared
|
||||||
@@ -561,7 +561,7 @@ def are_lists_equal(s, t):
|
|||||||
if s is None and t is None:
|
if s is None and t is None:
|
||||||
return True
|
return True
|
||||||
|
|
||||||
if (s is None and len(t) >= 0) or (t is None and len(s) >= 0) or (len(s) != len(t)):
|
if s is None or t is None or (len(s) != len(t)):
|
||||||
return False
|
return False
|
||||||
|
|
||||||
if len(s) == 0:
|
if len(s) == 0:
|
||||||
@@ -570,7 +570,7 @@ def are_lists_equal(s, t):
|
|||||||
s = to_dict(s)
|
s = to_dict(s)
|
||||||
t = to_dict(t)
|
t = to_dict(t)
|
||||||
|
|
||||||
if type(s[0]) == dict:
|
if isinstance(s[0], dict):
|
||||||
# Handle list of dicts. Dictionary returned by the API may have additional keys. For example, a get call on
|
# Handle list of dicts. Dictionary returned by the API may have additional keys. For example, a get call on
|
||||||
# service gateway has an attribute `services` which is a list of `ServiceIdResponseDetails`. This has a key
|
# service gateway has an attribute `services` which is a list of `ServiceIdResponseDetails`. This has a key
|
||||||
# `service_name` which is not provided in the list of `services` by a user while making an update call; only
|
# `service_name` which is not provided in the list of `services` by a user while making an update call; only
|
||||||
@@ -604,9 +604,9 @@ def get_attr_to_update(get_fn, kwargs_get, module, update_attributes):
|
|||||||
user_provided_attr_value = module.params.get(attr, None)
|
user_provided_attr_value = module.params.get(attr, None)
|
||||||
|
|
||||||
unequal_list_attr = (
|
unequal_list_attr = (
|
||||||
type(resources_attr_value) == list or type(user_provided_attr_value) == list
|
isinstance(resources_attr_value, list) or isinstance(user_provided_attr_value, list)
|
||||||
) and not are_lists_equal(user_provided_attr_value, resources_attr_value)
|
) and not are_lists_equal(user_provided_attr_value, resources_attr_value)
|
||||||
unequal_attr = type(resources_attr_value) != list and to_dict(
|
unequal_attr = not isinstance(resources_attr_value, list) and to_dict(
|
||||||
resources_attr_value
|
resources_attr_value
|
||||||
) != to_dict(user_provided_attr_value)
|
) != to_dict(user_provided_attr_value)
|
||||||
if unequal_list_attr or unequal_attr:
|
if unequal_list_attr or unequal_attr:
|
||||||
@@ -785,7 +785,7 @@ def _get_attributes_to_consider(exclude_attributes, model, module):
|
|||||||
attributes_to_consider = list(model.attribute_map)
|
attributes_to_consider = list(model.attribute_map)
|
||||||
if "freeform_tags" in attributes_to_consider:
|
if "freeform_tags" in attributes_to_consider:
|
||||||
attributes_to_consider.remove("freeform_tags")
|
attributes_to_consider.remove("freeform_tags")
|
||||||
# Temporarily removing node_count as the exisiting resource does not reflect it
|
# Temporarily removing node_count as the existing resource does not reflect it
|
||||||
if "node_count" in attributes_to_consider:
|
if "node_count" in attributes_to_consider:
|
||||||
attributes_to_consider.remove("node_count")
|
attributes_to_consider.remove("node_count")
|
||||||
_debug("attributes to consider: {0}".format(attributes_to_consider))
|
_debug("attributes to consider: {0}".format(attributes_to_consider))
|
||||||
@@ -936,9 +936,9 @@ def tuplize(d):
|
|||||||
list_of_tuples = []
|
list_of_tuples = []
|
||||||
key_list = sorted(list(d.keys()))
|
key_list = sorted(list(d.keys()))
|
||||||
for key in key_list:
|
for key in key_list:
|
||||||
if type(d[key]) == list:
|
if isinstance(d[key], list):
|
||||||
# Convert a value which is itself a list of dict to a list of tuples.
|
# Convert a value which is itself a list of dict to a list of tuples.
|
||||||
if d[key] and type(d[key][0]) == dict:
|
if d[key] and isinstance(d[key][0], dict):
|
||||||
sub_tuples = []
|
sub_tuples = []
|
||||||
for sub_dict in d[key]:
|
for sub_dict in d[key]:
|
||||||
sub_tuples.append(tuplize(sub_dict))
|
sub_tuples.append(tuplize(sub_dict))
|
||||||
@@ -948,7 +948,7 @@ def tuplize(d):
|
|||||||
list_of_tuples.append((sub_tuples is None, key, sub_tuples))
|
list_of_tuples.append((sub_tuples is None, key, sub_tuples))
|
||||||
else:
|
else:
|
||||||
list_of_tuples.append((d[key] is None, key, d[key]))
|
list_of_tuples.append((d[key] is None, key, d[key]))
|
||||||
elif type(d[key]) == dict:
|
elif isinstance(d[key], dict):
|
||||||
tupled_value = tuplize(d[key])
|
tupled_value = tuplize(d[key])
|
||||||
list_of_tuples.append((tupled_value is None, key, tupled_value))
|
list_of_tuples.append((tupled_value is None, key, tupled_value))
|
||||||
else:
|
else:
|
||||||
@@ -969,13 +969,13 @@ def sort_dictionary(d):
|
|||||||
"""
|
"""
|
||||||
sorted_d = {}
|
sorted_d = {}
|
||||||
for key in d:
|
for key in d:
|
||||||
if type(d[key]) == list:
|
if isinstance(d[key], list):
|
||||||
if d[key] and type(d[key][0]) == dict:
|
if d[key] and isinstance(d[key][0], dict):
|
||||||
sorted_value = sort_list_of_dictionary(d[key])
|
sorted_value = sort_list_of_dictionary(d[key])
|
||||||
sorted_d[key] = sorted_value
|
sorted_d[key] = sorted_value
|
||||||
else:
|
else:
|
||||||
sorted_d[key] = sorted(d[key])
|
sorted_d[key] = sorted(d[key])
|
||||||
elif type(d[key]) == dict:
|
elif isinstance(d[key], dict):
|
||||||
sorted_d[key] = sort_dictionary(d[key])
|
sorted_d[key] = sort_dictionary(d[key])
|
||||||
else:
|
else:
|
||||||
sorted_d[key] = d[key]
|
sorted_d[key] = d[key]
|
||||||
@@ -1026,10 +1026,7 @@ def check_if_user_value_matches_resources_attr(
|
|||||||
return
|
return
|
||||||
|
|
||||||
if (
|
if (
|
||||||
resources_value_for_attr is None
|
resources_value_for_attr is None or user_provided_value_for_attr is None
|
||||||
and len(user_provided_value_for_attr) >= 0
|
|
||||||
or user_provided_value_for_attr is None
|
|
||||||
and len(resources_value_for_attr) >= 0
|
|
||||||
):
|
):
|
||||||
res[0] = False
|
res[0] = False
|
||||||
return
|
return
|
||||||
@@ -1044,7 +1041,7 @@ def check_if_user_value_matches_resources_attr(
|
|||||||
|
|
||||||
if (
|
if (
|
||||||
user_provided_value_for_attr
|
user_provided_value_for_attr
|
||||||
and type(user_provided_value_for_attr[0]) == dict
|
and isinstance(user_provided_value_for_attr[0], dict)
|
||||||
):
|
):
|
||||||
# Process a list of dict
|
# Process a list of dict
|
||||||
sorted_user_provided_value_for_attr = sort_list_of_dictionary(
|
sorted_user_provided_value_for_attr = sort_list_of_dictionary(
|
||||||
@@ -1532,7 +1529,7 @@ def delete_and_wait(
|
|||||||
result[resource_type] = resource
|
result[resource_type] = resource
|
||||||
return result
|
return result
|
||||||
# oci.wait_until() returns an instance of oci.util.Sentinel in case the resource is not found.
|
# oci.wait_until() returns an instance of oci.util.Sentinel in case the resource is not found.
|
||||||
if type(wait_response) is not Sentinel:
|
if not isinstance(wait_response, Sentinel):
|
||||||
resource = to_dict(wait_response.data)
|
resource = to_dict(wait_response.data)
|
||||||
else:
|
else:
|
||||||
resource["lifecycle_state"] = "DELETED"
|
resource["lifecycle_state"] = "DELETED"
|
||||||
@@ -1547,7 +1544,7 @@ def delete_and_wait(
|
|||||||
except ServiceError as ex:
|
except ServiceError as ex:
|
||||||
# DNS API throws a 400 InvalidParameter when a zone id is provided for zone_name_or_id and if the zone
|
# DNS API throws a 400 InvalidParameter when a zone id is provided for zone_name_or_id and if the zone
|
||||||
# resource is not available, instead of the expected 404. So working around this for now.
|
# resource is not available, instead of the expected 404. So working around this for now.
|
||||||
if type(client) == oci.dns.DnsClient:
|
if isinstance(client, oci.dns.DnsClient):
|
||||||
if ex.status == 400 and ex.code == "InvalidParameter":
|
if ex.status == 400 and ex.code == "InvalidParameter":
|
||||||
_debug(
|
_debug(
|
||||||
"Resource {0} with {1} already deleted. So returning changed=False".format(
|
"Resource {0} with {1} already deleted. So returning changed=False".format(
|
||||||
@@ -1774,7 +1771,7 @@ def update_class_type_attr_difference(
|
|||||||
):
|
):
|
||||||
"""
|
"""
|
||||||
Checks the difference and updates an attribute which is represented by a class
|
Checks the difference and updates an attribute which is represented by a class
|
||||||
instance. Not aplicable if the attribute type is a primitive value.
|
instance. Not applicable if the attribute type is a primitive value.
|
||||||
For example, if a class name is A with an attribute x, then if A.x = X(), then only
|
For example, if a class name is A with an attribute x, then if A.x = X(), then only
|
||||||
this method works.
|
this method works.
|
||||||
:param update_class_details The instance which should be updated if there is change in
|
:param update_class_details The instance which should be updated if there is change in
|
||||||
@@ -1936,7 +1933,7 @@ def get_target_resource_from_list(
|
|||||||
module, list_resource_fn, target_resource_id=None, **kwargs
|
module, list_resource_fn, target_resource_id=None, **kwargs
|
||||||
):
|
):
|
||||||
"""
|
"""
|
||||||
Returns a resource filtered by identifer from a list of resources. This method should be
|
Returns a resource filtered by identifier from a list of resources. This method should be
|
||||||
used as an alternative of 'get resource' method when 'get resource' is nor provided by
|
used as an alternative of 'get resource' method when 'get resource' is nor provided by
|
||||||
resource api. This method returns a wrapper of response object but that should not be
|
resource api. This method returns a wrapper of response object but that should not be
|
||||||
used as an input to 'wait_until' utility as this is only a partial wrapper of response object.
|
used as an input to 'wait_until' utility as this is only a partial wrapper of response object.
|
||||||
|
|||||||
@@ -42,7 +42,7 @@ def pipx_runner(module, command, **kwargs):
|
|||||||
system_site_packages=fmt.as_bool("--system-site-packages"),
|
system_site_packages=fmt.as_bool("--system-site-packages"),
|
||||||
_list=fmt.as_fixed(['list', '--include-injected', '--json']),
|
_list=fmt.as_fixed(['list', '--include-injected', '--json']),
|
||||||
editable=fmt.as_bool("--editable"),
|
editable=fmt.as_bool("--editable"),
|
||||||
pip_args=fmt.as_opt_val('--pip-args'),
|
pip_args=fmt.as_opt_eq_val('--pip-args'),
|
||||||
),
|
),
|
||||||
environ_update={'USE_EMOJI': '0'},
|
environ_update={'USE_EMOJI': '0'},
|
||||||
check_rc=True,
|
check_rc=True,
|
||||||
|
|||||||
@@ -10,6 +10,8 @@ import json
|
|||||||
import os
|
import os
|
||||||
import random
|
import random
|
||||||
import string
|
import string
|
||||||
|
import gzip
|
||||||
|
from io import BytesIO
|
||||||
from ansible.module_utils.urls import open_url
|
from ansible.module_utils.urls import open_url
|
||||||
from ansible.module_utils.common.text.converters import to_native
|
from ansible.module_utils.common.text.converters import to_native
|
||||||
from ansible.module_utils.common.text.converters import to_text
|
from ansible.module_utils.common.text.converters import to_text
|
||||||
@@ -128,8 +130,10 @@ class RedfishUtils(object):
|
|||||||
return resp
|
return resp
|
||||||
|
|
||||||
# The following functions are to send GET/POST/PATCH/DELETE requests
|
# The following functions are to send GET/POST/PATCH/DELETE requests
|
||||||
def get_request(self, uri):
|
def get_request(self, uri, override_headers=None):
|
||||||
req_headers = dict(GET_HEADERS)
|
req_headers = dict(GET_HEADERS)
|
||||||
|
if override_headers:
|
||||||
|
req_headers.update(override_headers)
|
||||||
username, password, basic_auth = self._auth_params(req_headers)
|
username, password, basic_auth = self._auth_params(req_headers)
|
||||||
try:
|
try:
|
||||||
# Service root is an unauthenticated resource; remove credentials
|
# Service root is an unauthenticated resource; remove credentials
|
||||||
@@ -141,8 +145,13 @@ class RedfishUtils(object):
|
|||||||
force_basic_auth=basic_auth, validate_certs=False,
|
force_basic_auth=basic_auth, validate_certs=False,
|
||||||
follow_redirects='all',
|
follow_redirects='all',
|
||||||
use_proxy=True, timeout=self.timeout)
|
use_proxy=True, timeout=self.timeout)
|
||||||
data = json.loads(to_native(resp.read()))
|
if override_headers:
|
||||||
headers = dict((k.lower(), v) for (k, v) in resp.info().items())
|
resp = gzip.open(BytesIO(resp.read()), 'rt', encoding='utf-8')
|
||||||
|
data = json.loads(to_native(resp.read()))
|
||||||
|
headers = req_headers
|
||||||
|
else:
|
||||||
|
data = json.loads(to_native(resp.read()))
|
||||||
|
headers = dict((k.lower(), v) for (k, v) in resp.info().items())
|
||||||
except HTTPError as e:
|
except HTTPError as e:
|
||||||
msg = self._get_extended_message(e)
|
msg = self._get_extended_message(e)
|
||||||
return {'ret': False,
|
return {'ret': False,
|
||||||
@@ -318,7 +327,7 @@ class RedfishUtils(object):
|
|||||||
found in ansible.module_utils.urls, but it takes files and encodes them
|
found in ansible.module_utils.urls, but it takes files and encodes them
|
||||||
as Base64 strings, which is not expected by Redfish services. It also
|
as Base64 strings, which is not expected by Redfish services. It also
|
||||||
adds escaping of certain bytes in the payload, such as inserting '\r'
|
adds escaping of certain bytes in the payload, such as inserting '\r'
|
||||||
any time it finds a standlone '\n', which corrupts the image payload
|
any time it finds a standalone '\n', which corrupts the image payload
|
||||||
send to the service. This implementation is simplified to Redfish's
|
send to the service. This implementation is simplified to Redfish's
|
||||||
usage and doesn't necessarily represent an exhaustive method of
|
usage and doesn't necessarily represent an exhaustive method of
|
||||||
building multipart requests.
|
building multipart requests.
|
||||||
@@ -717,7 +726,8 @@ class RedfishUtils(object):
|
|||||||
properties = ['CacheSummary', 'FirmwareVersion', 'Identifiers',
|
properties = ['CacheSummary', 'FirmwareVersion', 'Identifiers',
|
||||||
'Location', 'Manufacturer', 'Model', 'Name', 'Id',
|
'Location', 'Manufacturer', 'Model', 'Name', 'Id',
|
||||||
'PartNumber', 'SerialNumber', 'SpeedGbps', 'Status']
|
'PartNumber', 'SerialNumber', 'SpeedGbps', 'Status']
|
||||||
key = "StorageControllers"
|
key = "Controllers"
|
||||||
|
deprecated_key = "StorageControllers"
|
||||||
|
|
||||||
# Find Storage service
|
# Find Storage service
|
||||||
response = self.get_request(self.root_uri + systems_uri)
|
response = self.get_request(self.root_uri + systems_uri)
|
||||||
@@ -745,7 +755,30 @@ class RedfishUtils(object):
|
|||||||
data = response['data']
|
data = response['data']
|
||||||
|
|
||||||
if key in data:
|
if key in data:
|
||||||
controller_list = data[key]
|
controllers_uri = data[key][u'@odata.id']
|
||||||
|
|
||||||
|
response = self.get_request(self.root_uri + controllers_uri)
|
||||||
|
if response['ret'] is False:
|
||||||
|
return response
|
||||||
|
result['ret'] = True
|
||||||
|
data = response['data']
|
||||||
|
|
||||||
|
if data[u'Members']:
|
||||||
|
for controller_member in data[u'Members']:
|
||||||
|
controller_member_uri = controller_member[u'@odata.id']
|
||||||
|
response = self.get_request(self.root_uri + controller_member_uri)
|
||||||
|
if response['ret'] is False:
|
||||||
|
return response
|
||||||
|
result['ret'] = True
|
||||||
|
data = response['data']
|
||||||
|
|
||||||
|
controller_result = {}
|
||||||
|
for property in properties:
|
||||||
|
if property in data:
|
||||||
|
controller_result[property] = data[property]
|
||||||
|
controller_results.append(controller_result)
|
||||||
|
elif deprecated_key in data:
|
||||||
|
controller_list = data[deprecated_key]
|
||||||
for controller in controller_list:
|
for controller in controller_list:
|
||||||
controller_result = {}
|
controller_result = {}
|
||||||
for property in properties:
|
for property in properties:
|
||||||
@@ -767,7 +800,7 @@ class RedfishUtils(object):
|
|||||||
properties = ['BlockSizeBytes', 'CapableSpeedGbs', 'CapacityBytes',
|
properties = ['BlockSizeBytes', 'CapableSpeedGbs', 'CapacityBytes',
|
||||||
'EncryptionAbility', 'EncryptionStatus',
|
'EncryptionAbility', 'EncryptionStatus',
|
||||||
'FailurePredicted', 'HotspareType', 'Id', 'Identifiers',
|
'FailurePredicted', 'HotspareType', 'Id', 'Identifiers',
|
||||||
'Manufacturer', 'MediaType', 'Model', 'Name',
|
'Links', 'Manufacturer', 'MediaType', 'Model', 'Name',
|
||||||
'PartNumber', 'PhysicalLocation', 'Protocol', 'Revision',
|
'PartNumber', 'PhysicalLocation', 'Protocol', 'Revision',
|
||||||
'RotationSpeedRPM', 'SerialNumber', 'Status']
|
'RotationSpeedRPM', 'SerialNumber', 'Status']
|
||||||
|
|
||||||
@@ -800,7 +833,25 @@ class RedfishUtils(object):
|
|||||||
return response
|
return response
|
||||||
data = response['data']
|
data = response['data']
|
||||||
controller_name = 'Controller 1'
|
controller_name = 'Controller 1'
|
||||||
if 'StorageControllers' in data:
|
if 'Controllers' in data:
|
||||||
|
controllers_uri = data['Controllers'][u'@odata.id']
|
||||||
|
|
||||||
|
response = self.get_request(self.root_uri + controllers_uri)
|
||||||
|
if response['ret'] is False:
|
||||||
|
return response
|
||||||
|
result['ret'] = True
|
||||||
|
cdata = response['data']
|
||||||
|
|
||||||
|
if cdata[u'Members']:
|
||||||
|
controller_member_uri = cdata[u'Members'][0][u'@odata.id']
|
||||||
|
|
||||||
|
response = self.get_request(self.root_uri + controller_member_uri)
|
||||||
|
if response['ret'] is False:
|
||||||
|
return response
|
||||||
|
result['ret'] = True
|
||||||
|
cdata = response['data']
|
||||||
|
controller_name = cdata['Name']
|
||||||
|
elif 'StorageControllers' in data:
|
||||||
sc = data['StorageControllers']
|
sc = data['StorageControllers']
|
||||||
if sc:
|
if sc:
|
||||||
if 'Name' in sc[0]:
|
if 'Name' in sc[0]:
|
||||||
@@ -819,7 +870,12 @@ class RedfishUtils(object):
|
|||||||
for property in properties:
|
for property in properties:
|
||||||
if property in data:
|
if property in data:
|
||||||
if data[property] is not None:
|
if data[property] is not None:
|
||||||
drive_result[property] = data[property]
|
if property == "Links":
|
||||||
|
if "Volumes" in data["Links"].keys():
|
||||||
|
volumes = [v["@odata.id"] for v in data["Links"]["Volumes"]]
|
||||||
|
drive_result["Volumes"] = volumes
|
||||||
|
else:
|
||||||
|
drive_result[property] = data[property]
|
||||||
drive_results.append(drive_result)
|
drive_results.append(drive_result)
|
||||||
drives = {'Controller': controller_name,
|
drives = {'Controller': controller_name,
|
||||||
'Drives': drive_results}
|
'Drives': drive_results}
|
||||||
@@ -904,15 +960,7 @@ class RedfishUtils(object):
|
|||||||
return response
|
return response
|
||||||
data = response['data']
|
data = response['data']
|
||||||
controller_name = 'Controller %s' % str(idx)
|
controller_name = 'Controller %s' % str(idx)
|
||||||
if 'StorageControllers' in data:
|
if 'Controllers' in data:
|
||||||
sc = data['StorageControllers']
|
|
||||||
if sc:
|
|
||||||
if 'Name' in sc[0]:
|
|
||||||
controller_name = sc[0]['Name']
|
|
||||||
else:
|
|
||||||
sc_id = sc[0].get('Id', '1')
|
|
||||||
controller_name = 'Controller %s' % sc_id
|
|
||||||
elif 'Controllers' in data:
|
|
||||||
response = self.get_request(self.root_uri + data['Controllers'][u'@odata.id'])
|
response = self.get_request(self.root_uri + data['Controllers'][u'@odata.id'])
|
||||||
if response['ret'] is False:
|
if response['ret'] is False:
|
||||||
return response
|
return response
|
||||||
@@ -930,6 +978,14 @@ class RedfishUtils(object):
|
|||||||
else:
|
else:
|
||||||
controller_id = member_data.get('Id', '1')
|
controller_id = member_data.get('Id', '1')
|
||||||
controller_name = 'Controller %s' % controller_id
|
controller_name = 'Controller %s' % controller_id
|
||||||
|
elif 'StorageControllers' in data:
|
||||||
|
sc = data['StorageControllers']
|
||||||
|
if sc:
|
||||||
|
if 'Name' in sc[0]:
|
||||||
|
controller_name = sc[0]['Name']
|
||||||
|
else:
|
||||||
|
sc_id = sc[0].get('Id', '1')
|
||||||
|
controller_name = 'Controller %s' % sc_id
|
||||||
volume_results = []
|
volume_results = []
|
||||||
volume_list = []
|
volume_list = []
|
||||||
if 'Volumes' in data:
|
if 'Volumes' in data:
|
||||||
@@ -1032,7 +1088,12 @@ class RedfishUtils(object):
|
|||||||
# command should be PowerOn, PowerForceOff, etc.
|
# command should be PowerOn, PowerForceOff, etc.
|
||||||
if not command.startswith('Power'):
|
if not command.startswith('Power'):
|
||||||
return {'ret': False, 'msg': 'Invalid Command (%s)' % command}
|
return {'ret': False, 'msg': 'Invalid Command (%s)' % command}
|
||||||
reset_type = command[5:]
|
|
||||||
|
# Commands (except PowerCycle) will be stripped of the 'Power' prefix
|
||||||
|
if command == 'PowerCycle':
|
||||||
|
reset_type = command
|
||||||
|
else:
|
||||||
|
reset_type = command[5:]
|
||||||
|
|
||||||
# map Reboot to a ResetType that does a reboot
|
# map Reboot to a ResetType that does a reboot
|
||||||
if reset_type == 'Reboot':
|
if reset_type == 'Reboot':
|
||||||
@@ -1499,29 +1560,37 @@ class RedfishUtils(object):
|
|||||||
|
|
||||||
def _software_inventory(self, uri):
|
def _software_inventory(self, uri):
|
||||||
result = {}
|
result = {}
|
||||||
response = self.get_request(self.root_uri + uri)
|
|
||||||
if response['ret'] is False:
|
|
||||||
return response
|
|
||||||
result['ret'] = True
|
|
||||||
data = response['data']
|
|
||||||
|
|
||||||
result['entries'] = []
|
result['entries'] = []
|
||||||
for member in data[u'Members']:
|
|
||||||
uri = self.root_uri + member[u'@odata.id']
|
while uri:
|
||||||
# Get details for each software or firmware member
|
response = self.get_request(self.root_uri + uri)
|
||||||
response = self.get_request(uri)
|
|
||||||
if response['ret'] is False:
|
if response['ret'] is False:
|
||||||
return response
|
return response
|
||||||
result['ret'] = True
|
result['ret'] = True
|
||||||
|
|
||||||
data = response['data']
|
data = response['data']
|
||||||
software = {}
|
if data.get('Members@odata.nextLink'):
|
||||||
# Get these standard properties if present
|
uri = data.get('Members@odata.nextLink')
|
||||||
for key in ['Name', 'Id', 'Status', 'Version', 'Updateable',
|
else:
|
||||||
'SoftwareId', 'LowestSupportedVersion', 'Manufacturer',
|
uri = None
|
||||||
'ReleaseDate']:
|
|
||||||
if key in data:
|
for member in data[u'Members']:
|
||||||
software[key] = data.get(key)
|
fw_uri = self.root_uri + member[u'@odata.id']
|
||||||
result['entries'].append(software)
|
# Get details for each software or firmware member
|
||||||
|
response = self.get_request(fw_uri)
|
||||||
|
if response['ret'] is False:
|
||||||
|
return response
|
||||||
|
result['ret'] = True
|
||||||
|
data = response['data']
|
||||||
|
software = {}
|
||||||
|
# Get these standard properties if present
|
||||||
|
for key in ['Name', 'Id', 'Status', 'Version', 'Updateable',
|
||||||
|
'SoftwareId', 'LowestSupportedVersion', 'Manufacturer',
|
||||||
|
'ReleaseDate']:
|
||||||
|
if key in data:
|
||||||
|
software[key] = data.get(key)
|
||||||
|
result['entries'].append(software)
|
||||||
|
|
||||||
return result
|
return result
|
||||||
|
|
||||||
def get_firmware_inventory(self):
|
def get_firmware_inventory(self):
|
||||||
@@ -1589,7 +1658,10 @@ class RedfishUtils(object):
|
|||||||
|
|
||||||
# Scan the messages to see if next steps are needed
|
# Scan the messages to see if next steps are needed
|
||||||
for message in operation_results['messages']:
|
for message in operation_results['messages']:
|
||||||
message_id = message['MessageId']
|
message_id = message.get('MessageId')
|
||||||
|
if message_id is None:
|
||||||
|
# While this is invalid, treat the lack of a MessageId as "no message"
|
||||||
|
continue
|
||||||
|
|
||||||
if message_id.startswith('Update.1.') and message_id.endswith('.OperationTransitionedToJob'):
|
if message_id.startswith('Update.1.') and message_id.endswith('.OperationTransitionedToJob'):
|
||||||
# Operation rerouted to a job; update the status and handle
|
# Operation rerouted to a job; update the status and handle
|
||||||
@@ -1685,6 +1757,7 @@ class RedfishUtils(object):
|
|||||||
image_file = update_opts.get('update_image_file')
|
image_file = update_opts.get('update_image_file')
|
||||||
targets = update_opts.get('update_targets')
|
targets = update_opts.get('update_targets')
|
||||||
apply_time = update_opts.get('update_apply_time')
|
apply_time = update_opts.get('update_apply_time')
|
||||||
|
oem_params = update_opts.get('update_oem_params')
|
||||||
|
|
||||||
# Ensure the image file is provided
|
# Ensure the image file is provided
|
||||||
if not image_file:
|
if not image_file:
|
||||||
@@ -1715,6 +1788,8 @@ class RedfishUtils(object):
|
|||||||
payload["Targets"] = targets
|
payload["Targets"] = targets
|
||||||
if apply_time:
|
if apply_time:
|
||||||
payload["@Redfish.OperationApplyTime"] = apply_time
|
payload["@Redfish.OperationApplyTime"] = apply_time
|
||||||
|
if oem_params:
|
||||||
|
payload["Oem"] = oem_params
|
||||||
multipart_payload = {
|
multipart_payload = {
|
||||||
'UpdateParameters': {'content': json.dumps(payload), 'mime_type': 'application/json'},
|
'UpdateParameters': {'content': json.dumps(payload), 'mime_type': 'application/json'},
|
||||||
'UpdateFile': {'filename': image_file, 'content': image_payload, 'mime_type': 'application/octet-stream'}
|
'UpdateFile': {'filename': image_file, 'content': image_payload, 'mime_type': 'application/octet-stream'}
|
||||||
@@ -2400,7 +2475,7 @@ class RedfishUtils(object):
|
|||||||
result = {}
|
result = {}
|
||||||
properties = ['Name', 'Id', 'Description', 'FQDN', 'IPv4Addresses', 'IPv6Addresses',
|
properties = ['Name', 'Id', 'Description', 'FQDN', 'IPv4Addresses', 'IPv6Addresses',
|
||||||
'NameServers', 'MACAddress', 'PermanentMACAddress',
|
'NameServers', 'MACAddress', 'PermanentMACAddress',
|
||||||
'SpeedMbps', 'MTUSize', 'AutoNeg', 'Status']
|
'SpeedMbps', 'MTUSize', 'AutoNeg', 'Status', 'LinkStatus']
|
||||||
response = self.get_request(self.root_uri + resource_uri)
|
response = self.get_request(self.root_uri + resource_uri)
|
||||||
if response['ret'] is False:
|
if response['ret'] is False:
|
||||||
return response
|
return response
|
||||||
@@ -3289,7 +3364,7 @@ class RedfishUtils(object):
|
|||||||
result = {}
|
result = {}
|
||||||
inventory = {}
|
inventory = {}
|
||||||
# Get these entries, but does not fail if not found
|
# Get these entries, but does not fail if not found
|
||||||
properties = ['FirmwareVersion', 'ManagerType', 'Manufacturer', 'Model',
|
properties = ['Id', 'FirmwareVersion', 'ManagerType', 'Manufacturer', 'Model',
|
||||||
'PartNumber', 'PowerState', 'SerialNumber', 'Status', 'UUID']
|
'PartNumber', 'PowerState', 'SerialNumber', 'Status', 'UUID']
|
||||||
|
|
||||||
response = self.get_request(self.root_uri + manager_uri)
|
response = self.get_request(self.root_uri + manager_uri)
|
||||||
@@ -3372,6 +3447,25 @@ class RedfishUtils(object):
|
|||||||
|
|
||||||
return self.patch_request(self.root_uri + secure_boot_url, body, check_pyld=True)
|
return self.patch_request(self.root_uri + secure_boot_url, body, check_pyld=True)
|
||||||
|
|
||||||
|
def set_secure_boot(self, secure_boot_enable):
|
||||||
|
# This function enable Secure Boot on an OOB controller
|
||||||
|
|
||||||
|
response = self.get_request(self.root_uri + self.systems_uri)
|
||||||
|
if response["ret"] is False:
|
||||||
|
return response
|
||||||
|
|
||||||
|
server_details = response["data"]
|
||||||
|
secure_boot_url = server_details["SecureBoot"]["@odata.id"]
|
||||||
|
|
||||||
|
response = self.get_request(self.root_uri + secure_boot_url)
|
||||||
|
if response["ret"] is False:
|
||||||
|
return response
|
||||||
|
|
||||||
|
body = {}
|
||||||
|
body["SecureBootEnable"] = secure_boot_enable
|
||||||
|
|
||||||
|
return self.patch_request(self.root_uri + secure_boot_url, body, check_pyld=True)
|
||||||
|
|
||||||
def get_hpe_thermal_config(self):
|
def get_hpe_thermal_config(self):
|
||||||
result = {}
|
result = {}
|
||||||
key = "Thermal"
|
key = "Thermal"
|
||||||
@@ -3403,3 +3497,238 @@ class RedfishUtils(object):
|
|||||||
fan_percent_min_config = hpe.get('FanPercentMinimum')
|
fan_percent_min_config = hpe.get('FanPercentMinimum')
|
||||||
result["fan_percent_min"] = fan_percent_min_config
|
result["fan_percent_min"] = fan_percent_min_config
|
||||||
return result
|
return result
|
||||||
|
|
||||||
|
def delete_volumes(self, storage_subsystem_id, volume_ids):
|
||||||
|
# Find the Storage resource from the requested ComputerSystem resource
|
||||||
|
response = self.get_request(self.root_uri + self.systems_uri)
|
||||||
|
if response['ret'] is False:
|
||||||
|
return response
|
||||||
|
data = response['data']
|
||||||
|
storage_uri = data.get('Storage', {}).get('@odata.id')
|
||||||
|
if storage_uri is None:
|
||||||
|
return {'ret': False, 'msg': 'Storage resource not found'}
|
||||||
|
|
||||||
|
# Get Storage Collection
|
||||||
|
response = self.get_request(self.root_uri + storage_uri)
|
||||||
|
if response['ret'] is False:
|
||||||
|
return response
|
||||||
|
data = response['data']
|
||||||
|
|
||||||
|
# Collect Storage Subsystems
|
||||||
|
self.storage_subsystems_uris = [i['@odata.id'] for i in response['data'].get('Members', [])]
|
||||||
|
if not self.storage_subsystems_uris:
|
||||||
|
return {
|
||||||
|
'ret': False,
|
||||||
|
'msg': "StorageCollection's Members array is either empty or missing"}
|
||||||
|
|
||||||
|
# Matching Storage Subsystem ID with user input
|
||||||
|
self.storage_subsystem_uri = ""
|
||||||
|
for storage_subsystem_uri in self.storage_subsystems_uris:
|
||||||
|
if storage_subsystem_uri.split("/")[-2] == storage_subsystem_id:
|
||||||
|
self.storage_subsystem_uri = storage_subsystem_uri
|
||||||
|
|
||||||
|
if not self.storage_subsystem_uri:
|
||||||
|
return {
|
||||||
|
'ret': False,
|
||||||
|
'msg': "Provided Storage Subsystem ID %s does not exist on the server" % storage_subsystem_id}
|
||||||
|
|
||||||
|
# Get Volume Collection
|
||||||
|
response = self.get_request(self.root_uri + self.storage_subsystem_uri)
|
||||||
|
if response['ret'] is False:
|
||||||
|
return response
|
||||||
|
data = response['data']
|
||||||
|
|
||||||
|
response = self.get_request(self.root_uri + data['Volumes']['@odata.id'])
|
||||||
|
if response['ret'] is False:
|
||||||
|
return response
|
||||||
|
data = response['data']
|
||||||
|
|
||||||
|
# Collect Volumes
|
||||||
|
self.volume_uris = [i['@odata.id'] for i in response['data'].get('Members', [])]
|
||||||
|
if not self.volume_uris:
|
||||||
|
return {
|
||||||
|
'ret': True, 'changed': False,
|
||||||
|
'msg': "VolumeCollection's Members array is either empty or missing"}
|
||||||
|
|
||||||
|
# Delete each volume
|
||||||
|
for volume in self.volume_uris:
|
||||||
|
if volume.split("/")[-1] in volume_ids:
|
||||||
|
response = self.delete_request(self.root_uri + volume)
|
||||||
|
if response['ret'] is False:
|
||||||
|
return response
|
||||||
|
|
||||||
|
return {'ret': True, 'changed': True,
|
||||||
|
'msg': "The following volumes were deleted: %s" % str(volume_ids)}
|
||||||
|
|
||||||
|
def create_volume(self, volume_details, storage_subsystem_id):
|
||||||
|
# Find the Storage resource from the requested ComputerSystem resource
|
||||||
|
response = self.get_request(self.root_uri + self.systems_uri)
|
||||||
|
if response['ret'] is False:
|
||||||
|
return response
|
||||||
|
data = response['data']
|
||||||
|
storage_uri = data.get('Storage', {}).get('@odata.id')
|
||||||
|
if storage_uri is None:
|
||||||
|
return {'ret': False, 'msg': 'Storage resource not found'}
|
||||||
|
|
||||||
|
# Get Storage Collection
|
||||||
|
response = self.get_request(self.root_uri + storage_uri)
|
||||||
|
if response['ret'] is False:
|
||||||
|
return response
|
||||||
|
data = response['data']
|
||||||
|
|
||||||
|
# Collect Storage Subsystems
|
||||||
|
self.storage_subsystems_uris = [i['@odata.id'] for i in response['data'].get('Members', [])]
|
||||||
|
if not self.storage_subsystems_uris:
|
||||||
|
return {
|
||||||
|
'ret': False,
|
||||||
|
'msg': "StorageCollection's Members array is either empty or missing"}
|
||||||
|
|
||||||
|
# Matching Storage Subsystem ID with user input
|
||||||
|
self.storage_subsystem_uri = ""
|
||||||
|
for storage_subsystem_uri in self.storage_subsystems_uris:
|
||||||
|
if storage_subsystem_uri.split("/")[-2] == storage_subsystem_id:
|
||||||
|
self.storage_subsystem_uri = storage_subsystem_uri
|
||||||
|
|
||||||
|
if not self.storage_subsystem_uri:
|
||||||
|
return {
|
||||||
|
'ret': False,
|
||||||
|
'msg': "Provided Storage Subsystem ID %s does not exist on the server" % storage_subsystem_id}
|
||||||
|
|
||||||
|
# Validate input parameters
|
||||||
|
required_parameters = ['RAIDType', 'Drives', 'CapacityBytes']
|
||||||
|
allowed_parameters = ['DisplayName', 'InitializeMethod', 'MediaSpanCount',
|
||||||
|
'Name', 'ReadCachePolicy', 'StripSizeBytes', 'VolumeUsage', 'WriteCachePolicy']
|
||||||
|
|
||||||
|
for parameter in required_parameters:
|
||||||
|
if not volume_details.get(parameter):
|
||||||
|
return {
|
||||||
|
'ret': False,
|
||||||
|
'msg': "%s are required parameter to create a volume" % str(required_parameters)}
|
||||||
|
|
||||||
|
# Navigate to the volume uri of the correct storage subsystem
|
||||||
|
response = self.get_request(self.root_uri + self.storage_subsystem_uri)
|
||||||
|
if response['ret'] is False:
|
||||||
|
return response
|
||||||
|
data = response['data']
|
||||||
|
|
||||||
|
# Deleting any volumes of RAIDType None present on the Storage Subsystem
|
||||||
|
response = self.get_request(self.root_uri + data['Volumes']['@odata.id'])
|
||||||
|
if response['ret'] is False:
|
||||||
|
return response
|
||||||
|
volume_data = response['data']
|
||||||
|
|
||||||
|
if "Members" in volume_data:
|
||||||
|
for member in volume_data["Members"]:
|
||||||
|
response = self.get_request(self.root_uri + member['@odata.id'])
|
||||||
|
if response['ret'] is False:
|
||||||
|
return response
|
||||||
|
member_data = response['data']
|
||||||
|
|
||||||
|
if member_data["RAIDType"] == "None":
|
||||||
|
response = self.delete_request(self.root_uri + member['@odata.id'])
|
||||||
|
if response['ret'] is False:
|
||||||
|
return response
|
||||||
|
|
||||||
|
# Construct payload and issue POST command to create volume
|
||||||
|
volume_details["Links"] = {}
|
||||||
|
volume_details["Links"]["Drives"] = []
|
||||||
|
for drive in volume_details["Drives"]:
|
||||||
|
volume_details["Links"]["Drives"].append({"@odata.id": drive})
|
||||||
|
del volume_details["Drives"]
|
||||||
|
payload = volume_details
|
||||||
|
response = self.post_request(self.root_uri + data['Volumes']['@odata.id'], payload)
|
||||||
|
if response['ret'] is False:
|
||||||
|
return response
|
||||||
|
|
||||||
|
return {'ret': True, 'changed': True,
|
||||||
|
'msg': "Volume Created"}
|
||||||
|
|
||||||
|
def get_bios_registries(self):
|
||||||
|
# Get /redfish/v1
|
||||||
|
response = self.get_request(self.root_uri + self.systems_uri)
|
||||||
|
if not response["ret"]:
|
||||||
|
return response
|
||||||
|
|
||||||
|
server_details = response["data"]
|
||||||
|
|
||||||
|
# Get Registries URI
|
||||||
|
if "Bios" not in server_details:
|
||||||
|
msg = "Getting BIOS URI failed, Key 'Bios' not found in /redfish/v1/Systems/1/ response: %s"
|
||||||
|
return {
|
||||||
|
"ret": False,
|
||||||
|
"msg": msg % str(server_details)
|
||||||
|
}
|
||||||
|
|
||||||
|
bios_uri = server_details["Bios"]["@odata.id"]
|
||||||
|
bios_resp = self.get_request(self.root_uri + bios_uri)
|
||||||
|
if not bios_resp["ret"]:
|
||||||
|
return bios_resp
|
||||||
|
|
||||||
|
bios_data = bios_resp["data"]
|
||||||
|
attribute_registry = bios_data["AttributeRegistry"]
|
||||||
|
|
||||||
|
reg_uri = self.root_uri + self.service_root + "Registries/" + attribute_registry
|
||||||
|
reg_resp = self.get_request(reg_uri)
|
||||||
|
if not reg_resp["ret"]:
|
||||||
|
return reg_resp
|
||||||
|
|
||||||
|
reg_data = reg_resp["data"]
|
||||||
|
|
||||||
|
# Get BIOS attribute registry URI
|
||||||
|
lst = []
|
||||||
|
|
||||||
|
# Get the location URI
|
||||||
|
response = self.check_location_uri(reg_data, reg_uri)
|
||||||
|
if not response["ret"]:
|
||||||
|
return response
|
||||||
|
|
||||||
|
rsp_data, rsp_uri = response["rsp_data"], response["rsp_uri"]
|
||||||
|
|
||||||
|
if "RegistryEntries" not in rsp_data:
|
||||||
|
return {
|
||||||
|
"msg": "'RegistryEntries' not present in %s response, %s" % (rsp_uri, str(rsp_data)),
|
||||||
|
"ret": False
|
||||||
|
}
|
||||||
|
|
||||||
|
return {
|
||||||
|
"bios_registry": rsp_data,
|
||||||
|
"bios_registry_uri": rsp_uri,
|
||||||
|
"ret": True
|
||||||
|
}
|
||||||
|
|
||||||
|
def check_location_uri(self, resp_data, resp_uri):
|
||||||
|
# Get the location URI response
|
||||||
|
# return {"msg": self.creds, "ret": False}
|
||||||
|
vendor = self._get_vendor()['Vendor']
|
||||||
|
rsp_uri = ""
|
||||||
|
for loc in resp_data['Location']:
|
||||||
|
if loc['Language'] == "en":
|
||||||
|
rsp_uri = loc['Uri']
|
||||||
|
if vendor == 'HPE':
|
||||||
|
# WORKAROUND
|
||||||
|
# HPE systems with iLO 4 will have BIOS Attribute Registries location URI as a dictionary with key 'extref'
|
||||||
|
# Hence adding condition to fetch the Uri
|
||||||
|
if isinstance(loc['Uri'], dict) and "extref" in loc['Uri'].keys():
|
||||||
|
rsp_uri = loc['Uri']['extref']
|
||||||
|
if not rsp_uri:
|
||||||
|
msg = "Language 'en' not found in BIOS Attribute Registries location, URI: %s, response: %s"
|
||||||
|
return {
|
||||||
|
"ret": False,
|
||||||
|
"msg": msg % (resp_uri, str(resp_data))
|
||||||
|
}
|
||||||
|
|
||||||
|
res = self.get_request(self.root_uri + rsp_uri)
|
||||||
|
if res['ret'] is False:
|
||||||
|
# WORKAROUND
|
||||||
|
# HPE systems with iLO 4 or iLO5 compresses (gzip) for some URIs
|
||||||
|
# Hence adding encoding to the header
|
||||||
|
if vendor == 'HPE':
|
||||||
|
override_headers = {"Accept-Encoding": "gzip"}
|
||||||
|
res = self.get_request(self.root_uri + rsp_uri, override_headers=override_headers)
|
||||||
|
if res['ret']:
|
||||||
|
return {
|
||||||
|
"ret": True,
|
||||||
|
"rsp_data": res["data"],
|
||||||
|
"rsp_uri": rsp_uri
|
||||||
|
}
|
||||||
|
return res
|
||||||
|
|||||||
@@ -303,7 +303,7 @@ class Scaleway(object):
|
|||||||
wait_timeout = self.module.params["wait_timeout"]
|
wait_timeout = self.module.params["wait_timeout"]
|
||||||
wait_sleep_time = self.module.params["wait_sleep_time"]
|
wait_sleep_time = self.module.params["wait_sleep_time"]
|
||||||
|
|
||||||
# Prevent requesting the ressource status too soon
|
# Prevent requesting the resource status too soon
|
||||||
time.sleep(wait_sleep_time)
|
time.sleep(wait_sleep_time)
|
||||||
|
|
||||||
start = datetime.datetime.utcnow()
|
start = datetime.datetime.utcnow()
|
||||||
|
|||||||
@@ -49,7 +49,7 @@ class _Variable(object):
|
|||||||
output (bool, optional): flag indicating whether the variable should be in the output of the module. Defaults to None.
|
output (bool, optional): flag indicating whether the variable should be in the output of the module. Defaults to None.
|
||||||
diff (bool, optional): flag indicating whether to generate diff mode output for this variable. Defaults to None.
|
diff (bool, optional): flag indicating whether to generate diff mode output for this variable. Defaults to None.
|
||||||
change (bool, optional): flag indicating whether to track if changes happened to this variable. Defaults to None.
|
change (bool, optional): flag indicating whether to track if changes happened to this variable. Defaults to None.
|
||||||
fact (bool, optional): flag indicating whether the varaiable should be exposed as a fact of the module. Defaults to None.
|
fact (bool, optional): flag indicating whether the variable should be exposed as a fact of the module. Defaults to None.
|
||||||
initial_value (any, optional): initial value of the variable, to be used with `change`. Defaults to NOTHING.
|
initial_value (any, optional): initial value of the variable, to be used with `change`. Defaults to NOTHING.
|
||||||
verbosity (int, optional): level of verbosity in which this variable is reported by the module as `output`, `fact` or `diff`. Defaults to None.
|
verbosity (int, optional): level of verbosity in which this variable is reported by the module as `output`, `fact` or `diff`. Defaults to None.
|
||||||
"""
|
"""
|
||||||
@@ -127,7 +127,7 @@ class VarDict(object):
|
|||||||
output (bool, optional): flag indicating whether the variable should be in the output of the module. Defaults to None.
|
output (bool, optional): flag indicating whether the variable should be in the output of the module. Defaults to None.
|
||||||
diff (bool, optional): flag indicating whether to generate diff mode output for this variable. Defaults to None.
|
diff (bool, optional): flag indicating whether to generate diff mode output for this variable. Defaults to None.
|
||||||
change (bool, optional): flag indicating whether to track if changes happened to this variable. Defaults to None.
|
change (bool, optional): flag indicating whether to track if changes happened to this variable. Defaults to None.
|
||||||
fact (bool, optional): flag indicating whether the varaiable should be exposed as a fact of the module. Defaults to None.
|
fact (bool, optional): flag indicating whether the variable should be exposed as a fact of the module. Defaults to None.
|
||||||
initial_value (any, optional): initial value of the variable, to be used with `change`. Defaults to NOTHING.
|
initial_value (any, optional): initial value of the variable, to be used with `change`. Defaults to NOTHING.
|
||||||
verbosity (int, optional): level of verbosity in which this variable is reported by the module as `output`, `fact` or `diff`. Defaults to None.
|
verbosity (int, optional): level of verbosity in which this variable is reported by the module as `output`, `fact` or `diff`. Defaults to None.
|
||||||
"""
|
"""
|
||||||
|
|||||||
@@ -182,7 +182,7 @@ class WdcRedfishUtils(RedfishUtils):
|
|||||||
|
|
||||||
:param str bundle_uri: HTTP URI of the firmware bundle.
|
:param str bundle_uri: HTTP URI of the firmware bundle.
|
||||||
:return: Firmware version number contained in the bundle, and whether or not the bundle is multi-tenant.
|
:return: Firmware version number contained in the bundle, and whether or not the bundle is multi-tenant.
|
||||||
Either value will be None if unable to deterine.
|
Either value will be None if unable to determine.
|
||||||
:rtype: str or None, bool or None
|
:rtype: str or None, bool or None
|
||||||
"""
|
"""
|
||||||
bundle_temp_filename = fetch_file(module=self.module,
|
bundle_temp_filename = fetch_file(module=self.module,
|
||||||
|
|||||||
@@ -204,7 +204,7 @@ def main():
|
|||||||
":" + module.params['action'] + ":" + module.params['command']
|
":" + module.params['action'] + ":" + module.params['command']
|
||||||
|
|
||||||
# If current entry exists or fields are different(if the entry does not
|
# If current entry exists or fields are different(if the entry does not
|
||||||
# exists, then the entry wil be created
|
# exists, then the entry will be created
|
||||||
if (not current_entry['exist']) or (
|
if (not current_entry['exist']) or (
|
||||||
module.params['runlevel'] != current_entry['runlevel'] or
|
module.params['runlevel'] != current_entry['runlevel'] or
|
||||||
module.params['action'] != current_entry['action'] or
|
module.params['action'] != current_entry['action'] or
|
||||||
|
|||||||
@@ -154,7 +154,7 @@ def _get_ctl_binary(module):
|
|||||||
if ctl_binary is not None:
|
if ctl_binary is not None:
|
||||||
return ctl_binary
|
return ctl_binary
|
||||||
|
|
||||||
module.fail_json(msg="Neither of apache2ctl nor apachctl found. At least one apache control binary is necessary.")
|
module.fail_json(msg="Neither of apache2ctl nor apachectl found. At least one apache control binary is necessary.")
|
||||||
|
|
||||||
|
|
||||||
def _module_is_enabled(module):
|
def _module_is_enabled(module):
|
||||||
|
|||||||
@@ -25,6 +25,12 @@ attributes:
|
|||||||
diff_mode:
|
diff_mode:
|
||||||
support: none
|
support: none
|
||||||
options:
|
options:
|
||||||
|
executable:
|
||||||
|
description:
|
||||||
|
- Path to the C(cargo) installed in the system.
|
||||||
|
- If not specified, the module will look C(cargo) in E(PATH).
|
||||||
|
type: path
|
||||||
|
version_added: 7.5.0
|
||||||
name:
|
name:
|
||||||
description:
|
description:
|
||||||
- The name of a Rust package to install.
|
- The name of a Rust package to install.
|
||||||
@@ -44,6 +50,14 @@ options:
|
|||||||
try to install all of them in this version.
|
try to install all of them in this version.
|
||||||
type: str
|
type: str
|
||||||
required: false
|
required: false
|
||||||
|
locked:
|
||||||
|
description:
|
||||||
|
- Install with locked dependencies.
|
||||||
|
- This is only used when installing packages.
|
||||||
|
required: false
|
||||||
|
type: bool
|
||||||
|
default: false
|
||||||
|
version_added: 7.5.0
|
||||||
state:
|
state:
|
||||||
description:
|
description:
|
||||||
- The state of the Rust package.
|
- The state of the Rust package.
|
||||||
@@ -52,7 +66,7 @@ options:
|
|||||||
default: present
|
default: present
|
||||||
choices: [ "present", "absent", "latest" ]
|
choices: [ "present", "absent", "latest" ]
|
||||||
requirements:
|
requirements:
|
||||||
- cargo installed in bin path (recommended /usr/local/bin)
|
- cargo installed
|
||||||
"""
|
"""
|
||||||
|
|
||||||
EXAMPLES = r"""
|
EXAMPLES = r"""
|
||||||
@@ -60,6 +74,11 @@ EXAMPLES = r"""
|
|||||||
community.general.cargo:
|
community.general.cargo:
|
||||||
name: ludusavi
|
name: ludusavi
|
||||||
|
|
||||||
|
- name: Install "ludusavi" Rust package with locked dependencies
|
||||||
|
community.general.cargo:
|
||||||
|
name: ludusavi
|
||||||
|
locked: true
|
||||||
|
|
||||||
- name: Install "ludusavi" Rust package in version 0.10.0
|
- name: Install "ludusavi" Rust package in version 0.10.0
|
||||||
community.general.cargo:
|
community.general.cargo:
|
||||||
name: ludusavi
|
name: ludusavi
|
||||||
@@ -90,12 +109,12 @@ from ansible.module_utils.basic import AnsibleModule
|
|||||||
class Cargo(object):
|
class Cargo(object):
|
||||||
def __init__(self, module, **kwargs):
|
def __init__(self, module, **kwargs):
|
||||||
self.module = module
|
self.module = module
|
||||||
|
self.executable = [kwargs["executable"] or module.get_bin_path("cargo", True)]
|
||||||
self.name = kwargs["name"]
|
self.name = kwargs["name"]
|
||||||
self.path = kwargs["path"]
|
self.path = kwargs["path"]
|
||||||
self.state = kwargs["state"]
|
self.state = kwargs["state"]
|
||||||
self.version = kwargs["version"]
|
self.version = kwargs["version"]
|
||||||
|
self.locked = kwargs["locked"]
|
||||||
self.executable = [module.get_bin_path("cargo", True)]
|
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def path(self):
|
def path(self):
|
||||||
@@ -132,6 +151,8 @@ class Cargo(object):
|
|||||||
def install(self, packages=None):
|
def install(self, packages=None):
|
||||||
cmd = ["install"]
|
cmd = ["install"]
|
||||||
cmd.extend(packages or self.name)
|
cmd.extend(packages or self.name)
|
||||||
|
if self.locked:
|
||||||
|
cmd.append("--locked")
|
||||||
if self.path:
|
if self.path:
|
||||||
cmd.append("--root")
|
cmd.append("--root")
|
||||||
cmd.append(self.path)
|
cmd.append(self.path)
|
||||||
@@ -160,15 +181,16 @@ class Cargo(object):
|
|||||||
|
|
||||||
def main():
|
def main():
|
||||||
arg_spec = dict(
|
arg_spec = dict(
|
||||||
|
executable=dict(default=None, type="path"),
|
||||||
name=dict(required=True, type="list", elements="str"),
|
name=dict(required=True, type="list", elements="str"),
|
||||||
path=dict(default=None, type="path"),
|
path=dict(default=None, type="path"),
|
||||||
state=dict(default="present", choices=["present", "absent", "latest"]),
|
state=dict(default="present", choices=["present", "absent", "latest"]),
|
||||||
version=dict(default=None, type="str"),
|
version=dict(default=None, type="str"),
|
||||||
|
locked=dict(default=False, type="bool"),
|
||||||
)
|
)
|
||||||
module = AnsibleModule(argument_spec=arg_spec, supports_check_mode=True)
|
module = AnsibleModule(argument_spec=arg_spec, supports_check_mode=True)
|
||||||
|
|
||||||
name = module.params["name"]
|
name = module.params["name"]
|
||||||
path = module.params["path"]
|
|
||||||
state = module.params["state"]
|
state = module.params["state"]
|
||||||
version = module.params["version"]
|
version = module.params["version"]
|
||||||
|
|
||||||
@@ -180,7 +202,7 @@ def main():
|
|||||||
LANG="C", LC_ALL="C", LC_MESSAGES="C", LC_CTYPE="C"
|
LANG="C", LC_ALL="C", LC_MESSAGES="C", LC_CTYPE="C"
|
||||||
)
|
)
|
||||||
|
|
||||||
cargo = Cargo(module, name=name, path=path, state=state, version=version)
|
cargo = Cargo(module, **module.params)
|
||||||
changed, out, err = False, None, None
|
changed, out, err = False, None, None
|
||||||
installed_packages = cargo.get_installed()
|
installed_packages = cargo.get_installed()
|
||||||
if state == "present":
|
if state == "present":
|
||||||
|
|||||||
@@ -1501,7 +1501,7 @@ class ClcServer:
|
|||||||
return aa_policy_id
|
return aa_policy_id
|
||||||
|
|
||||||
#
|
#
|
||||||
# This is the function that gets patched to the Request.server object using a lamda closure
|
# This is the function that gets patched to the Request.server object using a lambda closure
|
||||||
#
|
#
|
||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
|
|||||||
@@ -138,6 +138,7 @@ options:
|
|||||||
description:
|
description:
|
||||||
- The type of DNS record to create. Required if O(state=present).
|
- The type of DNS record to create. Required if O(state=present).
|
||||||
- O(type=DS), O(type=SSHFP), and O(type=TLSA) were added in Ansible 2.7.
|
- O(type=DS), O(type=SSHFP), and O(type=TLSA) were added in Ansible 2.7.
|
||||||
|
- Note that V(SPF) is no longer supported by CloudFlare. Support for it will be removed from community.general 9.0.0.
|
||||||
type: str
|
type: str
|
||||||
choices: [ A, AAAA, CNAME, DS, MX, NS, SPF, SRV, SSHFP, TLSA, TXT ]
|
choices: [ A, AAAA, CNAME, DS, MX, NS, SPF, SRV, SSHFP, TLSA, TXT ]
|
||||||
value:
|
value:
|
||||||
@@ -613,7 +614,7 @@ class CloudflareAPI(object):
|
|||||||
content = str(params['key_tag']) + '\t' + str(params['algorithm']) + '\t' + str(params['hash_type']) + '\t' + params['value']
|
content = str(params['key_tag']) + '\t' + str(params['algorithm']) + '\t' + str(params['hash_type']) + '\t' + params['value']
|
||||||
elif params['type'] == 'SSHFP':
|
elif params['type'] == 'SSHFP':
|
||||||
if not (params['value'] is None or params['value'] == ''):
|
if not (params['value'] is None or params['value'] == ''):
|
||||||
content = str(params['algorithm']) + '\t' + str(params['hash_type']) + '\t' + params['value']
|
content = str(params['algorithm']) + ' ' + str(params['hash_type']) + ' ' + params['value'].upper()
|
||||||
elif params['type'] == 'TLSA':
|
elif params['type'] == 'TLSA':
|
||||||
if not (params['value'] is None or params['value'] == ''):
|
if not (params['value'] is None or params['value'] == ''):
|
||||||
content = str(params['cert_usage']) + '\t' + str(params['selector']) + '\t' + str(params['hash_type']) + '\t' + params['value']
|
content = str(params['cert_usage']) + '\t' + str(params['selector']) + '\t' + str(params['hash_type']) + '\t' + params['value']
|
||||||
@@ -726,7 +727,7 @@ class CloudflareAPI(object):
|
|||||||
if (attr is None) or (attr == ''):
|
if (attr is None) or (attr == ''):
|
||||||
self.module.fail_json(msg="You must provide algorithm, hash_type and a value to create this record type")
|
self.module.fail_json(msg="You must provide algorithm, hash_type and a value to create this record type")
|
||||||
sshfp_data = {
|
sshfp_data = {
|
||||||
"fingerprint": params['value'],
|
"fingerprint": params['value'].upper(),
|
||||||
"type": params['hash_type'],
|
"type": params['hash_type'],
|
||||||
"algorithm": params['algorithm'],
|
"algorithm": params['algorithm'],
|
||||||
}
|
}
|
||||||
@@ -736,7 +737,7 @@ class CloudflareAPI(object):
|
|||||||
'data': sshfp_data,
|
'data': sshfp_data,
|
||||||
"ttl": params['ttl'],
|
"ttl": params['ttl'],
|
||||||
}
|
}
|
||||||
search_value = str(params['algorithm']) + '\t' + str(params['hash_type']) + '\t' + params['value']
|
search_value = str(params['algorithm']) + ' ' + str(params['hash_type']) + ' ' + params['value']
|
||||||
|
|
||||||
if params['type'] == 'TLSA':
|
if params['type'] == 'TLSA':
|
||||||
for attr in [params['port'], params['proto'], params['cert_usage'], params['selector'], params['hash_type'], params['value']]:
|
for attr in [params['port'], params['proto'], params['cert_usage'], params['selector'], params['hash_type'], params['value']]:
|
||||||
|
|||||||
@@ -170,10 +170,15 @@ def get_available_options(module, command='install'):
|
|||||||
return command_help_json['definition']['options']
|
return command_help_json['definition']['options']
|
||||||
|
|
||||||
|
|
||||||
def composer_command(module, command, arguments="", options=None, global_command=False):
|
def composer_command(module, command, arguments="", options=None):
|
||||||
if options is None:
|
if options is None:
|
||||||
options = []
|
options = []
|
||||||
|
|
||||||
|
global_command = module.params['global_command']
|
||||||
|
|
||||||
|
if not global_command:
|
||||||
|
options.extend(['--working-dir', "'%s'" % module.params['working_dir']])
|
||||||
|
|
||||||
if module.params['executable'] is None:
|
if module.params['executable'] is None:
|
||||||
php_path = module.get_bin_path("php", True, ["/usr/local/bin"])
|
php_path = module.get_bin_path("php", True, ["/usr/local/bin"])
|
||||||
else:
|
else:
|
||||||
@@ -217,7 +222,6 @@ def main():
|
|||||||
module.fail_json(msg="Use the 'arguments' param for passing arguments with the 'command'")
|
module.fail_json(msg="Use the 'arguments' param for passing arguments with the 'command'")
|
||||||
|
|
||||||
arguments = module.params['arguments']
|
arguments = module.params['arguments']
|
||||||
global_command = module.params['global_command']
|
|
||||||
available_options = get_available_options(module=module, command=command)
|
available_options = get_available_options(module=module, command=command)
|
||||||
|
|
||||||
options = []
|
options = []
|
||||||
@@ -234,9 +238,6 @@ def main():
|
|||||||
option = "--%s" % option
|
option = "--%s" % option
|
||||||
options.append(option)
|
options.append(option)
|
||||||
|
|
||||||
if not global_command:
|
|
||||||
options.extend(['--working-dir', "'%s'" % module.params['working_dir']])
|
|
||||||
|
|
||||||
option_params = {
|
option_params = {
|
||||||
'prefer_source': 'prefer-source',
|
'prefer_source': 'prefer-source',
|
||||||
'prefer_dist': 'prefer-dist',
|
'prefer_dist': 'prefer-dist',
|
||||||
@@ -260,7 +261,7 @@ def main():
|
|||||||
else:
|
else:
|
||||||
module.exit_json(skipped=True, msg="command '%s' does not support check mode, skipping" % command)
|
module.exit_json(skipped=True, msg="command '%s' does not support check mode, skipping" % command)
|
||||||
|
|
||||||
rc, out, err = composer_command(module, command, arguments, options, global_command)
|
rc, out, err = composer_command(module, command, arguments, options)
|
||||||
|
|
||||||
if rc != 0:
|
if rc != 0:
|
||||||
output = parse_out(err)
|
output = parse_out(err)
|
||||||
|
|||||||
644
plugins/modules/consul_role.py
Normal file
644
plugins/modules/consul_role.py
Normal file
@@ -0,0 +1,644 @@
|
|||||||
|
#!/usr/bin/python
|
||||||
|
# -*- coding: utf-8 -*-
|
||||||
|
|
||||||
|
# Copyright (c) 2022, Håkon Lerring
|
||||||
|
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||||
|
# SPDX-License-Identifier: GPL-3.0-or-later
|
||||||
|
|
||||||
|
from __future__ import absolute_import, division, print_function
|
||||||
|
__metaclass__ = type
|
||||||
|
|
||||||
|
DOCUMENTATION = '''
|
||||||
|
module: consul_role
|
||||||
|
short_description: Manipulate Consul roles
|
||||||
|
version_added: 7.5.0
|
||||||
|
description:
|
||||||
|
- Allows the addition, modification and deletion of roles in a consul
|
||||||
|
cluster via the agent. For more details on using and configuring ACLs,
|
||||||
|
see U(https://www.consul.io/docs/guides/acl.html).
|
||||||
|
author:
|
||||||
|
- Håkon Lerring (@Hakon)
|
||||||
|
extends_documentation_fragment:
|
||||||
|
- community.general.attributes
|
||||||
|
attributes:
|
||||||
|
check_mode:
|
||||||
|
support: full
|
||||||
|
diff_mode:
|
||||||
|
support: none
|
||||||
|
options:
|
||||||
|
name:
|
||||||
|
description:
|
||||||
|
- A name used to identify the role.
|
||||||
|
required: true
|
||||||
|
type: str
|
||||||
|
state:
|
||||||
|
description:
|
||||||
|
- whether the role should be present or absent.
|
||||||
|
required: false
|
||||||
|
choices: ['present', 'absent']
|
||||||
|
default: present
|
||||||
|
type: str
|
||||||
|
description:
|
||||||
|
description:
|
||||||
|
- Description of the role.
|
||||||
|
- If not specified, the assigned description will not be changed.
|
||||||
|
required: false
|
||||||
|
type: str
|
||||||
|
policies:
|
||||||
|
type: list
|
||||||
|
elements: dict
|
||||||
|
description:
|
||||||
|
- List of policies to attach to the role. Each policy is a dict.
|
||||||
|
- If the parameter is left blank, any policies currently assigned will not be changed.
|
||||||
|
- Any empty array (V([])) will clear any policies previously set.
|
||||||
|
required: false
|
||||||
|
suboptions:
|
||||||
|
name:
|
||||||
|
description:
|
||||||
|
- The name of the policy to attach to this role; see M(community.general.consul_policy) for more info.
|
||||||
|
- Either this or O(policies[].id) must be specified.
|
||||||
|
type: str
|
||||||
|
id:
|
||||||
|
description:
|
||||||
|
- The ID of the policy to attach to this role; see M(community.general.consul_policy) for more info.
|
||||||
|
- Either this or O(policies[].name) must be specified.
|
||||||
|
type: str
|
||||||
|
service_identities:
|
||||||
|
type: list
|
||||||
|
elements: dict
|
||||||
|
description:
|
||||||
|
- List of service identities to attach to the role.
|
||||||
|
- If not specified, any service identities currently assigned will not be changed.
|
||||||
|
- If the parameter is an empty array (V([])), any node identities assigned will be unassigned.
|
||||||
|
required: false
|
||||||
|
suboptions:
|
||||||
|
name:
|
||||||
|
description:
|
||||||
|
- The name of the node.
|
||||||
|
- Must not be longer than 256 characters, must start and end with a lowercase alphanumeric character.
|
||||||
|
- May only contain lowercase alphanumeric characters as well as - and _.
|
||||||
|
type: str
|
||||||
|
required: true
|
||||||
|
datacenters:
|
||||||
|
description:
|
||||||
|
- The datacenters the policies will be effective.
|
||||||
|
- This will result in effective policy only being valid in this datacenter.
|
||||||
|
- If an empty array (V([])) is specified, the policies will valid in all datacenters.
|
||||||
|
- including those which do not yet exist but may in the future.
|
||||||
|
type: list
|
||||||
|
elements: str
|
||||||
|
required: true
|
||||||
|
node_identities:
|
||||||
|
type: list
|
||||||
|
elements: dict
|
||||||
|
description:
|
||||||
|
- List of node identities to attach to the role.
|
||||||
|
- If not specified, any node identities currently assigned will not be changed.
|
||||||
|
- If the parameter is an empty array (V([])), any node identities assigned will be unassigned.
|
||||||
|
required: false
|
||||||
|
suboptions:
|
||||||
|
name:
|
||||||
|
description:
|
||||||
|
- The name of the node.
|
||||||
|
- Must not be longer than 256 characters, must start and end with a lowercase alphanumeric character.
|
||||||
|
- May only contain lowercase alphanumeric characters as well as - and _.
|
||||||
|
type: str
|
||||||
|
required: true
|
||||||
|
datacenter:
|
||||||
|
description:
|
||||||
|
- The nodes datacenter.
|
||||||
|
- This will result in effective policy only being valid in this datacenter.
|
||||||
|
type: str
|
||||||
|
required: true
|
||||||
|
host:
|
||||||
|
description:
|
||||||
|
- Host of the consul agent, defaults to V(localhost).
|
||||||
|
required: false
|
||||||
|
default: localhost
|
||||||
|
type: str
|
||||||
|
port:
|
||||||
|
type: int
|
||||||
|
description:
|
||||||
|
- The port on which the consul agent is running.
|
||||||
|
required: false
|
||||||
|
default: 8500
|
||||||
|
scheme:
|
||||||
|
description:
|
||||||
|
- The protocol scheme on which the consul agent is running.
|
||||||
|
required: false
|
||||||
|
default: http
|
||||||
|
type: str
|
||||||
|
token:
|
||||||
|
description:
|
||||||
|
- A management token is required to manipulate the roles.
|
||||||
|
type: str
|
||||||
|
validate_certs:
|
||||||
|
type: bool
|
||||||
|
description:
|
||||||
|
- Whether to verify the TLS certificate of the consul agent.
|
||||||
|
required: false
|
||||||
|
default: true
|
||||||
|
requirements:
|
||||||
|
- requests
|
||||||
|
'''
|
||||||
|
|
||||||
|
EXAMPLES = """
|
||||||
|
- name: Create a role with 2 policies
|
||||||
|
community.general.consul_role:
|
||||||
|
host: consul1.example.com
|
||||||
|
token: some_management_acl
|
||||||
|
name: foo-role
|
||||||
|
policies:
|
||||||
|
- id: 783beef3-783f-f41f-7422-7087dc272765
|
||||||
|
- name: "policy-1"
|
||||||
|
|
||||||
|
- name: Create a role with service identity
|
||||||
|
community.general.consul_role:
|
||||||
|
host: consul1.example.com
|
||||||
|
token: some_management_acl
|
||||||
|
name: foo-role-2
|
||||||
|
service_identities:
|
||||||
|
- name: web
|
||||||
|
datacenters:
|
||||||
|
- dc1
|
||||||
|
|
||||||
|
- name: Create a role with node identity
|
||||||
|
community.general.consul_role:
|
||||||
|
host: consul1.example.com
|
||||||
|
token: some_management_acl
|
||||||
|
name: foo-role-3
|
||||||
|
node_identities:
|
||||||
|
- name: node-1
|
||||||
|
datacenter: dc2
|
||||||
|
|
||||||
|
- name: Remove a role
|
||||||
|
community.general.consul_role:
|
||||||
|
host: consul1.example.com
|
||||||
|
token: some_management_acl
|
||||||
|
name: foo-role-3
|
||||||
|
state: absent
|
||||||
|
"""
|
||||||
|
|
||||||
|
RETURN = """
|
||||||
|
role:
|
||||||
|
description: The role object.
|
||||||
|
returned: success
|
||||||
|
type: dict
|
||||||
|
sample:
|
||||||
|
{
|
||||||
|
"CreateIndex": 39,
|
||||||
|
"Description": "",
|
||||||
|
"Hash": "Trt0QJtxVEfvTTIcdTUbIJRr6Dsi6E4EcwSFxx9tCYM=",
|
||||||
|
"ID": "9a300b8d-48db-b720-8544-a37c0f5dafb5",
|
||||||
|
"ModifyIndex": 39,
|
||||||
|
"Name": "foo-role",
|
||||||
|
"Policies": [
|
||||||
|
{"ID": "b1a00172-d7a1-0e66-a12e-7a4045c4b774", "Name": "foo-access"}
|
||||||
|
]
|
||||||
|
}
|
||||||
|
operation:
|
||||||
|
description: The operation performed on the role.
|
||||||
|
returned: changed
|
||||||
|
type: str
|
||||||
|
sample: update
|
||||||
|
"""
|
||||||
|
|
||||||
|
from ansible.module_utils.basic import AnsibleModule
|
||||||
|
from ansible.module_utils.basic import missing_required_lib
|
||||||
|
from ansible_collections.community.general.plugins.module_utils.consul import (
|
||||||
|
get_consul_url, get_auth_headers, handle_consul_response_error)
|
||||||
|
import traceback
|
||||||
|
|
||||||
|
REQUESTS_IMP_ERR = None
|
||||||
|
|
||||||
|
try:
|
||||||
|
from requests.exceptions import ConnectionError
|
||||||
|
import requests
|
||||||
|
HAS_REQUESTS = True
|
||||||
|
except ImportError:
|
||||||
|
HAS_REQUESTS = False
|
||||||
|
REQUESTS_IMP_ERR = traceback.format_exc()
|
||||||
|
|
||||||
|
# Module option names, kept as constants so the argument spec and the code
# reading module.params cannot drift apart.
TOKEN_PARAMETER_NAME = "token"
HOST_PARAMETER_NAME = "host"
SCHEME_PARAMETER_NAME = "scheme"
VALIDATE_CERTS_PARAMETER_NAME = "validate_certs"
NAME_PARAMETER_NAME = "name"
DESCRIPTION_PARAMETER_NAME = "description"
PORT_PARAMETER_NAME = "port"
POLICIES_PARAMETER_NAME = "policies"
SERVICE_IDENTITIES_PARAMETER_NAME = "service_identities"
NODE_IDENTITIES_PARAMETER_NAME = "node_identities"
STATE_PARAMETER_NAME = "state"

# Valid values for the 'state' option.
PRESENT_STATE_VALUE = "present"
ABSENT_STATE_VALUE = "absent"

# Operation names reported back to the user in the module result.
REMOVE_OPERATION = "remove"
UPDATE_OPERATION = "update"
CREATE_OPERATION = "create"

# Element spec for the 'policies' option: a policy link is identified by
# either its name or its id (mutual exclusivity is enforced in _ARGUMENT_SPEC).
POLICY_RULE_SPEC = dict(
    name=dict(type='str'),
    id=dict(type='str'),
)

# Element spec for the 'node_identities' option.
NODE_ID_RULE_SPEC = dict(
    name=dict(type='str', required=True),
    datacenter=dict(type='str', required=True),
)

# Element spec for the 'service_identities' option.
SERVICE_ID_RULE_SPEC = dict(
    name=dict(type='str', required=True),
    datacenters=dict(type='list', elements='str', required=True),
)

# Argument spec passed to AnsibleModule in main().
_ARGUMENT_SPEC = {
    TOKEN_PARAMETER_NAME: dict(no_log=True),
    PORT_PARAMETER_NAME: dict(default=8500, type='int'),
    HOST_PARAMETER_NAME: dict(default='localhost'),
    SCHEME_PARAMETER_NAME: dict(default='http'),
    VALIDATE_CERTS_PARAMETER_NAME: dict(type='bool', default=True),
    NAME_PARAMETER_NAME: dict(required=True),
    DESCRIPTION_PARAMETER_NAME: dict(required=False, type='str', default=None),
    POLICIES_PARAMETER_NAME: dict(type='list', elements='dict', options=POLICY_RULE_SPEC,
                                  mutually_exclusive=[('name', 'id')], required_one_of=[('name', 'id')], default=None),
    SERVICE_IDENTITIES_PARAMETER_NAME: dict(type='list', elements='dict', options=SERVICE_ID_RULE_SPEC, default=None),
    NODE_IDENTITIES_PARAMETER_NAME: dict(type='list', elements='dict', options=NODE_ID_RULE_SPEC, default=None),
    STATE_PARAMETER_NAME: dict(default=PRESENT_STATE_VALUE, choices=[PRESENT_STATE_VALUE, ABSENT_STATE_VALUE]),
}
|
||||||
|
|
||||||
|
|
||||||
|
def compare_consul_api_role_policy_objects(first, second):
    """
    Compare two lists of role-policy dicts, ignoring the 'ID' element of
    each entry (Consul fills in IDs server-side, so they cannot be compared
    against user-supplied policy links).

    Unlike the previous in-place implementation, the input lists and the
    dicts they contain are left unmodified, so callers' data (including the
    check-mode result preview) keeps its IDs.

    :param first: list of dicts describing role policies
    :param second: list of dicts describing role policies
    :return: True if the lists are equal once 'ID' keys are ignored
    """
    def _without_id(policies):
        # Build stripped copies instead of popping keys from the originals.
        return [dict((k, v) for k, v in p.items() if k != 'ID') for p in policies]

    return _without_id(first) == _without_id(second)
|
||||||
|
|
||||||
|
|
||||||
|
def update_role(role, configuration):
    """
    Update an existing ACL role via PUT /acl/role/<id>.

    Options the user omitted (description/policies/service_identities/
    node_identities) are backfilled from the existing role so the PUT does
    not wipe them.  In check mode no request is made; the change detection
    is done locally against the existing role.

    :param role: the existing role object as returned by the Consul API
    :param configuration: the Configuration for this run
    :return: Output with changed flag, operation name and resulting role
    """
    url = '%s/acl/role/%s' % (get_consul_url(configuration),
                              role['ID'])
    headers = get_auth_headers(configuration)

    update_role_data = {
        'Name': configuration.name,
        'Description': configuration.description,
    }

    # check if the user omitted the description, policies, service identities, or node identities

    description_specified = configuration.description is not None

    # Configuration uses the single-element list [None] as the marker for
    # "option not supplied at all" (see Configuration.__init__).
    policy_specified = True
    if len(configuration.policies) == 1 and configuration.policies[0] is None:
        policy_specified = False

    service_id_specified = True
    if len(configuration.service_identities) == 1 and configuration.service_identities[0] is None:
        service_id_specified = False

    node_id_specified = True
    if len(configuration.node_identities) == 1 and configuration.node_identities[0] is None:
        node_id_specified = False

    if description_specified:
        update_role_data["Description"] = configuration.description

    if policy_specified:
        update_role_data["Policies"] = [x.to_dict() for x in configuration.policies]

    # Service identities require Consul >= 1.5.0, node identities >= 1.8.0.
    if configuration.version >= ConsulVersion("1.5.0") and service_id_specified:
        update_role_data["ServiceIdentities"] = [
            x.to_dict() for x in configuration.service_identities]

    if configuration.version >= ConsulVersion("1.8.0") and node_id_specified:
        update_role_data["NodeIdentities"] = [
            x.to_dict() for x in configuration.node_identities]

    if configuration.check_mode:
        # Check mode: compute 'changed' by comparing each section against the
        # existing role; omitted sections are copied over unchanged so the
        # returned preview resembles what the PUT would have produced.
        description_changed = False
        if description_specified:
            description_changed = role.get('Description') != update_role_data["Description"]
        else:
            update_role_data["Description"] = role.get("Description")

        policies_changed = False
        if policy_specified:
            # IDs are ignored: the user supplies policy links without server IDs.
            policies_changed = not (
                compare_consul_api_role_policy_objects(role.get('Policies', []), update_role_data.get('Policies', [])))
        else:
            if role.get('Policies') is not None:
                update_role_data["Policies"] = role.get('Policies')

        service_ids_changed = False
        if service_id_specified:
            service_ids_changed = role.get('ServiceIdentities') != update_role_data.get('ServiceIdentities')
        else:
            if role.get('ServiceIdentities') is not None:
                update_role_data["ServiceIdentities"] = role.get('ServiceIdentities')

        node_ids_changed = False
        if node_id_specified:
            node_ids_changed = role.get('NodeIdentities') != update_role_data.get('NodeIdentities')
        else:
            if role.get('NodeIdentities'):
                update_role_data["NodeIdentities"] = role.get('NodeIdentities')

        changed = (
            description_changed or
            policies_changed or
            service_ids_changed or
            node_ids_changed
        )
        return Output(changed=changed, operation=UPDATE_OPERATION, role=update_role_data)
    else:
        # if description, policies, service or node id are not specified; we need to get the existing value and apply it
        if not description_specified and role.get('Description') is not None:
            update_role_data["Description"] = role.get('Description')

        if not policy_specified and role.get('Policies') is not None:
            update_role_data["Policies"] = role.get('Policies')

        if not service_id_specified and role.get('ServiceIdentities') is not None:
            update_role_data["ServiceIdentities"] = role.get('ServiceIdentities')

        if not node_id_specified and role.get('NodeIdentities') is not None:
            update_role_data["NodeIdentities"] = role.get('NodeIdentities')

        response = requests.put(url, headers=headers, json=update_role_data, verify=configuration.validate_certs)
        handle_consul_response_error(response)

        resulting_role = response.json()
        # 'changed' is derived from the server's answer, not from the payload.
        changed = (
            role['Description'] != resulting_role['Description'] or
            role.get('Policies', None) != resulting_role.get('Policies', None) or
            role.get('ServiceIdentities', None) != resulting_role.get('ServiceIdentities', None) or
            role.get('NodeIdentities', None) != resulting_role.get('NodeIdentities', None)
        )

        return Output(changed=changed, operation=UPDATE_OPERATION, role=resulting_role)
|
||||||
|
|
||||||
|
|
||||||
|
def create_role(configuration):
    """
    Create a new ACL role via PUT /acl/role.

    In check mode no request is made and the Output contains no role object.

    :param configuration: the Configuration for this run
    :return: Output with changed=True, operation 'create' and (outside
             check mode) the role object returned by Consul
    """
    url = '%s/acl/role' % get_consul_url(configuration)
    headers = get_auth_headers(configuration)

    # check if the user omitted policies, service identities, or node identities
    # (Configuration uses the single-element list [None] as the "omitted" marker).
    policy_specified = True
    if len(configuration.policies) == 1 and configuration.policies[0] is None:
        policy_specified = False

    service_id_specified = True
    if len(configuration.service_identities) == 1 and configuration.service_identities[0] is None:
        service_id_specified = False

    node_id_specified = True
    if len(configuration.node_identities) == 1 and configuration.node_identities[0] is None:
        node_id_specified = False

    # get rid of None item so we can set an empty list for policies, service identities and node identities
    if not policy_specified:
        configuration.policies.pop()

    if not service_id_specified:
        configuration.service_identities.pop()

    if not node_id_specified:
        configuration.node_identities.pop()

    create_role_data = {
        'Name': configuration.name,
        'Description': configuration.description,
        'Policies': [x.to_dict() for x in configuration.policies],
    }
    # Service identities require Consul >= 1.5.0, node identities >= 1.8.0.
    if configuration.version >= ConsulVersion("1.5.0"):
        create_role_data["ServiceIdentities"] = [x.to_dict() for x in configuration.service_identities]

    if configuration.version >= ConsulVersion("1.8.0"):
        create_role_data["NodeIdentities"] = [x.to_dict() for x in configuration.node_identities]

    if not configuration.check_mode:
        response = requests.put(url, headers=headers, json=create_role_data, verify=configuration.validate_certs)
        handle_consul_response_error(response)

        resulting_role = response.json()

        return Output(changed=True, operation=CREATE_OPERATION, role=resulting_role)
    else:
        # Check mode: creating a missing role is always a change; no role
        # object is available because nothing was sent to the server.
        return Output(changed=True, operation=CREATE_OPERATION)
|
||||||
|
|
||||||
|
|
||||||
|
def remove_role(configuration):
    """
    Delete the role named in the configuration, if it exists.

    In check mode nothing is deleted, but the reported 'changed' flag still
    reflects whether a deletion would have happened.

    :param configuration: the Configuration for this run
    :return: Output with changed flag and operation 'remove'
    """
    existing = get_roles(configuration)
    changed = configuration.name in existing

    if changed and not configuration.check_mode:
        role_id = existing[configuration.name]['ID']
        url = '%s/acl/role/%s' % (get_consul_url(configuration), role_id)
        response = requests.delete(
            url, headers=get_auth_headers(configuration), verify=configuration.validate_certs)
        handle_consul_response_error(response)

    return Output(changed=changed, operation=REMOVE_OPERATION)
|
||||||
|
|
||||||
|
|
||||||
|
def get_roles(configuration):
    """
    Fetch all ACL roles from the Consul agent and index them by role name.

    :param configuration: the Configuration for this run
    :return: dict mapping role name to the full role object (roles without
             a name are skipped)
    """
    url = '%s/acl/roles' % get_consul_url(configuration)
    response = requests.get(
        url, headers=get_auth_headers(configuration), verify=configuration.validate_certs)
    handle_consul_response_error(response)
    return {entry['Name']: entry for entry in response.json() if entry['Name'] is not None}
|
||||||
|
|
||||||
|
|
||||||
|
def get_consul_version(configuration):
    """
    Query the agent's /agent/self endpoint and return its reported version.

    :param configuration: the Configuration for this run
    :return: ConsulVersion of the agent
    """
    url = '%s/agent/self' % get_consul_url(configuration)
    headers = get_auth_headers(configuration)
    response = requests.get(url, headers=headers, verify=configuration.validate_certs)
    handle_consul_response_error(response)
    agent_config = response.json()["Config"]
    return ConsulVersion(agent_config["Version"])
|
||||||
|
|
||||||
|
|
||||||
|
def set_role(configuration):
    """
    Ensure the configured role exists: update it when a role with that name
    is already present, create it otherwise.

    :param configuration: the Configuration for this run
    :return: Output from update_role or create_role
    """
    existing = get_roles(configuration)
    current = existing.get(configuration.name)
    if current is None:
        return create_role(configuration)
    return update_role(current, configuration)
|
||||||
|
|
||||||
|
|
||||||
|
class ConsulVersion:
    """
    Minimal semantic-version holder supporting >= and <= comparison.

    The previous implementation compared int(major + minor + patch) — string
    concatenation — which breaks as soon as a component has more than one
    digit (e.g. "2.0.0" -> 200 compared lower than "1.15.0" -> 1150).
    Comparison is now done on integer tuples.  The string attributes
    major/minor/patch are kept for backward compatibility.
    """

    def __init__(self, version_string):
        """Split 'X.Y.Z' into its three components (kept as strings)."""
        split = version_string.split('.')
        self.major = split[0]
        self.minor = split[1]
        self.patch = split[2]

    def _as_tuple(self):
        # Integer tuple for correct lexicographic version comparison.
        return (int(self.major), int(self.minor), int(self.patch))

    def __ge__(self, other):
        return self._as_tuple() >= other._as_tuple()

    def __le__(self, other):
        return self._as_tuple() <= other._as_tuple()
|
||||||
|
|
||||||
|
|
||||||
|
class ServiceIdentity:
    """
    A service identity attached to a role, as accepted by the Consul API.
    """

    def __init__(self, input):
        """Validate and unpack one element of the service_identities option."""
        if not isinstance(input, dict) or 'name' not in input:
            raise ValueError(
                "Each element of service_identities must be a dict with the keys name and optionally datacenters")
        self.name = input["name"]
        # Datacenters are optional; None means "all datacenters" to Consul.
        self.datacenters = input.get("datacenters")

    def to_dict(self):
        """Return the Consul API representation of this identity."""
        return {
            "ServiceName": self.name,
            "Datacenters": self.datacenters,
        }
|
||||||
|
|
||||||
|
|
||||||
|
class NodeIdentity:
    """
    A node identity attached to a role, as accepted by the Consul API.
    """

    def __init__(self, input):
        """Validate and unpack one element of the node_identities option."""
        if not isinstance(input, dict) or 'name' not in input:
            raise ValueError(
                "Each element of node_identities must be a dict with the keys name and optionally datacenter")
        self.name = input["name"]
        # The datacenter is optional; None leaves it unset in the API payload.
        self.datacenter = input.get("datacenter")

    def to_dict(self):
        """Return the Consul API representation of this identity."""
        return {
            "NodeName": self.name,
            "Datacenter": self.datacenter,
        }
|
||||||
|
|
||||||
|
|
||||||
|
class RoleLink:
    """
    Reference to a role by ID and/or name, mirroring the Consul API shape.
    """

    def __init__(self, dict):
        # NOTE: the parameter shadows the builtin 'dict'; the name is kept
        # so keyword callers are not broken.
        self.id = dict.get("id")
        self.name = dict.get("name")

    def to_dict(self):
        """Return the Consul API representation of this link."""
        return {
            "ID": self.id,
            "Name": self.name,
        }
|
||||||
|
|
||||||
|
|
||||||
|
class PolicyLink:
    """
    Reference to a policy by ID and/or name, mirroring the Consul API shape.
    """

    def __init__(self, dict):
        # NOTE: the parameter shadows the builtin 'dict'; the name is kept
        # so keyword callers are not broken.
        self.id = dict.get("id")
        self.name = dict.get("name")

    def to_dict(self):
        """Return the Consul API representation of this link."""
        return {
            "ID": self.id,
            "Name": self.name,
        }
|
||||||
|
|
||||||
|
|
||||||
|
class Configuration:
    """
    Configuration for this module.

    Options the user did not supply for policies/service_identities/
    node_identities are stored as the single-element list [None]; the role
    functions use that marker to tell "omitted" apart from "empty list".
    """

    def __init__(self, token=None, host=None, scheme=None, validate_certs=None, name=None, description=None, port=None,
                 policies=None, service_identities=None, node_identities=None, state=None, check_mode=None):
        self.token = token  # type: str
        self.host = host  # type: str
        self.port = port  # type: int
        self.scheme = scheme  # type: str
        self.validate_certs = validate_certs  # type: bool
        self.name = name  # type: str
        self.description = description  # type: str
        # [None] marks an omitted option; otherwise wrap each element in its
        # API helper class (which also validates the element's shape).
        self.policies = [None] if policies is None else [PolicyLink(p) for p in policies]
        self.service_identities = (
            [None] if service_identities is None
            else [ServiceIdentity(s) for s in service_identities])
        self.node_identities = (
            [None] if node_identities is None
            else [NodeIdentity(n) for n in node_identities])
        self.state = state  # type: str
        self.check_mode = check_mode  # type: bool
|
||||||
|
|
||||||
|
|
||||||
|
class Output:
    """
    Result of a single action of this module.
    """

    def __init__(self, changed=None, operation=None, role=None):
        self.changed = changed  # bool: whether anything was (or would be) modified
        self.operation = operation  # str: one of create/update/remove
        self.role = role  # dict: resulting role object, when available
|
||||||
|
|
||||||
|
|
||||||
|
def main():
    """
    Main method.

    Builds the module configuration from the parameters, queries the agent
    version, then dispatches to set_role (state=present) or remove_role
    (state=absent) and reports the result.
    """
    module = AnsibleModule(_ARGUMENT_SPEC, supports_check_mode=True)

    # Fail early with the standard "missing library" message; fail_json exits.
    if not HAS_REQUESTS:
        module.fail_json(msg=missing_required_lib("requests"),
                         exception=REQUESTS_IMP_ERR)

    try:
        configuration = Configuration(
            token=module.params.get(TOKEN_PARAMETER_NAME),
            host=module.params.get(HOST_PARAMETER_NAME),
            port=module.params.get(PORT_PARAMETER_NAME),
            scheme=module.params.get(SCHEME_PARAMETER_NAME),
            validate_certs=module.params.get(VALIDATE_CERTS_PARAMETER_NAME),
            name=module.params.get(NAME_PARAMETER_NAME),
            description=module.params.get(DESCRIPTION_PARAMETER_NAME),
            policies=module.params.get(POLICIES_PARAMETER_NAME),
            service_identities=module.params.get(SERVICE_IDENTITIES_PARAMETER_NAME),
            node_identities=module.params.get(NODE_IDENTITIES_PARAMETER_NAME),
            state=module.params.get(STATE_PARAMETER_NAME),
            check_mode=module.check_mode

        )
    except ValueError as err:
        # Raised by the identity helper classes on malformed list elements.
        module.fail_json(msg='Configuration error: %s' % str(err))
        # Defensive: fail_json normally exits, so this is not reached.
        return

    try:

        # The agent version gates which payload fields may be sent
        # (service identities need >= 1.5.0, node identities >= 1.8.0).
        version = get_consul_version(configuration)
        configuration.version = version

        if configuration.state == PRESENT_STATE_VALUE:
            output = set_role(configuration)
        else:
            output = remove_role(configuration)
    except ConnectionError as e:
        module.fail_json(msg='Could not connect to consul agent at %s:%s, error was %s' % (
            configuration.host, configuration.port, str(e)))
        # Defensive: fail_json normally exits, so this is not reached.
        raise

    return_values = dict(changed=output.changed, operation=output.operation, role=output.role)
    module.exit_json(**return_values)
|
||||||
|
|
||||||
|
|
||||||
|
if __name__ == "__main__":
|
||||||
|
main()
|
||||||
@@ -183,8 +183,9 @@ class CPANMinus(ModuleHelper):
|
|||||||
if v.name and v.from_path:
|
if v.name and v.from_path:
|
||||||
self.do_raise("Parameters 'name' and 'from_path' are mutually exclusive when 'mode=new'")
|
self.do_raise("Parameters 'name' and 'from_path' are mutually exclusive when 'mode=new'")
|
||||||
|
|
||||||
self.command = self.get_bin_path(v.executable if v.executable else self.command)
|
self.command = v.executable if v.executable else self.command
|
||||||
self.vars.set("binary", self.command)
|
self.runner = CmdRunner(self.module, self.command, self.command_args_formats, check_rc=True)
|
||||||
|
self.vars.binary = self.runner.binary
|
||||||
|
|
||||||
def _is_package_installed(self, name, locallib, version):
|
def _is_package_installed(self, name, locallib, version):
|
||||||
def process(rc, out, err):
|
def process(rc, out, err):
|
||||||
@@ -220,8 +221,6 @@ class CPANMinus(ModuleHelper):
|
|||||||
self.do_raise(msg=err, cmd=self.vars.cmd_args)
|
self.do_raise(msg=err, cmd=self.vars.cmd_args)
|
||||||
return 'is up to date' not in err and 'is up to date' not in out
|
return 'is up to date' not in err and 'is up to date' not in out
|
||||||
|
|
||||||
runner = CmdRunner(self.module, self.command, self.command_args_formats, check_rc=True)
|
|
||||||
|
|
||||||
v = self.vars
|
v = self.vars
|
||||||
pkg_param = 'from_path' if v.from_path else 'name'
|
pkg_param = 'from_path' if v.from_path else 'name'
|
||||||
|
|
||||||
@@ -235,7 +234,7 @@ class CPANMinus(ModuleHelper):
|
|||||||
return
|
return
|
||||||
pkg_spec = self.sanitize_pkg_spec_version(v[pkg_param], v.version)
|
pkg_spec = self.sanitize_pkg_spec_version(v[pkg_param], v.version)
|
||||||
|
|
||||||
with runner(['notest', 'locallib', 'mirror', 'mirror_only', 'installdeps', 'pkg_spec'], output_process=process) as ctx:
|
with self.runner(['notest', 'locallib', 'mirror', 'mirror_only', 'installdeps', 'pkg_spec'], output_process=process) as ctx:
|
||||||
self.changed = ctx.run(pkg_spec=pkg_spec)
|
self.changed = ctx.run(pkg_spec=pkg_spec)
|
||||||
|
|
||||||
|
|
||||||
|
|||||||
@@ -400,7 +400,7 @@ class DconfPreference(object):
|
|||||||
rc, out, err = dbus_wrapper.run_command(command)
|
rc, out, err = dbus_wrapper.run_command(command)
|
||||||
|
|
||||||
if rc != 0:
|
if rc != 0:
|
||||||
self.module.fail_json(msg='dconf failed while reseting the value with error: %s' % err,
|
self.module.fail_json(msg='dconf failed while resetting the value with error: %s' % err,
|
||||||
out=out,
|
out=out,
|
||||||
err=err)
|
err=err)
|
||||||
|
|
||||||
|
|||||||
@@ -178,7 +178,7 @@ class DNSimpleV2():
|
|||||||
client = Client(sandbox=self.sandbox, email=self.account_email, access_token=self.account_api_token, user_agent="ansible/community.general")
|
client = Client(sandbox=self.sandbox, email=self.account_email, access_token=self.account_api_token, user_agent="ansible/community.general")
|
||||||
else:
|
else:
|
||||||
msg = "Option account_email or account_api_token not provided. " \
|
msg = "Option account_email or account_api_token not provided. " \
|
||||||
"Dnsimple authentiction with a .dnsimple config file is not " \
|
"Dnsimple authentication with a .dnsimple config file is not " \
|
||||||
"supported with dnsimple-python>=2.0.0"
|
"supported with dnsimple-python>=2.0.0"
|
||||||
raise DNSimpleException(msg)
|
raise DNSimpleException(msg)
|
||||||
client.identity.whoami()
|
client.identity.whoami()
|
||||||
@@ -225,24 +225,24 @@ class DNSimpleV2():
|
|||||||
self.client.domains.delete_domain(self.account.id, domain)
|
self.client.domains.delete_domain(self.account.id, domain)
|
||||||
|
|
||||||
def get_records(self, zone, dnsimple_filter=None):
|
def get_records(self, zone, dnsimple_filter=None):
|
||||||
"""return dns ressource records which match a specified filter"""
|
"""return dns resource records which match a specified filter"""
|
||||||
records_list = self._get_paginated_result(self.client.zones.list_records,
|
records_list = self._get_paginated_result(self.client.zones.list_records,
|
||||||
account_id=self.account.id,
|
account_id=self.account.id,
|
||||||
zone=zone, filter=dnsimple_filter)
|
zone=zone, filter=dnsimple_filter)
|
||||||
return [d.__dict__ for d in records_list]
|
return [d.__dict__ for d in records_list]
|
||||||
|
|
||||||
def delete_record(self, domain, rid):
|
def delete_record(self, domain, rid):
|
||||||
"""delete a single dns ressource record"""
|
"""delete a single dns resource record"""
|
||||||
self.client.zones.delete_record(self.account.id, domain, rid)
|
self.client.zones.delete_record(self.account.id, domain, rid)
|
||||||
|
|
||||||
def update_record(self, domain, rid, ttl=None, priority=None):
|
def update_record(self, domain, rid, ttl=None, priority=None):
|
||||||
"""update a single dns ressource record"""
|
"""update a single dns resource record"""
|
||||||
zr = ZoneRecordUpdateInput(ttl=ttl, priority=priority)
|
zr = ZoneRecordUpdateInput(ttl=ttl, priority=priority)
|
||||||
result = self.client.zones.update_record(self.account.id, str(domain), str(rid), zr).data.__dict__
|
result = self.client.zones.update_record(self.account.id, str(domain), str(rid), zr).data.__dict__
|
||||||
return result
|
return result
|
||||||
|
|
||||||
def create_record(self, domain, name, record_type, content, ttl=None, priority=None):
|
def create_record(self, domain, name, record_type, content, ttl=None, priority=None):
|
||||||
"""create a single dns ressource record"""
|
"""create a single dns resource record"""
|
||||||
zr = ZoneRecordInput(name=name, type=record_type, content=content, ttl=ttl, priority=priority)
|
zr = ZoneRecordInput(name=name, type=record_type, content=content, ttl=ttl, priority=priority)
|
||||||
return self.client.zones.create_record(self.account.id, str(domain), zr).data.__dict__
|
return self.client.zones.create_record(self.account.id, str(domain), zr).data.__dict__
|
||||||
|
|
||||||
|
|||||||
@@ -509,15 +509,15 @@ class DME2(object):
|
|||||||
return json.dumps(data, separators=(',', ':'))
|
return json.dumps(data, separators=(',', ':'))
|
||||||
|
|
||||||
def createRecord(self, data):
|
def createRecord(self, data):
|
||||||
# @TODO update the cache w/ resultant record + id when impleneted
|
# @TODO update the cache w/ resultant record + id when implemented
|
||||||
return self.query(self.record_url, 'POST', data)
|
return self.query(self.record_url, 'POST', data)
|
||||||
|
|
||||||
def updateRecord(self, record_id, data):
|
def updateRecord(self, record_id, data):
|
||||||
# @TODO update the cache w/ resultant record + id when impleneted
|
# @TODO update the cache w/ resultant record + id when implemented
|
||||||
return self.query(self.record_url + '/' + str(record_id), 'PUT', data)
|
return self.query(self.record_url + '/' + str(record_id), 'PUT', data)
|
||||||
|
|
||||||
def deleteRecord(self, record_id):
|
def deleteRecord(self, record_id):
|
||||||
# @TODO remove record from the cache when impleneted
|
# @TODO remove record from the cache when implemented
|
||||||
return self.query(self.record_url + '/' + str(record_id), 'DELETE')
|
return self.query(self.record_url + '/' + str(record_id), 'DELETE')
|
||||||
|
|
||||||
def getMonitor(self, record_id):
|
def getMonitor(self, record_id):
|
||||||
|
|||||||
@@ -78,6 +78,7 @@ EXAMPLES = '''
|
|||||||
import syslog
|
import syslog
|
||||||
|
|
||||||
from ansible.module_utils.basic import AnsibleModule
|
from ansible.module_utils.basic import AnsibleModule
|
||||||
|
from ansible_collections.community.general.plugins.module_utils.cmd_runner import CmdRunner, cmd_runner_fmt
|
||||||
|
|
||||||
|
|
||||||
class EjabberdUser(object):
|
class EjabberdUser(object):
|
||||||
@@ -95,6 +96,17 @@ class EjabberdUser(object):
|
|||||||
self.host = module.params.get('host')
|
self.host = module.params.get('host')
|
||||||
self.user = module.params.get('username')
|
self.user = module.params.get('username')
|
||||||
self.pwd = module.params.get('password')
|
self.pwd = module.params.get('password')
|
||||||
|
self.runner = CmdRunner(
|
||||||
|
module,
|
||||||
|
command="ejabberdctl",
|
||||||
|
arg_formats=dict(
|
||||||
|
cmd=cmd_runner_fmt.as_list(),
|
||||||
|
host=cmd_runner_fmt.as_list(),
|
||||||
|
user=cmd_runner_fmt.as_list(),
|
||||||
|
pwd=cmd_runner_fmt.as_list(),
|
||||||
|
),
|
||||||
|
check_rc=False,
|
||||||
|
)
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def changed(self):
|
def changed(self):
|
||||||
@@ -102,7 +114,7 @@ class EjabberdUser(object):
|
|||||||
changed. It will return True if the user does not match the supplied
|
changed. It will return True if the user does not match the supplied
|
||||||
credentials and False if it does not
|
credentials and False if it does not
|
||||||
"""
|
"""
|
||||||
return self.run_command('check_password', [self.user, self.host, self.pwd])
|
return self.run_command('check_password', 'user host pwd', (lambda rc, out, err: bool(rc)))
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def exists(self):
|
def exists(self):
|
||||||
@@ -110,7 +122,7 @@ class EjabberdUser(object):
|
|||||||
host specified. If the user exists True is returned, otherwise False
|
host specified. If the user exists True is returned, otherwise False
|
||||||
is returned
|
is returned
|
||||||
"""
|
"""
|
||||||
return self.run_command('check_account', [self.user, self.host])
|
return self.run_command('check_account', 'user host', (lambda rc, out, err: not bool(rc)))
|
||||||
|
|
||||||
def log(self, entry):
|
def log(self, entry):
|
||||||
""" This method will log information to the local syslog facility """
|
""" This method will log information to the local syslog facility """
|
||||||
@@ -118,29 +130,36 @@ class EjabberdUser(object):
|
|||||||
syslog.openlog('ansible-%s' % self.module._name)
|
syslog.openlog('ansible-%s' % self.module._name)
|
||||||
syslog.syslog(syslog.LOG_NOTICE, entry)
|
syslog.syslog(syslog.LOG_NOTICE, entry)
|
||||||
|
|
||||||
def run_command(self, cmd, options):
|
def run_command(self, cmd, options, process=None):
|
||||||
""" This method will run the any command specified and return the
|
""" This method will run the any command specified and return the
|
||||||
returns using the Ansible common module
|
returns using the Ansible common module
|
||||||
"""
|
"""
|
||||||
cmd = [self.module.get_bin_path('ejabberdctl', required=True), cmd] + options
|
def _proc(*a):
|
||||||
self.log('command: %s' % " ".join(cmd))
|
return a
|
||||||
return self.module.run_command(cmd)
|
|
||||||
|
if process is None:
|
||||||
|
process = _proc
|
||||||
|
|
||||||
|
with self.runner("cmd " + options, output_process=process) as ctx:
|
||||||
|
res = ctx.run(cmd=cmd, host=self.host, user=self.user, pwd=self.pwd)
|
||||||
|
self.log('command: %s' % " ".join(ctx.run_info['cmd']))
|
||||||
|
return res
|
||||||
|
|
||||||
def update(self):
|
def update(self):
|
||||||
""" The update method will update the credentials for the user provided
|
""" The update method will update the credentials for the user provided
|
||||||
"""
|
"""
|
||||||
return self.run_command('change_password', [self.user, self.host, self.pwd])
|
return self.run_command('change_password', 'user host pwd')
|
||||||
|
|
||||||
def create(self):
|
def create(self):
|
||||||
""" The create method will create a new user on the host with the
|
""" The create method will create a new user on the host with the
|
||||||
password provided
|
password provided
|
||||||
"""
|
"""
|
||||||
return self.run_command('register', [self.user, self.host, self.pwd])
|
return self.run_command('register', 'user host pwd')
|
||||||
|
|
||||||
def delete(self):
|
def delete(self):
|
||||||
""" The delete method will delete the user from the host
|
""" The delete method will delete the user from the host
|
||||||
"""
|
"""
|
||||||
return self.run_command('unregister', [self.user, self.host])
|
return self.run_command('unregister', 'user host')
|
||||||
|
|
||||||
|
|
||||||
def main():
|
def main():
|
||||||
@@ -150,7 +169,7 @@ def main():
|
|||||||
username=dict(required=True, type='str'),
|
username=dict(required=True, type='str'),
|
||||||
password=dict(type='str', no_log=True),
|
password=dict(type='str', no_log=True),
|
||||||
state=dict(default='present', choices=['present', 'absent']),
|
state=dict(default='present', choices=['present', 'absent']),
|
||||||
logging=dict(default=False, type='bool') # deprecate in favour of c.g.syslogger?
|
logging=dict(default=False, type='bool', removed_in_version='10.0.0', removed_from_collection='community.general'),
|
||||||
),
|
),
|
||||||
required_if=[
|
required_if=[
|
||||||
('state', 'present', ['password']),
|
('state', 'present', ['password']),
|
||||||
|
|||||||
108
plugins/modules/gio_mime.py
Normal file
108
plugins/modules/gio_mime.py
Normal file
@@ -0,0 +1,108 @@
|
|||||||
|
#!/usr/bin/python
|
||||||
|
# -*- coding: utf-8 -*-
|
||||||
|
# Copyright (c) 2022, Alexei Znamensky <russoz@gmail.com>
|
||||||
|
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||||
|
# SPDX-License-Identifier: GPL-3.0-or-later
|
||||||
|
|
||||||
|
from __future__ import absolute_import, division, print_function
|
||||||
|
__metaclass__ = type
|
||||||
|
|
||||||
|
DOCUMENTATION = '''
|
||||||
|
module: gio_mime
|
||||||
|
author:
|
||||||
|
- "Alexei Znamensky (@russoz)"
|
||||||
|
short_description: Set default handler for MIME type, for applications using Gnome GIO
|
||||||
|
version_added: 7.5.0
|
||||||
|
description:
|
||||||
|
- This module allows configuring the default handler for a specific MIME type, to be used by applications built with th Gnome GIO API.
|
||||||
|
extends_documentation_fragment:
|
||||||
|
- community.general.attributes
|
||||||
|
attributes:
|
||||||
|
check_mode:
|
||||||
|
support: full
|
||||||
|
diff_mode:
|
||||||
|
support: full
|
||||||
|
options:
|
||||||
|
mime_type:
|
||||||
|
description:
|
||||||
|
- MIME type for which a default handler will be set.
|
||||||
|
type: str
|
||||||
|
required: true
|
||||||
|
handler:
|
||||||
|
description:
|
||||||
|
- Default handler will be set for the MIME type.
|
||||||
|
type: str
|
||||||
|
required: true
|
||||||
|
notes:
|
||||||
|
- This module is a thin wrapper around the C(gio mime) command (and subcommand).
|
||||||
|
- See man gio(1) for more details.
|
||||||
|
seealso:
|
||||||
|
- name: GIO Documentation
|
||||||
|
description: Reference documentation for the GIO API..
|
||||||
|
link: https://docs.gtk.org/gio/
|
||||||
|
'''
|
||||||
|
|
||||||
|
EXAMPLES = """
|
||||||
|
- name: Set chrome as the default handler for https
|
||||||
|
community.general.gio_mime:
|
||||||
|
mime_type: x-scheme-handler/https
|
||||||
|
handler: google-chrome.desktop
|
||||||
|
register: result
|
||||||
|
"""
|
||||||
|
|
||||||
|
RETURN = '''
|
||||||
|
handler:
|
||||||
|
description:
|
||||||
|
- The handler set as default.
|
||||||
|
returned: success
|
||||||
|
type: str
|
||||||
|
sample: google-chrome.desktop
|
||||||
|
stdout:
|
||||||
|
description:
|
||||||
|
- The output of the C(gio) command.
|
||||||
|
returned: success
|
||||||
|
type: str
|
||||||
|
sample: Set google-chrome.desktop as the default for x-scheme-handler/https
|
||||||
|
stderr:
|
||||||
|
description:
|
||||||
|
- The error output of the C(gio) command.
|
||||||
|
returned: failure
|
||||||
|
type: str
|
||||||
|
sample: 'gio: Failed to load info for handler "never-existed.desktop"'
|
||||||
|
'''
|
||||||
|
|
||||||
|
from ansible_collections.community.general.plugins.module_utils.module_helper import ModuleHelper
|
||||||
|
from ansible_collections.community.general.plugins.module_utils.gio_mime import gio_mime_runner, gio_mime_get
|
||||||
|
|
||||||
|
|
||||||
|
class GioMime(ModuleHelper):
|
||||||
|
output_params = ['handler']
|
||||||
|
module = dict(
|
||||||
|
argument_spec=dict(
|
||||||
|
mime_type=dict(type='str', required=True),
|
||||||
|
handler=dict(type='str', required=True),
|
||||||
|
),
|
||||||
|
supports_check_mode=True,
|
||||||
|
)
|
||||||
|
|
||||||
|
def __init_module__(self):
|
||||||
|
self.runner = gio_mime_runner(self.module, check_rc=True)
|
||||||
|
self.vars.set_meta("handler", initial_value=gio_mime_get(self.runner, self.vars.mime_type), diff=True, change=True)
|
||||||
|
|
||||||
|
def __run__(self):
|
||||||
|
check_mode_return = (0, 'Module executed in check mode', '')
|
||||||
|
if self.vars.has_changed("handler"):
|
||||||
|
with self.runner.context(args_order=["mime_type", "handler"], check_mode_skip=True, check_mode_return=check_mode_return) as ctx:
|
||||||
|
rc, out, err = ctx.run()
|
||||||
|
self.vars.stdout = out
|
||||||
|
self.vars.stderr = err
|
||||||
|
if self.verbosity >= 4:
|
||||||
|
self.vars.run_info = ctx.run_info
|
||||||
|
|
||||||
|
|
||||||
|
def main():
|
||||||
|
GioMime.execute()
|
||||||
|
|
||||||
|
|
||||||
|
if __name__ == '__main__':
|
||||||
|
main()
|
||||||
@@ -227,7 +227,7 @@ class GithubDeployKey(object):
|
|||||||
yield self.module.from_json(resp.read())
|
yield self.module.from_json(resp.read())
|
||||||
|
|
||||||
links = {}
|
links = {}
|
||||||
for x, y in findall(r'<([^>]+)>;\s*rel="(\w+)"', info["link"]):
|
for x, y in findall(r'<([^>]+)>;\s*rel="(\w+)"', info.get("link", '')):
|
||||||
links[y] = x
|
links[y] = x
|
||||||
|
|
||||||
url = links.get('next')
|
url = links.get('next')
|
||||||
|
|||||||
@@ -94,6 +94,13 @@ options:
|
|||||||
- This option is only used on creation, not for updates.
|
- This option is only used on creation, not for updates.
|
||||||
type: path
|
type: path
|
||||||
version_added: 4.2.0
|
version_added: 4.2.0
|
||||||
|
force_delete:
|
||||||
|
description:
|
||||||
|
- Force delete group even if projects in it.
|
||||||
|
- Used only when O(state=absent).
|
||||||
|
type: bool
|
||||||
|
default: false
|
||||||
|
version_added: 7.5.0
|
||||||
'''
|
'''
|
||||||
|
|
||||||
EXAMPLES = '''
|
EXAMPLES = '''
|
||||||
@@ -279,12 +286,18 @@ class GitLabGroup(object):
|
|||||||
|
|
||||||
return (changed, group)
|
return (changed, group)
|
||||||
|
|
||||||
def delete_group(self):
|
'''
|
||||||
|
@param force To delete even if projects inside
|
||||||
|
'''
|
||||||
|
def delete_group(self, force=False):
|
||||||
group = self.group_object
|
group = self.group_object
|
||||||
|
|
||||||
if len(group.projects.list(all=False)) >= 1:
|
if not force and len(group.projects.list(all=False)) >= 1:
|
||||||
self._module.fail_json(
|
self._module.fail_json(
|
||||||
msg="There are still projects in this group. These needs to be moved or deleted before this group can be removed.")
|
msg=("There are still projects in this group. "
|
||||||
|
"These needs to be moved or deleted before this group can be removed. "
|
||||||
|
"Use 'force_delete' to 'true' to force deletion of existing projects.")
|
||||||
|
)
|
||||||
else:
|
else:
|
||||||
if self._module.check_mode:
|
if self._module.check_mode:
|
||||||
return True
|
return True
|
||||||
@@ -295,7 +308,7 @@ class GitLabGroup(object):
|
|||||||
self._module.fail_json(msg="Failed to delete group: %s " % to_native(e))
|
self._module.fail_json(msg="Failed to delete group: %s " % to_native(e))
|
||||||
|
|
||||||
'''
|
'''
|
||||||
@param name Name of the groupe
|
@param name Name of the group
|
||||||
@param full_path Complete path of the Group including parent group path. <parent_path>/<group_path>
|
@param full_path Complete path of the Group including parent group path. <parent_path>/<group_path>
|
||||||
'''
|
'''
|
||||||
def exists_group(self, project_identifier):
|
def exists_group(self, project_identifier):
|
||||||
@@ -322,6 +335,7 @@ def main():
|
|||||||
subgroup_creation_level=dict(type='str', choices=['maintainer', 'owner']),
|
subgroup_creation_level=dict(type='str', choices=['maintainer', 'owner']),
|
||||||
require_two_factor_authentication=dict(type='bool'),
|
require_two_factor_authentication=dict(type='bool'),
|
||||||
avatar_path=dict(type='path'),
|
avatar_path=dict(type='path'),
|
||||||
|
force_delete=dict(type='bool', default=False),
|
||||||
))
|
))
|
||||||
|
|
||||||
module = AnsibleModule(
|
module = AnsibleModule(
|
||||||
@@ -354,6 +368,7 @@ def main():
|
|||||||
subgroup_creation_level = module.params['subgroup_creation_level']
|
subgroup_creation_level = module.params['subgroup_creation_level']
|
||||||
require_two_factor_authentication = module.params['require_two_factor_authentication']
|
require_two_factor_authentication = module.params['require_two_factor_authentication']
|
||||||
avatar_path = module.params['avatar_path']
|
avatar_path = module.params['avatar_path']
|
||||||
|
force_delete = module.params['force_delete']
|
||||||
|
|
||||||
gitlab_instance = gitlab_authentication(module)
|
gitlab_instance = gitlab_authentication(module)
|
||||||
|
|
||||||
@@ -375,7 +390,7 @@ def main():
|
|||||||
|
|
||||||
if state == 'absent':
|
if state == 'absent':
|
||||||
if group_exists:
|
if group_exists:
|
||||||
gitlab_group.delete_group()
|
gitlab_group.delete_group(force=force_delete)
|
||||||
module.exit_json(changed=True, msg="Successfully deleted group %s" % group_name)
|
module.exit_json(changed=True, msg="Successfully deleted group %s" % group_name)
|
||||||
else:
|
else:
|
||||||
module.exit_json(changed=False, msg="Group deleted or does not exists")
|
module.exit_json(changed=False, msg="Group deleted or does not exists")
|
||||||
|
|||||||
@@ -276,11 +276,11 @@ def main():
|
|||||||
ensure_gitlab_package(module)
|
ensure_gitlab_package(module)
|
||||||
|
|
||||||
access_level_int = {
|
access_level_int = {
|
||||||
'guest': gitlab.GUEST_ACCESS,
|
'guest': gitlab.const.GUEST_ACCESS,
|
||||||
'reporter': gitlab.REPORTER_ACCESS,
|
'reporter': gitlab.const.REPORTER_ACCESS,
|
||||||
'developer': gitlab.DEVELOPER_ACCESS,
|
'developer': gitlab.const.DEVELOPER_ACCESS,
|
||||||
'maintainer': gitlab.MAINTAINER_ACCESS,
|
'maintainer': gitlab.const.MAINTAINER_ACCESS,
|
||||||
'owner': gitlab.OWNER_ACCESS,
|
'owner': gitlab.const.OWNER_ACCESS,
|
||||||
}
|
}
|
||||||
|
|
||||||
gitlab_group = module.params['gitlab_group']
|
gitlab_group = module.params['gitlab_group']
|
||||||
|
|||||||
@@ -53,13 +53,14 @@ options:
|
|||||||
type: bool
|
type: bool
|
||||||
vars:
|
vars:
|
||||||
description:
|
description:
|
||||||
- When the list element is a simple key-value pair, set masked and protected to false.
|
- When the list element is a simple key-value pair, masked, raw and protected will be set to false.
|
||||||
- When the list element is a dict with the keys C(value), C(masked) and C(protected), the user can
|
- When the list element is a dict with the keys C(value), C(masked), C(raw) and C(protected), the user can
|
||||||
have full control about whether a value should be masked, protected or both.
|
have full control about whether a value should be masked, raw, protected or both.
|
||||||
- Support for group variables requires GitLab >= 9.5.
|
- Support for group variables requires GitLab >= 9.5.
|
||||||
- Support for environment_scope requires GitLab Premium >= 13.11.
|
- Support for environment_scope requires GitLab Premium >= 13.11.
|
||||||
- Support for protected values requires GitLab >= 9.3.
|
- Support for protected values requires GitLab >= 9.3.
|
||||||
- Support for masked values requires GitLab >= 11.10.
|
- Support for masked values requires GitLab >= 11.10.
|
||||||
|
- Support for raw values requires GitLab >= 15.7.
|
||||||
- A C(value) must be a string or a number.
|
- A C(value) must be a string or a number.
|
||||||
- Field C(variable_type) must be a string with either V(env_var), which is the default, or V(file).
|
- Field C(variable_type) must be a string with either V(env_var), which is the default, or V(file).
|
||||||
- When a value is masked, it must be in Base64 and have a length of at least 8 characters.
|
- When a value is masked, it must be in Base64 and have a length of at least 8 characters.
|
||||||
@@ -70,7 +71,7 @@ options:
|
|||||||
version_added: 4.5.0
|
version_added: 4.5.0
|
||||||
description:
|
description:
|
||||||
- A list of dictionaries that represents CI/CD variables.
|
- A list of dictionaries that represents CI/CD variables.
|
||||||
- This modules works internal with this sructure, even if the older O(vars) parameter is used.
|
- This modules works internal with this structure, even if the older O(vars) parameter is used.
|
||||||
default: []
|
default: []
|
||||||
type: list
|
type: list
|
||||||
elements: dict
|
elements: dict
|
||||||
@@ -87,17 +88,24 @@ options:
|
|||||||
type: str
|
type: str
|
||||||
masked:
|
masked:
|
||||||
description:
|
description:
|
||||||
- Wether variable value is masked or not.
|
- Whether variable value is masked or not.
|
||||||
type: bool
|
type: bool
|
||||||
default: false
|
default: false
|
||||||
protected:
|
protected:
|
||||||
description:
|
description:
|
||||||
- Wether variable value is protected or not.
|
- Whether variable value is protected or not.
|
||||||
type: bool
|
type: bool
|
||||||
default: false
|
default: false
|
||||||
|
raw:
|
||||||
|
description:
|
||||||
|
- Whether variable value is raw or not.
|
||||||
|
- Support for raw values requires GitLab >= 15.7.
|
||||||
|
type: bool
|
||||||
|
default: false
|
||||||
|
version_added: '7.4.0'
|
||||||
variable_type:
|
variable_type:
|
||||||
description:
|
description:
|
||||||
- Wether a variable is an environment variable (V(env_var)) or a file (V(file)).
|
- Whether a variable is an environment variable (V(env_var)) or a file (V(file)).
|
||||||
type: str
|
type: str
|
||||||
choices: [ "env_var", "file" ]
|
choices: [ "env_var", "file" ]
|
||||||
default: env_var
|
default: env_var
|
||||||
@@ -126,6 +134,38 @@ EXAMPLES = r'''
|
|||||||
variable_type: env_var
|
variable_type: env_var
|
||||||
environment_scope: production
|
environment_scope: production
|
||||||
|
|
||||||
|
- name: Set or update some CI/CD variables with raw value
|
||||||
|
community.general.gitlab_group_variable:
|
||||||
|
api_url: https://gitlab.com
|
||||||
|
api_token: secret_access_token
|
||||||
|
group: scodeman/testgroup/
|
||||||
|
purge: false
|
||||||
|
vars:
|
||||||
|
ACCESS_KEY_ID: abc123
|
||||||
|
SECRET_ACCESS_KEY:
|
||||||
|
value: 3214cbad
|
||||||
|
masked: true
|
||||||
|
protected: true
|
||||||
|
raw: true
|
||||||
|
variable_type: env_var
|
||||||
|
environment_scope: '*'
|
||||||
|
|
||||||
|
- name: Set or update some CI/CD variables with expandable value
|
||||||
|
community.general.gitlab_group_variable:
|
||||||
|
api_url: https://gitlab.com
|
||||||
|
api_token: secret_access_token
|
||||||
|
group: scodeman/testgroup/
|
||||||
|
purge: false
|
||||||
|
vars:
|
||||||
|
ACCESS_KEY_ID: abc123
|
||||||
|
SECRET_ACCESS_KEY:
|
||||||
|
value: '$MY_OTHER_VARIABLE'
|
||||||
|
masked: true
|
||||||
|
protected: true
|
||||||
|
raw: false
|
||||||
|
variable_type: env_var
|
||||||
|
environment_scope: '*'
|
||||||
|
|
||||||
- name: Delete one variable
|
- name: Delete one variable
|
||||||
community.general.gitlab_group_variable:
|
community.general.gitlab_group_variable:
|
||||||
api_url: https://gitlab.com
|
api_url: https://gitlab.com
|
||||||
@@ -199,6 +239,7 @@ class GitlabGroupVariables(object):
|
|||||||
"value": var_obj.get('value'),
|
"value": var_obj.get('value'),
|
||||||
"masked": var_obj.get('masked'),
|
"masked": var_obj.get('masked'),
|
||||||
"protected": var_obj.get('protected'),
|
"protected": var_obj.get('protected'),
|
||||||
|
"raw": var_obj.get('raw'),
|
||||||
"variable_type": var_obj.get('variable_type'),
|
"variable_type": var_obj.get('variable_type'),
|
||||||
}
|
}
|
||||||
if var_obj.get('environment_scope') is not None:
|
if var_obj.get('environment_scope') is not None:
|
||||||
@@ -267,6 +308,8 @@ def native_python_main(this_gitlab, purge, requested_variables, state, module):
|
|||||||
item['value'] = str(item.get('value'))
|
item['value'] = str(item.get('value'))
|
||||||
if item.get('protected') is None:
|
if item.get('protected') is None:
|
||||||
item['protected'] = False
|
item['protected'] = False
|
||||||
|
if item.get('raw') is None:
|
||||||
|
item['raw'] = False
|
||||||
if item.get('masked') is None:
|
if item.get('masked') is None:
|
||||||
item['masked'] = False
|
item['masked'] = False
|
||||||
if item.get('environment_scope') is None:
|
if item.get('environment_scope') is None:
|
||||||
@@ -338,11 +381,14 @@ def main():
|
|||||||
group=dict(type='str', required=True),
|
group=dict(type='str', required=True),
|
||||||
purge=dict(type='bool', required=False, default=False),
|
purge=dict(type='bool', required=False, default=False),
|
||||||
vars=dict(type='dict', required=False, default=dict(), no_log=True),
|
vars=dict(type='dict', required=False, default=dict(), no_log=True),
|
||||||
|
# please mind whenever changing the variables dict to also change module_utils/gitlab.py's
|
||||||
|
# KNOWN dict in filter_returned_variables or bad evil will happen
|
||||||
variables=dict(type='list', elements='dict', required=False, default=list(), options=dict(
|
variables=dict(type='list', elements='dict', required=False, default=list(), options=dict(
|
||||||
name=dict(type='str', required=True),
|
name=dict(type='str', required=True),
|
||||||
value=dict(type='str', no_log=True),
|
value=dict(type='str', no_log=True),
|
||||||
masked=dict(type='bool', default=False),
|
masked=dict(type='bool', default=False),
|
||||||
protected=dict(type='bool', default=False),
|
protected=dict(type='bool', default=False),
|
||||||
|
raw=dict(type='bool', default=False),
|
||||||
environment_scope=dict(type='str', default='*'),
|
environment_scope=dict(type='str', default='*'),
|
||||||
variable_type=dict(type='str', default='env_var', choices=["env_var", "file"])
|
variable_type=dict(type='str', default='env_var', choices=["env_var", "file"])
|
||||||
)),
|
)),
|
||||||
|
|||||||
@@ -67,17 +67,17 @@ options:
|
|||||||
type: str
|
type: str
|
||||||
masked:
|
masked:
|
||||||
description:
|
description:
|
||||||
- Wether variable value is masked or not.
|
- Whether variable value is masked or not.
|
||||||
type: bool
|
type: bool
|
||||||
default: false
|
default: false
|
||||||
protected:
|
protected:
|
||||||
description:
|
description:
|
||||||
- Wether variable value is protected or not.
|
- Whether variable value is protected or not.
|
||||||
type: bool
|
type: bool
|
||||||
default: false
|
default: false
|
||||||
variable_type:
|
variable_type:
|
||||||
description:
|
description:
|
||||||
- Wether a variable is an environment variable (V(env_var)) or a file (V(file)).
|
- Whether a variable is an environment variable (V(env_var)) or a file (V(file)).
|
||||||
type: str
|
type: str
|
||||||
choices: [ "env_var", "file" ]
|
choices: [ "env_var", "file" ]
|
||||||
default: env_var
|
default: env_var
|
||||||
|
|||||||
@@ -70,7 +70,7 @@ options:
|
|||||||
description:
|
description:
|
||||||
description:
|
description:
|
||||||
- A description for the merge request.
|
- A description for the merge request.
|
||||||
- Gets overriden by a content of file specified at O(description_path), if found.
|
- Gets overridden by a content of file specified at O(description_path), if found.
|
||||||
type: str
|
type: str
|
||||||
description_path:
|
description_path:
|
||||||
description:
|
description:
|
||||||
|
|||||||
@@ -282,10 +282,10 @@ def main():
|
|||||||
ensure_gitlab_package(module)
|
ensure_gitlab_package(module)
|
||||||
|
|
||||||
access_level_int = {
|
access_level_int = {
|
||||||
'guest': gitlab.GUEST_ACCESS,
|
'guest': gitlab.const.GUEST_ACCESS,
|
||||||
'reporter': gitlab.REPORTER_ACCESS,
|
'reporter': gitlab.const.REPORTER_ACCESS,
|
||||||
'developer': gitlab.DEVELOPER_ACCESS,
|
'developer': gitlab.const.DEVELOPER_ACCESS,
|
||||||
'maintainer': gitlab.MAINTAINER_ACCESS,
|
'maintainer': gitlab.const.MAINTAINER_ACCESS,
|
||||||
}
|
}
|
||||||
|
|
||||||
gitlab_project = module.params['project']
|
gitlab_project = module.params['project']
|
||||||
|
|||||||
@@ -51,11 +51,12 @@ options:
|
|||||||
type: bool
|
type: bool
|
||||||
vars:
|
vars:
|
||||||
description:
|
description:
|
||||||
- When the list element is a simple key-value pair, masked and protected will be set to false.
|
- When the list element is a simple key-value pair, masked, raw and protected will be set to false.
|
||||||
- When the list element is a dict with the keys C(value), C(masked) and C(protected), the user can
|
- When the list element is a dict with the keys C(value), C(masked), C(raw) and C(protected), the user can
|
||||||
have full control about whether a value should be masked, protected or both.
|
have full control about whether a value should be masked, raw, protected or both.
|
||||||
- Support for protected values requires GitLab >= 9.3.
|
- Support for protected values requires GitLab >= 9.3.
|
||||||
- Support for masked values requires GitLab >= 11.10.
|
- Support for masked values requires GitLab >= 11.10.
|
||||||
|
- Support for raw values requires GitLab >= 15.7.
|
||||||
- Support for environment_scope requires GitLab Premium >= 13.11.
|
- Support for environment_scope requires GitLab Premium >= 13.11.
|
||||||
- Support for variable_type requires GitLab >= 11.11.
|
- Support for variable_type requires GitLab >= 11.11.
|
||||||
- A C(value) must be a string or a number.
|
- A C(value) must be a string or a number.
|
||||||
@@ -86,19 +87,26 @@ options:
|
|||||||
type: str
|
type: str
|
||||||
masked:
|
masked:
|
||||||
description:
|
description:
|
||||||
- Wether variable value is masked or not.
|
- Whether variable value is masked or not.
|
||||||
- Support for masked values requires GitLab >= 11.10.
|
- Support for masked values requires GitLab >= 11.10.
|
||||||
type: bool
|
type: bool
|
||||||
default: false
|
default: false
|
||||||
protected:
|
protected:
|
||||||
description:
|
description:
|
||||||
- Wether variable value is protected or not.
|
- Whether variable value is protected or not.
|
||||||
- Support for protected values requires GitLab >= 9.3.
|
- Support for protected values requires GitLab >= 9.3.
|
||||||
type: bool
|
type: bool
|
||||||
default: false
|
default: false
|
||||||
|
raw:
|
||||||
|
description:
|
||||||
|
- Whether variable value is raw or not.
|
||||||
|
- Support for raw values requires GitLab >= 15.7.
|
||||||
|
type: bool
|
||||||
|
default: false
|
||||||
|
version_added: '7.4.0'
|
||||||
variable_type:
|
variable_type:
|
||||||
description:
|
description:
|
||||||
- Wether a variable is an environment variable (V(env_var)) or a file (V(file)).
|
- Whether a variable is an environment variable (V(env_var)) or a file (V(file)).
|
||||||
- Support for O(variables[].variable_type) requires GitLab >= 11.11.
|
- Support for O(variables[].variable_type) requires GitLab >= 11.11.
|
||||||
type: str
|
type: str
|
||||||
choices: ["env_var", "file"]
|
choices: ["env_var", "file"]
|
||||||
@@ -143,6 +151,38 @@ EXAMPLES = '''
|
|||||||
variable_type: env_var
|
variable_type: env_var
|
||||||
environment_scope: '*'
|
environment_scope: '*'
|
||||||
|
|
||||||
|
- name: Set or update some CI/CD variables with raw value
|
||||||
|
community.general.gitlab_project_variable:
|
||||||
|
api_url: https://gitlab.com
|
||||||
|
api_token: secret_access_token
|
||||||
|
project: markuman/dotfiles
|
||||||
|
purge: false
|
||||||
|
vars:
|
||||||
|
ACCESS_KEY_ID: abc123
|
||||||
|
SECRET_ACCESS_KEY:
|
||||||
|
value: 3214cbad
|
||||||
|
masked: true
|
||||||
|
protected: true
|
||||||
|
raw: true
|
||||||
|
variable_type: env_var
|
||||||
|
environment_scope: '*'
|
||||||
|
|
||||||
|
- name: Set or update some CI/CD variables with expandable value
|
||||||
|
community.general.gitlab_project_variable:
|
||||||
|
api_url: https://gitlab.com
|
||||||
|
api_token: secret_access_token
|
||||||
|
project: markuman/dotfiles
|
||||||
|
purge: false
|
||||||
|
vars:
|
||||||
|
ACCESS_KEY_ID: abc123
|
||||||
|
SECRET_ACCESS_KEY:
|
||||||
|
value: '$MY_OTHER_VARIABLE'
|
||||||
|
masked: true
|
||||||
|
protected: true
|
||||||
|
raw: false
|
||||||
|
variable_type: env_var
|
||||||
|
environment_scope: '*'
|
||||||
|
|
||||||
- name: Delete one variable
|
- name: Delete one variable
|
||||||
community.general.gitlab_project_variable:
|
community.general.gitlab_project_variable:
|
||||||
api_url: https://gitlab.com
|
api_url: https://gitlab.com
|
||||||
@@ -220,6 +260,7 @@ class GitlabProjectVariables(object):
|
|||||||
"value": var_obj.get('value'),
|
"value": var_obj.get('value'),
|
||||||
"masked": var_obj.get('masked'),
|
"masked": var_obj.get('masked'),
|
||||||
"protected": var_obj.get('protected'),
|
"protected": var_obj.get('protected'),
|
||||||
|
"raw": var_obj.get('raw'),
|
||||||
"variable_type": var_obj.get('variable_type'),
|
"variable_type": var_obj.get('variable_type'),
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -290,6 +331,8 @@ def native_python_main(this_gitlab, purge, requested_variables, state, module):
|
|||||||
item['value'] = str(item.get('value'))
|
item['value'] = str(item.get('value'))
|
||||||
if item.get('protected') is None:
|
if item.get('protected') is None:
|
||||||
item['protected'] = False
|
item['protected'] = False
|
||||||
|
if item.get('raw') is None:
|
||||||
|
item['raw'] = False
|
||||||
if item.get('masked') is None:
|
if item.get('masked') is None:
|
||||||
item['masked'] = False
|
item['masked'] = False
|
||||||
if item.get('environment_scope') is None:
|
if item.get('environment_scope') is None:
|
||||||
@@ -361,11 +404,14 @@ def main():
|
|||||||
project=dict(type='str', required=True),
|
project=dict(type='str', required=True),
|
||||||
purge=dict(type='bool', required=False, default=False),
|
purge=dict(type='bool', required=False, default=False),
|
||||||
vars=dict(type='dict', required=False, default=dict(), no_log=True),
|
vars=dict(type='dict', required=False, default=dict(), no_log=True),
|
||||||
|
# please mind whenever changing the variables dict to also change module_utils/gitlab.py's
|
||||||
|
# KNOWN dict in filter_returned_variables or bad evil will happen
|
||||||
variables=dict(type='list', elements='dict', required=False, default=list(), options=dict(
|
variables=dict(type='list', elements='dict', required=False, default=list(), options=dict(
|
||||||
name=dict(type='str', required=True),
|
name=dict(type='str', required=True),
|
||||||
value=dict(type='str', no_log=True),
|
value=dict(type='str', no_log=True),
|
||||||
masked=dict(type='bool', default=False),
|
masked=dict(type='bool', default=False),
|
||||||
protected=dict(type='bool', default=False),
|
protected=dict(type='bool', default=False),
|
||||||
|
raw=dict(type='bool', default=False),
|
||||||
environment_scope=dict(type='str', default='*'),
|
environment_scope=dict(type='str', default='*'),
|
||||||
variable_type=dict(type='str', default='env_var', choices=["env_var", "file"]),
|
variable_type=dict(type='str', default='env_var', choices=["env_var", "file"]),
|
||||||
)),
|
)),
|
||||||
|
|||||||
@@ -94,9 +94,9 @@ class GitlabProtectedBranch(object):
|
|||||||
self._module = module
|
self._module = module
|
||||||
self.project = self.get_project(project)
|
self.project = self.get_project(project)
|
||||||
self.ACCESS_LEVEL = {
|
self.ACCESS_LEVEL = {
|
||||||
'nobody': gitlab.NO_ACCESS,
|
'nobody': gitlab.const.NO_ACCESS,
|
||||||
'developer': gitlab.DEVELOPER_ACCESS,
|
'developer': gitlab.const.DEVELOPER_ACCESS,
|
||||||
'maintainer': gitlab.MAINTAINER_ACCESS
|
'maintainer': gitlab.const.MAINTAINER_ACCESS
|
||||||
}
|
}
|
||||||
|
|
||||||
def get_project(self, project_name):
|
def get_project(self, project_name):
|
||||||
|
|||||||
@@ -244,12 +244,12 @@ class GitLabUser(object):
|
|||||||
self._gitlab = gitlab_instance
|
self._gitlab = gitlab_instance
|
||||||
self.user_object = None
|
self.user_object = None
|
||||||
self.ACCESS_LEVEL = {
|
self.ACCESS_LEVEL = {
|
||||||
'guest': gitlab.GUEST_ACCESS,
|
'guest': gitlab.const.GUEST_ACCESS,
|
||||||
'reporter': gitlab.REPORTER_ACCESS,
|
'reporter': gitlab.const.REPORTER_ACCESS,
|
||||||
'developer': gitlab.DEVELOPER_ACCESS,
|
'developer': gitlab.const.DEVELOPER_ACCESS,
|
||||||
'master': gitlab.MAINTAINER_ACCESS,
|
'master': gitlab.const.MAINTAINER_ACCESS,
|
||||||
'maintainer': gitlab.MAINTAINER_ACCESS,
|
'maintainer': gitlab.const.MAINTAINER_ACCESS,
|
||||||
'owner': gitlab.OWNER_ACCESS,
|
'owner': gitlab.const.OWNER_ACCESS,
|
||||||
}
|
}
|
||||||
|
|
||||||
'''
|
'''
|
||||||
@@ -485,7 +485,7 @@ class GitLabUser(object):
|
|||||||
|
|
||||||
'''
|
'''
|
||||||
@param user User object
|
@param user User object
|
||||||
@param identites List of identities to be added/updated
|
@param identities List of identities to be added/updated
|
||||||
@param overwrite_identities Overwrite user identities with identities passed to this module
|
@param overwrite_identities Overwrite user identities with identities passed to this module
|
||||||
'''
|
'''
|
||||||
def add_identities(self, user, identities, overwrite_identities=False):
|
def add_identities(self, user, identities, overwrite_identities=False):
|
||||||
@@ -504,7 +504,7 @@ class GitLabUser(object):
|
|||||||
|
|
||||||
'''
|
'''
|
||||||
@param user User object
|
@param user User object
|
||||||
@param identites List of identities to be added/updated
|
@param identities List of identities to be added/updated
|
||||||
'''
|
'''
|
||||||
def delete_identities(self, user, identities):
|
def delete_identities(self, user, identities):
|
||||||
changed = False
|
changed = False
|
||||||
|
|||||||
@@ -165,6 +165,7 @@ changed_pkgs:
|
|||||||
version_added: '0.2.0'
|
version_added: '0.2.0'
|
||||||
'''
|
'''
|
||||||
|
|
||||||
|
import json
|
||||||
import os.path
|
import os.path
|
||||||
import re
|
import re
|
||||||
|
|
||||||
@@ -184,6 +185,10 @@ def _create_regex_group_complement(s):
|
|||||||
chars = filter(None, (line.split('#')[0].strip() for line in lines))
|
chars = filter(None, (line.split('#')[0].strip() for line in lines))
|
||||||
group = r'[^' + r''.join(chars) + r']'
|
group = r'[^' + r''.join(chars) + r']'
|
||||||
return re.compile(group)
|
return re.compile(group)
|
||||||
|
|
||||||
|
|
||||||
|
def _check_package_in_json(json_output, package_type):
|
||||||
|
return bool(json_output.get(package_type, []) and json_output[package_type][0].get("installed"))
|
||||||
# /utils ------------------------------------------------------------------ }}}
|
# /utils ------------------------------------------------------------------ }}}
|
||||||
|
|
||||||
|
|
||||||
@@ -479,17 +484,13 @@ class Homebrew(object):
|
|||||||
cmd = [
|
cmd = [
|
||||||
"{brew_path}".format(brew_path=self.brew_path),
|
"{brew_path}".format(brew_path=self.brew_path),
|
||||||
"info",
|
"info",
|
||||||
|
"--json=v2",
|
||||||
self.current_package,
|
self.current_package,
|
||||||
]
|
]
|
||||||
rc, out, err = self.module.run_command(cmd)
|
rc, out, err = self.module.run_command(cmd)
|
||||||
for line in out.split('\n'):
|
data = json.loads(out)
|
||||||
if (
|
|
||||||
re.search(r'Built from source', line)
|
|
||||||
or re.search(r'Poured from bottle', line)
|
|
||||||
):
|
|
||||||
return True
|
|
||||||
|
|
||||||
return False
|
return _check_package_in_json(data, "formulae") or _check_package_in_json(data, "casks")
|
||||||
|
|
||||||
def _current_package_is_outdated(self):
|
def _current_package_is_outdated(self):
|
||||||
if not self.valid_package(self.current_package):
|
if not self.valid_package(self.current_package):
|
||||||
|
|||||||
@@ -393,7 +393,7 @@ class Homectl(object):
|
|||||||
user_metadata.pop('status', None)
|
user_metadata.pop('status', None)
|
||||||
# Let last change Usec be updated by homed when command runs.
|
# Let last change Usec be updated by homed when command runs.
|
||||||
user_metadata.pop('lastChangeUSec', None)
|
user_metadata.pop('lastChangeUSec', None)
|
||||||
# Now only change fields that are called on leaving whats currently in the record intact.
|
# Now only change fields that are called on leaving what's currently in the record intact.
|
||||||
record = user_metadata
|
record = user_metadata
|
||||||
|
|
||||||
record['userName'] = self.name
|
record['userName'] = self.name
|
||||||
@@ -439,7 +439,7 @@ class Homectl(object):
|
|||||||
self.result['changed'] = True
|
self.result['changed'] = True
|
||||||
|
|
||||||
if self.disksize:
|
if self.disksize:
|
||||||
# convert humand readble to bytes
|
# convert human readable to bytes
|
||||||
if self.disksize != record.get('diskSize'):
|
if self.disksize != record.get('diskSize'):
|
||||||
record['diskSize'] = human_to_bytes(self.disksize)
|
record['diskSize'] = human_to_bytes(self.disksize)
|
||||||
self.result['changed'] = True
|
self.result['changed'] = True
|
||||||
|
|||||||
@@ -84,7 +84,7 @@ ilo_redfish_command:
|
|||||||
type: dict
|
type: dict
|
||||||
contains:
|
contains:
|
||||||
ret:
|
ret:
|
||||||
description: Return True/False based on whether the operation was performed succesfully.
|
description: Return True/False based on whether the operation was performed successfully.
|
||||||
type: bool
|
type: bool
|
||||||
msg:
|
msg:
|
||||||
description: Status of the operation performed on the iLO.
|
description: Status of the operation performed on the iLO.
|
||||||
|
|||||||
@@ -142,7 +142,7 @@ class Imgadm(object):
|
|||||||
self.uuid = module.params['uuid']
|
self.uuid = module.params['uuid']
|
||||||
|
|
||||||
# Since there are a number of (natural) aliases, prevent having to look
|
# Since there are a number of (natural) aliases, prevent having to look
|
||||||
# them up everytime we operate on `state`.
|
# them up every time we operate on `state`.
|
||||||
if self.params['state'] in ['present', 'imported', 'updated']:
|
if self.params['state'] in ['present', 'imported', 'updated']:
|
||||||
self.present = True
|
self.present = True
|
||||||
else:
|
else:
|
||||||
@@ -174,7 +174,7 @@ class Imgadm(object):
|
|||||||
|
|
||||||
# There is no feedback from imgadm(1M) to determine if anything
|
# There is no feedback from imgadm(1M) to determine if anything
|
||||||
# was actually changed. So treat this as an 'always-changes' operation.
|
# was actually changed. So treat this as an 'always-changes' operation.
|
||||||
# Note that 'imgadm -v' produces unparseable JSON...
|
# Note that 'imgadm -v' produces unparsable JSON...
|
||||||
self.changed = True
|
self.changed = True
|
||||||
|
|
||||||
def manage_sources(self):
|
def manage_sources(self):
|
||||||
|
|||||||
@@ -4,6 +4,7 @@
|
|||||||
# Copyright (c) 2012, Jan-Piet Mens <jpmens () gmail.com>
|
# Copyright (c) 2012, Jan-Piet Mens <jpmens () gmail.com>
|
||||||
# Copyright (c) 2015, Ales Nosek <anosek.nosek () gmail.com>
|
# Copyright (c) 2015, Ales Nosek <anosek.nosek () gmail.com>
|
||||||
# Copyright (c) 2017, Ansible Project
|
# Copyright (c) 2017, Ansible Project
|
||||||
|
# Copyright (c) 2023, Ansible Project
|
||||||
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
|
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||||
# SPDX-License-Identifier: GPL-3.0-or-later
|
# SPDX-License-Identifier: GPL-3.0-or-later
|
||||||
|
|
||||||
@@ -42,8 +43,8 @@ options:
|
|||||||
description:
|
description:
|
||||||
- Section name in INI file. This is added if O(state=present) automatically when
|
- Section name in INI file. This is added if O(state=present) automatically when
|
||||||
a single value is being set.
|
a single value is being set.
|
||||||
- If left empty, being omitted, or being set to V(null), the O(option) will be placed before the first O(section).
|
- If being omitted, the O(option) will be placed before the first O(section).
|
||||||
- Using V(null) is also required if the config format does not support sections.
|
- Omitting O(section) is also required if the config format does not support sections.
|
||||||
type: str
|
type: str
|
||||||
option:
|
option:
|
||||||
description:
|
description:
|
||||||
@@ -98,6 +99,12 @@ options:
|
|||||||
- Do not insert spaces before and after '=' symbol.
|
- Do not insert spaces before and after '=' symbol.
|
||||||
type: bool
|
type: bool
|
||||||
default: false
|
default: false
|
||||||
|
ignore_spaces:
|
||||||
|
description:
|
||||||
|
- Do not change a line if doing so would only add or remove spaces before or after the V(=) symbol.
|
||||||
|
type: bool
|
||||||
|
default: false
|
||||||
|
version_added: 7.5.0
|
||||||
create:
|
create:
|
||||||
description:
|
description:
|
||||||
- If set to V(false), the module will fail if the file does not already exist.
|
- If set to V(false), the module will fail if the file does not already exist.
|
||||||
@@ -164,6 +171,13 @@ EXAMPLES = r'''
|
|||||||
- pepsi
|
- pepsi
|
||||||
mode: '0600'
|
mode: '0600'
|
||||||
state: present
|
state: present
|
||||||
|
|
||||||
|
- name: Add "beverage=lemon juice" outside a section in specified file
|
||||||
|
community.general.ini_file:
|
||||||
|
path: /etc/conf
|
||||||
|
option: beverage
|
||||||
|
value: lemon juice
|
||||||
|
state: present
|
||||||
'''
|
'''
|
||||||
|
|
||||||
import io
|
import io
|
||||||
@@ -178,7 +192,7 @@ from ansible.module_utils.common.text.converters import to_bytes, to_text
|
|||||||
|
|
||||||
def match_opt(option, line):
|
def match_opt(option, line):
|
||||||
option = re.escape(option)
|
option = re.escape(option)
|
||||||
return re.match('[#;]?( |\t)*(%s)( |\t)*(=|$)( |\t)*(.*)' % option, line)
|
return re.match('([#;]?)( |\t)*(%s)( |\t)*(=|$)( |\t)*(.*)' % option, line)
|
||||||
|
|
||||||
|
|
||||||
def match_active_opt(option, line):
|
def match_active_opt(option, line):
|
||||||
@@ -186,19 +200,27 @@ def match_active_opt(option, line):
|
|||||||
return re.match('( |\t)*(%s)( |\t)*(=|$)( |\t)*(.*)' % option, line)
|
return re.match('( |\t)*(%s)( |\t)*(=|$)( |\t)*(.*)' % option, line)
|
||||||
|
|
||||||
|
|
||||||
def update_section_line(changed, section_lines, index, changed_lines, newline, msg):
|
def update_section_line(option, changed, section_lines, index, changed_lines, ignore_spaces, newline, msg):
|
||||||
option_changed = section_lines[index] != newline
|
option_changed = None
|
||||||
|
if ignore_spaces:
|
||||||
|
old_match = match_opt(option, section_lines[index])
|
||||||
|
if not old_match.group(1):
|
||||||
|
new_match = match_opt(option, newline)
|
||||||
|
option_changed = old_match.group(7) != new_match.group(7)
|
||||||
|
if option_changed is None:
|
||||||
|
option_changed = section_lines[index] != newline
|
||||||
|
if option_changed:
|
||||||
|
section_lines[index] = newline
|
||||||
changed = changed or option_changed
|
changed = changed or option_changed
|
||||||
if option_changed:
|
if option_changed:
|
||||||
msg = 'option changed'
|
msg = 'option changed'
|
||||||
section_lines[index] = newline
|
|
||||||
changed_lines[index] = 1
|
changed_lines[index] = 1
|
||||||
return (changed, msg)
|
return (changed, msg)
|
||||||
|
|
||||||
|
|
||||||
def do_ini(module, filename, section=None, option=None, values=None,
|
def do_ini(module, filename, section=None, option=None, values=None,
|
||||||
state='present', exclusive=True, backup=False, no_extra_spaces=False,
|
state='present', exclusive=True, backup=False, no_extra_spaces=False,
|
||||||
create=True, allow_no_value=False, follow=False):
|
ignore_spaces=False, create=True, allow_no_value=False, follow=False):
|
||||||
|
|
||||||
if section is not None:
|
if section is not None:
|
||||||
section = to_text(section)
|
section = to_text(section)
|
||||||
@@ -306,8 +328,8 @@ def do_ini(module, filename, section=None, option=None, values=None,
|
|||||||
for index, line in enumerate(section_lines):
|
for index, line in enumerate(section_lines):
|
||||||
if match_opt(option, line):
|
if match_opt(option, line):
|
||||||
match = match_opt(option, line)
|
match = match_opt(option, line)
|
||||||
if values and match.group(6) in values:
|
if values and match.group(7) in values:
|
||||||
matched_value = match.group(6)
|
matched_value = match.group(7)
|
||||||
if not matched_value and allow_no_value:
|
if not matched_value and allow_no_value:
|
||||||
# replace existing option with no value line(s)
|
# replace existing option with no value line(s)
|
||||||
newline = u'%s\n' % option
|
newline = u'%s\n' % option
|
||||||
@@ -315,12 +337,12 @@ def do_ini(module, filename, section=None, option=None, values=None,
|
|||||||
else:
|
else:
|
||||||
# replace existing option=value line(s)
|
# replace existing option=value line(s)
|
||||||
newline = assignment_format % (option, matched_value)
|
newline = assignment_format % (option, matched_value)
|
||||||
(changed, msg) = update_section_line(changed, section_lines, index, changed_lines, newline, msg)
|
(changed, msg) = update_section_line(option, changed, section_lines, index, changed_lines, ignore_spaces, newline, msg)
|
||||||
values.remove(matched_value)
|
values.remove(matched_value)
|
||||||
elif not values and allow_no_value:
|
elif not values and allow_no_value:
|
||||||
# replace existing option with no value line(s)
|
# replace existing option with no value line(s)
|
||||||
newline = u'%s\n' % option
|
newline = u'%s\n' % option
|
||||||
(changed, msg) = update_section_line(changed, section_lines, index, changed_lines, newline, msg)
|
(changed, msg) = update_section_line(option, changed, section_lines, index, changed_lines, ignore_spaces, newline, msg)
|
||||||
option_no_value_present = True
|
option_no_value_present = True
|
||||||
break
|
break
|
||||||
|
|
||||||
@@ -330,7 +352,7 @@ def do_ini(module, filename, section=None, option=None, values=None,
|
|||||||
for index, line in enumerate(section_lines):
|
for index, line in enumerate(section_lines):
|
||||||
if not changed_lines[index] and match_opt(option, line):
|
if not changed_lines[index] and match_opt(option, line):
|
||||||
newline = assignment_format % (option, values.pop(0))
|
newline = assignment_format % (option, values.pop(0))
|
||||||
(changed, msg) = update_section_line(changed, section_lines, index, changed_lines, newline, msg)
|
(changed, msg) = update_section_line(option, changed, section_lines, index, changed_lines, ignore_spaces, newline, msg)
|
||||||
if len(values) == 0:
|
if len(values) == 0:
|
||||||
break
|
break
|
||||||
# remove all remaining option occurrences from the rest of the section
|
# remove all remaining option occurrences from the rest of the section
|
||||||
@@ -449,6 +471,7 @@ def main():
|
|||||||
state=dict(type='str', default='present', choices=['absent', 'present']),
|
state=dict(type='str', default='present', choices=['absent', 'present']),
|
||||||
exclusive=dict(type='bool', default=True),
|
exclusive=dict(type='bool', default=True),
|
||||||
no_extra_spaces=dict(type='bool', default=False),
|
no_extra_spaces=dict(type='bool', default=False),
|
||||||
|
ignore_spaces=dict(type='bool', default=False),
|
||||||
allow_no_value=dict(type='bool', default=False),
|
allow_no_value=dict(type='bool', default=False),
|
||||||
create=dict(type='bool', default=True),
|
create=dict(type='bool', default=True),
|
||||||
follow=dict(type='bool', default=False)
|
follow=dict(type='bool', default=False)
|
||||||
@@ -469,6 +492,7 @@ def main():
|
|||||||
exclusive = module.params['exclusive']
|
exclusive = module.params['exclusive']
|
||||||
backup = module.params['backup']
|
backup = module.params['backup']
|
||||||
no_extra_spaces = module.params['no_extra_spaces']
|
no_extra_spaces = module.params['no_extra_spaces']
|
||||||
|
ignore_spaces = module.params['ignore_spaces']
|
||||||
allow_no_value = module.params['allow_no_value']
|
allow_no_value = module.params['allow_no_value']
|
||||||
create = module.params['create']
|
create = module.params['create']
|
||||||
follow = module.params['follow']
|
follow = module.params['follow']
|
||||||
@@ -481,7 +505,9 @@ def main():
|
|||||||
elif values is None:
|
elif values is None:
|
||||||
values = []
|
values = []
|
||||||
|
|
||||||
(changed, backup_file, diff, msg) = do_ini(module, path, section, option, values, state, exclusive, backup, no_extra_spaces, create, allow_no_value, follow)
|
(changed, backup_file, diff, msg) = do_ini(
|
||||||
|
module, path, section, option, values, state, exclusive, backup,
|
||||||
|
no_extra_spaces, ignore_spaces, create, allow_no_value, follow)
|
||||||
|
|
||||||
if not module.check_mode and os.path.exists(path):
|
if not module.check_mode and os.path.exists(path):
|
||||||
file_args = module.load_file_common_arguments(module.params)
|
file_args = module.load_file_common_arguments(module.params)
|
||||||
|
|||||||
@@ -133,7 +133,7 @@ def _check_new_pkg(module, package, repository_path):
|
|||||||
def _check_installed_pkg(module, package, repository_path):
|
def _check_installed_pkg(module, package, repository_path):
|
||||||
"""
|
"""
|
||||||
Check the package on AIX.
|
Check the package on AIX.
|
||||||
It verifies if the package is installed and informations
|
It verifies if the package is installed and information
|
||||||
|
|
||||||
:param module: Ansible module parameters spec.
|
:param module: Ansible module parameters spec.
|
||||||
:param package: Package/fileset name.
|
:param package: Package/fileset name.
|
||||||
|
|||||||
@@ -12,14 +12,14 @@ __metaclass__ = type
|
|||||||
DOCUMENTATION = '''
|
DOCUMENTATION = '''
|
||||||
---
|
---
|
||||||
module: interfaces_file
|
module: interfaces_file
|
||||||
short_description: Tweak settings in /etc/network/interfaces files
|
short_description: Tweak settings in C(/etc/network/interfaces) files
|
||||||
extends_documentation_fragment:
|
extends_documentation_fragment:
|
||||||
- ansible.builtin.files
|
- ansible.builtin.files
|
||||||
- community.general.attributes
|
- community.general.attributes
|
||||||
description:
|
description:
|
||||||
- Manage (add, remove, change) individual interface options in an interfaces-style file without having
|
- Manage (add, remove, change) individual interface options in an interfaces-style file without having
|
||||||
to manage the file as a whole with, say, M(ansible.builtin.template) or M(ansible.builtin.assemble). Interface has to be presented in a file.
|
to manage the file as a whole with, say, M(ansible.builtin.template) or M(ansible.builtin.assemble). Interface has to be presented in a file.
|
||||||
- Read information about interfaces from interfaces-styled files
|
- Read information about interfaces from interfaces-styled files.
|
||||||
attributes:
|
attributes:
|
||||||
check_mode:
|
check_mode:
|
||||||
support: full
|
support: full
|
||||||
@@ -29,27 +29,27 @@ options:
|
|||||||
dest:
|
dest:
|
||||||
type: path
|
type: path
|
||||||
description:
|
description:
|
||||||
- Path to the interfaces file
|
- Path to the interfaces file.
|
||||||
default: /etc/network/interfaces
|
default: /etc/network/interfaces
|
||||||
iface:
|
iface:
|
||||||
type: str
|
type: str
|
||||||
description:
|
description:
|
||||||
- Name of the interface, required for value changes or option remove
|
- Name of the interface, required for value changes or option remove.
|
||||||
address_family:
|
address_family:
|
||||||
type: str
|
type: str
|
||||||
description:
|
description:
|
||||||
- Address family of the interface, useful if same interface name is used for both inet and inet6
|
- Address family of the interface, useful if same interface name is used for both V(inet) and V(inet6).
|
||||||
option:
|
option:
|
||||||
type: str
|
type: str
|
||||||
description:
|
description:
|
||||||
- Name of the option, required for value changes or option remove
|
- Name of the option, required for value changes or option remove.
|
||||||
value:
|
value:
|
||||||
type: str
|
type: str
|
||||||
description:
|
description:
|
||||||
- If O(option) is not presented for the O(iface) and O(state) is V(present) option will be added.
|
- If O(option) is not presented for the O(iface) and O(state) is V(present) option will be added.
|
||||||
If O(option) already exists and is not V(pre-up), V(up), V(post-up) or V(down), it's value will be updated.
|
If O(option) already exists and is not V(pre-up), V(up), V(post-up) or V(down), it's value will be updated.
|
||||||
V(pre-up), V(up), V(post-up) and V(down) options cannot be updated, only adding new options, removing existing
|
V(pre-up), V(up), V(post-up) and V(down) options cannot be updated, only adding new options, removing existing
|
||||||
ones or cleaning the whole option set are supported
|
ones or cleaning the whole option set are supported.
|
||||||
backup:
|
backup:
|
||||||
description:
|
description:
|
||||||
- Create a backup file including the timestamp information so you can get
|
- Create a backup file including the timestamp information so you can get
|
||||||
@@ -64,72 +64,76 @@ options:
|
|||||||
choices: [ "present", "absent" ]
|
choices: [ "present", "absent" ]
|
||||||
|
|
||||||
notes:
|
notes:
|
||||||
- If option is defined multiple times last one will be updated but all will be deleted in case of an absent state
|
- If option is defined multiple times last one will be updated but all will be deleted in case of an absent state.
|
||||||
requirements: []
|
requirements: []
|
||||||
author: "Roman Belyakovsky (@hryamzik)"
|
author: "Roman Belyakovsky (@hryamzik)"
|
||||||
'''
|
'''
|
||||||
|
|
||||||
RETURN = '''
|
RETURN = '''
|
||||||
dest:
|
dest:
|
||||||
description: destination file/path
|
description: Destination file/path.
|
||||||
returned: success
|
returned: success
|
||||||
type: str
|
type: str
|
||||||
sample: "/etc/network/interfaces"
|
sample: "/etc/network/interfaces"
|
||||||
ifaces:
|
ifaces:
|
||||||
description: interfaces dictionary
|
description: Interfaces dictionary.
|
||||||
returned: success
|
returned: success
|
||||||
type: complex
|
type: dict
|
||||||
contains:
|
contains:
|
||||||
ifaces:
|
ifaces:
|
||||||
description: interface dictionary
|
description: Interface dictionary.
|
||||||
returned: success
|
returned: success
|
||||||
type: dict
|
type: dict
|
||||||
contains:
|
contains:
|
||||||
eth0:
|
eth0:
|
||||||
description: Name of the interface
|
description: Name of the interface.
|
||||||
returned: success
|
returned: success
|
||||||
type: dict
|
type: dict
|
||||||
contains:
|
contains:
|
||||||
address_family:
|
address_family:
|
||||||
description: interface address family
|
description: Interface address family.
|
||||||
returned: success
|
returned: success
|
||||||
type: str
|
type: str
|
||||||
sample: "inet"
|
sample: "inet"
|
||||||
method:
|
method:
|
||||||
description: interface method
|
description: Interface method.
|
||||||
returned: success
|
returned: success
|
||||||
type: str
|
type: str
|
||||||
sample: "manual"
|
sample: "manual"
|
||||||
mtu:
|
mtu:
|
||||||
description: other options, all values returned as strings
|
description: Other options, all values returned as strings.
|
||||||
returned: success
|
returned: success
|
||||||
type: str
|
type: str
|
||||||
sample: "1500"
|
sample: "1500"
|
||||||
pre-up:
|
pre-up:
|
||||||
description: list of C(pre-up) scripts
|
description: List of C(pre-up) scripts.
|
||||||
returned: success
|
returned: success
|
||||||
type: list
|
type: list
|
||||||
|
elements: str
|
||||||
sample:
|
sample:
|
||||||
- "route add -net 10.10.10.0/24 gw 10.10.10.1 dev eth1"
|
- "route add -net 10.10.10.0/24 gw 10.10.10.1 dev eth1"
|
||||||
- "route add -net 10.10.11.0/24 gw 10.10.11.1 dev eth2"
|
- "route add -net 10.10.11.0/24 gw 10.10.11.1 dev eth2"
|
||||||
up:
|
up:
|
||||||
description: list of C(up) scripts
|
description: List of C(up) scripts.
|
||||||
returned: success
|
returned: success
|
||||||
type: list
|
type: list
|
||||||
|
elements: str
|
||||||
sample:
|
sample:
|
||||||
- "route add -net 10.10.10.0/24 gw 10.10.10.1 dev eth1"
|
- "route add -net 10.10.10.0/24 gw 10.10.10.1 dev eth1"
|
||||||
- "route add -net 10.10.11.0/24 gw 10.10.11.1 dev eth2"
|
- "route add -net 10.10.11.0/24 gw 10.10.11.1 dev eth2"
|
||||||
post-up:
|
post-up:
|
||||||
description: list of C(post-up) scripts
|
description: List of C(post-up) scripts.
|
||||||
returned: success
|
returned: success
|
||||||
type: list
|
type: list
|
||||||
|
elements: str
|
||||||
sample:
|
sample:
|
||||||
- "route add -net 10.10.10.0/24 gw 10.10.10.1 dev eth1"
|
- "route add -net 10.10.10.0/24 gw 10.10.10.1 dev eth1"
|
||||||
- "route add -net 10.10.11.0/24 gw 10.10.11.1 dev eth2"
|
- "route add -net 10.10.11.0/24 gw 10.10.11.1 dev eth2"
|
||||||
down:
|
down:
|
||||||
description: list of C(down) scripts
|
description: List of C(down) scripts.
|
||||||
returned: success
|
returned: success
|
||||||
type: list
|
type: list
|
||||||
|
elements: str
|
||||||
sample:
|
sample:
|
||||||
- "route del -net 10.10.10.0/24 gw 10.10.10.1 dev eth1"
|
- "route del -net 10.10.10.0/24 gw 10.10.10.1 dev eth1"
|
||||||
- "route del -net 10.10.11.0/24 gw 10.10.11.1 dev eth2"
|
- "route del -net 10.10.11.0/24 gw 10.10.11.1 dev eth2"
|
||||||
@@ -336,6 +340,8 @@ def addOptionAfterLine(option, value, iface, lines, last_line_dict, iface_option
|
|||||||
changed = False
|
changed = False
|
||||||
for ln in lines:
|
for ln in lines:
|
||||||
if ln.get('line_type', '') == 'iface' and ln.get('iface', '') == iface and value != ln.get('params', {}).get('method', ''):
|
if ln.get('line_type', '') == 'iface' and ln.get('iface', '') == iface and value != ln.get('params', {}).get('method', ''):
|
||||||
|
if address_family is not None and ln.get('address_family') != address_family:
|
||||||
|
continue
|
||||||
changed = True
|
changed = True
|
||||||
ln['line'] = re.sub(ln.get('params', {}).get('method', '') + '$', value, ln.get('line'))
|
ln['line'] = re.sub(ln.get('params', {}).get('method', '') + '$', value, ln.get('line'))
|
||||||
ln['params']['method'] = value
|
ln['params']['method'] = value
|
||||||
|
|||||||
@@ -40,6 +40,12 @@ options:
|
|||||||
aliases: ["primarygroup"]
|
aliases: ["primarygroup"]
|
||||||
type: str
|
type: str
|
||||||
version_added: '2.5.0'
|
version_added: '2.5.0'
|
||||||
|
ipagroupobjectclasses:
|
||||||
|
description: A list of group objectclasses.
|
||||||
|
aliases: ["groupobjectclasses"]
|
||||||
|
type: list
|
||||||
|
elements: str
|
||||||
|
version_added: '7.3.0'
|
||||||
ipagroupsearchfields:
|
ipagroupsearchfields:
|
||||||
description: A list of fields to search in when searching for groups.
|
description: A list of fields to search in when searching for groups.
|
||||||
aliases: ["groupsearchfields"]
|
aliases: ["groupsearchfields"]
|
||||||
@@ -85,12 +91,20 @@ options:
|
|||||||
elements: str
|
elements: str
|
||||||
version_added: '3.7.0'
|
version_added: '3.7.0'
|
||||||
ipauserauthtype:
|
ipauserauthtype:
|
||||||
description: The authentication type to use by default.
|
description:
|
||||||
|
- The authentication type to use by default.
|
||||||
|
- The choice V(idp) has been added in community.general 7.3.0.
|
||||||
aliases: ["userauthtype"]
|
aliases: ["userauthtype"]
|
||||||
choices: ["password", "radius", "otp", "pkinit", "hardened", "disabled"]
|
choices: ["password", "radius", "otp", "pkinit", "hardened", "idp", "disabled"]
|
||||||
type: list
|
type: list
|
||||||
elements: str
|
elements: str
|
||||||
version_added: '2.5.0'
|
version_added: '2.5.0'
|
||||||
|
ipauserobjectclasses:
|
||||||
|
description: A list of user objectclasses.
|
||||||
|
aliases: ["userobjectclasses"]
|
||||||
|
type: list
|
||||||
|
elements: str
|
||||||
|
version_added: '7.3.0'
|
||||||
ipausersearchfields:
|
ipausersearchfields:
|
||||||
description: A list of fields to search in when searching for users.
|
description: A list of fields to search in when searching for users.
|
||||||
aliases: ["usersearchfields"]
|
aliases: ["usersearchfields"]
|
||||||
@@ -235,11 +249,12 @@ class ConfigIPAClient(IPAClient):
|
|||||||
|
|
||||||
def get_config_dict(ipaconfigstring=None, ipadefaultloginshell=None,
|
def get_config_dict(ipaconfigstring=None, ipadefaultloginshell=None,
|
||||||
ipadefaultemaildomain=None, ipadefaultprimarygroup=None,
|
ipadefaultemaildomain=None, ipadefaultprimarygroup=None,
|
||||||
ipagroupsearchfields=None, ipahomesrootdir=None,
|
ipagroupsearchfields=None, ipagroupobjectclasses=None,
|
||||||
ipakrbauthzdata=None, ipamaxusernamelength=None,
|
ipahomesrootdir=None, ipakrbauthzdata=None,
|
||||||
ipapwdexpadvnotify=None, ipasearchrecordslimit=None,
|
ipamaxusernamelength=None, ipapwdexpadvnotify=None,
|
||||||
ipasearchtimelimit=None, ipaselinuxusermaporder=None,
|
ipasearchrecordslimit=None, ipasearchtimelimit=None,
|
||||||
ipauserauthtype=None, ipausersearchfields=None):
|
ipaselinuxusermaporder=None, ipauserauthtype=None,
|
||||||
|
ipausersearchfields=None, ipauserobjectclasses=None):
|
||||||
config = {}
|
config = {}
|
||||||
if ipaconfigstring is not None:
|
if ipaconfigstring is not None:
|
||||||
config['ipaconfigstring'] = ipaconfigstring
|
config['ipaconfigstring'] = ipaconfigstring
|
||||||
@@ -249,6 +264,8 @@ def get_config_dict(ipaconfigstring=None, ipadefaultloginshell=None,
|
|||||||
config['ipadefaultemaildomain'] = ipadefaultemaildomain
|
config['ipadefaultemaildomain'] = ipadefaultemaildomain
|
||||||
if ipadefaultprimarygroup is not None:
|
if ipadefaultprimarygroup is not None:
|
||||||
config['ipadefaultprimarygroup'] = ipadefaultprimarygroup
|
config['ipadefaultprimarygroup'] = ipadefaultprimarygroup
|
||||||
|
if ipagroupobjectclasses is not None:
|
||||||
|
config['ipagroupobjectclasses'] = ipagroupobjectclasses
|
||||||
if ipagroupsearchfields is not None:
|
if ipagroupsearchfields is not None:
|
||||||
config['ipagroupsearchfields'] = ','.join(ipagroupsearchfields)
|
config['ipagroupsearchfields'] = ','.join(ipagroupsearchfields)
|
||||||
if ipahomesrootdir is not None:
|
if ipahomesrootdir is not None:
|
||||||
@@ -267,6 +284,8 @@ def get_config_dict(ipaconfigstring=None, ipadefaultloginshell=None,
|
|||||||
config['ipaselinuxusermaporder'] = '$'.join(ipaselinuxusermaporder)
|
config['ipaselinuxusermaporder'] = '$'.join(ipaselinuxusermaporder)
|
||||||
if ipauserauthtype is not None:
|
if ipauserauthtype is not None:
|
||||||
config['ipauserauthtype'] = ipauserauthtype
|
config['ipauserauthtype'] = ipauserauthtype
|
||||||
|
if ipauserobjectclasses is not None:
|
||||||
|
config['ipauserobjectclasses'] = ipauserobjectclasses
|
||||||
if ipausersearchfields is not None:
|
if ipausersearchfields is not None:
|
||||||
config['ipausersearchfields'] = ','.join(ipausersearchfields)
|
config['ipausersearchfields'] = ','.join(ipausersearchfields)
|
||||||
|
|
||||||
@@ -283,6 +302,7 @@ def ensure(module, client):
|
|||||||
ipadefaultloginshell=module.params.get('ipadefaultloginshell'),
|
ipadefaultloginshell=module.params.get('ipadefaultloginshell'),
|
||||||
ipadefaultemaildomain=module.params.get('ipadefaultemaildomain'),
|
ipadefaultemaildomain=module.params.get('ipadefaultemaildomain'),
|
||||||
ipadefaultprimarygroup=module.params.get('ipadefaultprimarygroup'),
|
ipadefaultprimarygroup=module.params.get('ipadefaultprimarygroup'),
|
||||||
|
ipagroupobjectclasses=module.params.get('ipagroupobjectclasses'),
|
||||||
ipagroupsearchfields=module.params.get('ipagroupsearchfields'),
|
ipagroupsearchfields=module.params.get('ipagroupsearchfields'),
|
||||||
ipahomesrootdir=module.params.get('ipahomesrootdir'),
|
ipahomesrootdir=module.params.get('ipahomesrootdir'),
|
||||||
ipakrbauthzdata=module.params.get('ipakrbauthzdata'),
|
ipakrbauthzdata=module.params.get('ipakrbauthzdata'),
|
||||||
@@ -293,6 +313,7 @@ def ensure(module, client):
|
|||||||
ipaselinuxusermaporder=module.params.get('ipaselinuxusermaporder'),
|
ipaselinuxusermaporder=module.params.get('ipaselinuxusermaporder'),
|
||||||
ipauserauthtype=module.params.get('ipauserauthtype'),
|
ipauserauthtype=module.params.get('ipauserauthtype'),
|
||||||
ipausersearchfields=module.params.get('ipausersearchfields'),
|
ipausersearchfields=module.params.get('ipausersearchfields'),
|
||||||
|
ipauserobjectclasses=module.params.get('ipauserobjectclasses'),
|
||||||
)
|
)
|
||||||
ipa_config = client.config_show()
|
ipa_config = client.config_show()
|
||||||
diff = get_config_diff(client, ipa_config, module_config)
|
diff = get_config_diff(client, ipa_config, module_config)
|
||||||
@@ -322,6 +343,8 @@ def main():
|
|||||||
ipadefaultloginshell=dict(type='str', aliases=['loginshell']),
|
ipadefaultloginshell=dict(type='str', aliases=['loginshell']),
|
||||||
ipadefaultemaildomain=dict(type='str', aliases=['emaildomain']),
|
ipadefaultemaildomain=dict(type='str', aliases=['emaildomain']),
|
||||||
ipadefaultprimarygroup=dict(type='str', aliases=['primarygroup']),
|
ipadefaultprimarygroup=dict(type='str', aliases=['primarygroup']),
|
||||||
|
ipagroupobjectclasses=dict(type='list', elements='str',
|
||||||
|
aliases=['groupobjectclasses']),
|
||||||
ipagroupsearchfields=dict(type='list', elements='str',
|
ipagroupsearchfields=dict(type='list', elements='str',
|
||||||
aliases=['groupsearchfields']),
|
aliases=['groupsearchfields']),
|
||||||
ipahomesrootdir=dict(type='str', aliases=['homesrootdir']),
|
ipahomesrootdir=dict(type='str', aliases=['homesrootdir']),
|
||||||
@@ -337,9 +360,11 @@ def main():
|
|||||||
ipauserauthtype=dict(type='list', elements='str',
|
ipauserauthtype=dict(type='list', elements='str',
|
||||||
aliases=['userauthtype'],
|
aliases=['userauthtype'],
|
||||||
choices=["password", "radius", "otp", "pkinit",
|
choices=["password", "radius", "otp", "pkinit",
|
||||||
"hardened", "disabled"]),
|
"hardened", "idp", "disabled"]),
|
||||||
ipausersearchfields=dict(type='list', elements='str',
|
ipausersearchfields=dict(type='list', elements='str',
|
||||||
aliases=['usersearchfields']),
|
aliases=['usersearchfields']),
|
||||||
|
ipauserobjectclasses=dict(type='list', elements='str',
|
||||||
|
aliases=['userobjectclasses']),
|
||||||
)
|
)
|
||||||
|
|
||||||
module = AnsibleModule(
|
module = AnsibleModule(
|
||||||
|
|||||||
@@ -237,7 +237,7 @@ def get_otptoken_dict(ansible_to_ipa, uniqueid=None, newuniqueid=None, otptype=N
|
|||||||
if owner is not None:
|
if owner is not None:
|
||||||
otptoken[ansible_to_ipa['owner']] = owner
|
otptoken[ansible_to_ipa['owner']] = owner
|
||||||
if enabled is not None:
|
if enabled is not None:
|
||||||
otptoken[ansible_to_ipa['enabled']] = 'FALSE' if enabled else 'TRUE'
|
otptoken[ansible_to_ipa['enabled']] = False if enabled else True
|
||||||
if notbefore is not None:
|
if notbefore is not None:
|
||||||
otptoken[ansible_to_ipa['notbefore']] = notbefore + 'Z'
|
otptoken[ansible_to_ipa['notbefore']] = notbefore + 'Z'
|
||||||
if notafter is not None:
|
if notafter is not None:
|
||||||
|
|||||||
@@ -30,7 +30,9 @@ options:
|
|||||||
default: 'always'
|
default: 'always'
|
||||||
choices: [ always, on_create ]
|
choices: [ always, on_create ]
|
||||||
givenname:
|
givenname:
|
||||||
description: First name.
|
description:
|
||||||
|
- First name.
|
||||||
|
- If user does not exist and O(state=present), the usage of O(givenname) is required.
|
||||||
type: str
|
type: str
|
||||||
krbpasswordexpiration:
|
krbpasswordexpiration:
|
||||||
description:
|
description:
|
||||||
@@ -54,7 +56,9 @@ options:
|
|||||||
- Will not be set for an existing user unless O(update_password=always), which is the default.
|
- Will not be set for an existing user unless O(update_password=always), which is the default.
|
||||||
type: str
|
type: str
|
||||||
sn:
|
sn:
|
||||||
description: Surname.
|
description:
|
||||||
|
- Surname.
|
||||||
|
- If user does not exist and O(state=present), the usage of O(sn) is required.
|
||||||
type: str
|
type: str
|
||||||
sshpubkey:
|
sshpubkey:
|
||||||
description:
|
description:
|
||||||
|
|||||||
@@ -69,7 +69,7 @@ EXAMPLES = '''
|
|||||||
|
|
||||||
RETURN = '''
|
RETURN = '''
|
||||||
data:
|
data:
|
||||||
description: "JSON parsed response from ipbase.com. Please refer to U(https://ipbase.com/docs/info) for the detailled structure of the response."
|
description: "JSON parsed response from ipbase.com. Please refer to U(https://ipbase.com/docs/info) for the detailed structure of the response."
|
||||||
returned: success
|
returned: success
|
||||||
type: dict
|
type: dict
|
||||||
sample: {
|
sample: {
|
||||||
|
|||||||
@@ -458,7 +458,7 @@ def main():
|
|||||||
# The issue comes when wanting to restore state from empty iptable-save's
|
# The issue comes when wanting to restore state from empty iptable-save's
|
||||||
# output... what happens when, say:
|
# output... what happens when, say:
|
||||||
# - no table is specified, and iptables-save's output is only nat table;
|
# - no table is specified, and iptables-save's output is only nat table;
|
||||||
# - we give filter's ruleset to iptables-restore, that locks ourselve out
|
# - we give filter's ruleset to iptables-restore, that locks ourselves out
|
||||||
# of the host;
|
# of the host;
|
||||||
# then trying to roll iptables state back to the previous (working) setup
|
# then trying to roll iptables state back to the previous (working) setup
|
||||||
# doesn't override current filter table because no filter table is stored
|
# doesn't override current filter table because no filter table is stored
|
||||||
|
|||||||
@@ -195,7 +195,14 @@ def send_msg(msg, server='localhost', port='6667', channel=None, nick_to=None, k
|
|||||||
|
|
||||||
irc = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
|
irc = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
|
||||||
if use_ssl:
|
if use_ssl:
|
||||||
irc = ssl.wrap_socket(irc)
|
if getattr(ssl, 'PROTOCOL_TLS', None) is not None:
|
||||||
|
# Supported since Python 2.7.13
|
||||||
|
context = ssl.SSLContext(ssl.PROTOCOL_TLS)
|
||||||
|
else:
|
||||||
|
context = ssl.SSLContext()
|
||||||
|
context.verify_mode = ssl.CERT_NONE
|
||||||
|
# TODO: create a secure context with `context = ssl.create_default_context()` instead!
|
||||||
|
irc = context.wrap_socket(irc)
|
||||||
irc.connect((server, int(port)))
|
irc.connect((server, int(port)))
|
||||||
|
|
||||||
if passwd:
|
if passwd:
|
||||||
|
|||||||
@@ -20,6 +20,7 @@ requirements:
|
|||||||
author:
|
author:
|
||||||
- Brett Milford (@brettmilford)
|
- Brett Milford (@brettmilford)
|
||||||
- Tong He (@unnecessary-username)
|
- Tong He (@unnecessary-username)
|
||||||
|
- Juan Casanova (@juanmcasanova)
|
||||||
extends_documentation_fragment:
|
extends_documentation_fragment:
|
||||||
- community.general.attributes
|
- community.general.attributes
|
||||||
attributes:
|
attributes:
|
||||||
@@ -65,6 +66,19 @@ options:
|
|||||||
description:
|
description:
|
||||||
- User to authenticate with the Jenkins server.
|
- User to authenticate with the Jenkins server.
|
||||||
type: str
|
type: str
|
||||||
|
detach:
|
||||||
|
description:
|
||||||
|
- Enable detached mode to not wait for the build end.
|
||||||
|
default: false
|
||||||
|
type: bool
|
||||||
|
version_added: 7.4.0
|
||||||
|
time_between_checks:
|
||||||
|
description:
|
||||||
|
- Time in seconds to wait between requests to the Jenkins server.
|
||||||
|
- This times must be higher than the configured quiet time for the job.
|
||||||
|
default: 10
|
||||||
|
type: int
|
||||||
|
version_added: 7.4.0
|
||||||
'''
|
'''
|
||||||
|
|
||||||
EXAMPLES = '''
|
EXAMPLES = '''
|
||||||
@@ -152,6 +166,8 @@ class JenkinsBuild:
|
|||||||
self.user = module.params.get('user')
|
self.user = module.params.get('user')
|
||||||
self.jenkins_url = module.params.get('url')
|
self.jenkins_url = module.params.get('url')
|
||||||
self.build_number = module.params.get('build_number')
|
self.build_number = module.params.get('build_number')
|
||||||
|
self.detach = module.params.get('detach')
|
||||||
|
self.time_between_checks = module.params.get('time_between_checks')
|
||||||
self.server = self.get_jenkins_connection()
|
self.server = self.get_jenkins_connection()
|
||||||
|
|
||||||
self.result = {
|
self.result = {
|
||||||
@@ -235,7 +251,14 @@ class JenkinsBuild:
|
|||||||
build_status = self.get_build_status()
|
build_status = self.get_build_status()
|
||||||
|
|
||||||
if build_status['result'] is None:
|
if build_status['result'] is None:
|
||||||
sleep(10)
|
# If detached mode is active mark as success, we wouldn't be able to get here if it didn't exist
|
||||||
|
if self.detach:
|
||||||
|
result['changed'] = True
|
||||||
|
result['build_info'] = build_status
|
||||||
|
|
||||||
|
return result
|
||||||
|
|
||||||
|
sleep(self.time_between_checks)
|
||||||
self.get_result()
|
self.get_result()
|
||||||
else:
|
else:
|
||||||
if self.state == "stopped" and build_status['result'] == "ABORTED":
|
if self.state == "stopped" and build_status['result'] == "ABORTED":
|
||||||
@@ -273,6 +296,8 @@ def main():
|
|||||||
token=dict(no_log=True),
|
token=dict(no_log=True),
|
||||||
url=dict(default="http://localhost:8080"),
|
url=dict(default="http://localhost:8080"),
|
||||||
user=dict(),
|
user=dict(),
|
||||||
|
detach=dict(type='bool', default=False),
|
||||||
|
time_between_checks=dict(type='int', default=10),
|
||||||
),
|
),
|
||||||
mutually_exclusive=[['password', 'token']],
|
mutually_exclusive=[['password', 'token']],
|
||||||
required_if=[['state', 'absent', ['build_number'], True], ['state', 'stopped', ['build_number'], True]],
|
required_if=[['state', 'absent', ['build_number'], True], ['state', 'stopped', ['build_number'], True]],
|
||||||
@@ -288,7 +313,7 @@ def main():
|
|||||||
else:
|
else:
|
||||||
jenkins_build.absent_build()
|
jenkins_build.absent_build()
|
||||||
|
|
||||||
sleep(10)
|
sleep(jenkins_build.time_between_checks)
|
||||||
result = jenkins_build.get_result()
|
result = jenkins_build.get_result()
|
||||||
module.exit_json(**result)
|
module.exit_json(**result)
|
||||||
|
|
||||||
|
|||||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user