mirror of
https://github.com/ansible-collections/community.general.git
synced 2026-04-30 10:26:52 +00:00
Compare commits
98 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
5d3a2a3bd4 | ||
|
|
686cdf2a6b | ||
|
|
4928810dda | ||
|
|
4dc2e14039 | ||
|
|
6ec769b051 | ||
|
|
e4d3d24b26 | ||
|
|
572e3f0814 | ||
|
|
e03ade818a | ||
|
|
54725bea77 | ||
|
|
db24f9857a | ||
|
|
c00147e532 | ||
|
|
0baceda7f6 | ||
|
|
c563813e4e | ||
|
|
1dbd7d4d00 | ||
|
|
41b72c0055 | ||
|
|
96a8390b5e | ||
|
|
25474f657a | ||
|
|
d7c4849473 | ||
|
|
0d459e5662 | ||
|
|
01bbab6b2c | ||
|
|
59a7064392 | ||
|
|
8e7b779ec9 | ||
|
|
1ba5344258 | ||
|
|
58e9454379 | ||
|
|
af3dec9b97 | ||
|
|
99a161bd06 | ||
|
|
feabad39f4 | ||
|
|
4a5276b589 | ||
|
|
e342dfb467 | ||
|
|
5b425fc297 | ||
|
|
d8328312a1 | ||
|
|
2ce326ca5b | ||
|
|
90ed2fa5c3 | ||
|
|
407d776610 | ||
|
|
951806c888 | ||
|
|
0fe7ea63a8 | ||
|
|
3a95a84963 | ||
|
|
2c3e93cc4d | ||
|
|
656b25a4a1 | ||
|
|
1863694297 | ||
|
|
c0f753dd21 | ||
|
|
369cde2320 | ||
|
|
e90872b486 | ||
|
|
b52d3504cb | ||
|
|
1e150cda01 | ||
|
|
db135b83dc | ||
|
|
ad4866bb3b | ||
|
|
83339c44b3 | ||
|
|
71633249c4 | ||
|
|
fdf244d488 | ||
|
|
5575d454ab | ||
|
|
d4633cfcd5 | ||
|
|
11315c8c69 | ||
|
|
6c387f87dd | ||
|
|
33cf4877f5 | ||
|
|
6e2fee77a7 | ||
|
|
502e5ceb79 | ||
|
|
4685a53f29 | ||
|
|
79616f47cb | ||
|
|
496218b6e6 | ||
|
|
8bd8ccd974 | ||
|
|
c802de865a | ||
|
|
1dfd6e395c | ||
|
|
25eabb39a6 | ||
|
|
869e0e60c2 | ||
|
|
cae5823685 | ||
|
|
3d0dbc1fb0 | ||
|
|
912583026f | ||
|
|
748304dadd | ||
|
|
253c2179de | ||
|
|
fcc72e5af1 | ||
|
|
d472953e10 | ||
|
|
c78d6c95d6 | ||
|
|
c9cb987eb7 | ||
|
|
099a99d288 | ||
|
|
26ea01d5b4 | ||
|
|
a9afbe59e5 | ||
|
|
dc9cab36ac | ||
|
|
99265c5126 | ||
|
|
57aede6b95 | ||
|
|
e51e41203a | ||
|
|
54644179ea | ||
|
|
7d6a1a4483 | ||
|
|
2715e4456c | ||
|
|
a335d1cc56 | ||
|
|
a89b43b110 | ||
|
|
1b599bde37 | ||
|
|
7bd987e2b9 | ||
|
|
8b0896a43d | ||
|
|
402bb01501 | ||
|
|
75afd83508 | ||
|
|
b25f0f3cd2 | ||
|
|
9226c4b0d5 | ||
|
|
fe3e262209 | ||
|
|
b9fac26dcd | ||
|
|
343e5a03a7 | ||
|
|
acea082a7c | ||
|
|
0cff1f116f |
@@ -1,3 +0,0 @@
|
|||||||
## Azure Pipelines Configuration
|
|
||||||
|
|
||||||
Please see the [Documentation](https://github.com/ansible/community/wiki/Testing:-Azure-Pipelines) for more information.
|
|
||||||
@@ -1,414 +0,0 @@
|
|||||||
trigger:
|
|
||||||
batch: true
|
|
||||||
branches:
|
|
||||||
include:
|
|
||||||
- main
|
|
||||||
- stable-*
|
|
||||||
|
|
||||||
pr:
|
|
||||||
autoCancel: true
|
|
||||||
branches:
|
|
||||||
include:
|
|
||||||
- main
|
|
||||||
- stable-*
|
|
||||||
|
|
||||||
schedules:
|
|
||||||
- cron: 0 8 * * *
|
|
||||||
displayName: Nightly (main)
|
|
||||||
always: true
|
|
||||||
branches:
|
|
||||||
include:
|
|
||||||
- main
|
|
||||||
- cron: 0 10 * * *
|
|
||||||
displayName: Nightly (active stable branches)
|
|
||||||
always: true
|
|
||||||
branches:
|
|
||||||
include:
|
|
||||||
- stable-5
|
|
||||||
- stable-4
|
|
||||||
- cron: 0 11 * * 0
|
|
||||||
displayName: Weekly (old stable branches)
|
|
||||||
always: true
|
|
||||||
branches:
|
|
||||||
include:
|
|
||||||
- stable-3
|
|
||||||
|
|
||||||
variables:
|
|
||||||
- name: checkoutPath
|
|
||||||
value: ansible_collections/community/general
|
|
||||||
- name: coverageBranches
|
|
||||||
value: main
|
|
||||||
- name: pipelinesCoverage
|
|
||||||
value: coverage
|
|
||||||
- name: entryPoint
|
|
||||||
value: tests/utils/shippable/shippable.sh
|
|
||||||
- name: fetchDepth
|
|
||||||
value: 0
|
|
||||||
|
|
||||||
resources:
|
|
||||||
containers:
|
|
||||||
- container: default
|
|
||||||
image: quay.io/ansible/azure-pipelines-test-container:1.9.0
|
|
||||||
|
|
||||||
pool: Standard
|
|
||||||
|
|
||||||
stages:
|
|
||||||
### Sanity
|
|
||||||
- stage: Sanity_devel
|
|
||||||
displayName: Sanity devel
|
|
||||||
dependsOn: []
|
|
||||||
jobs:
|
|
||||||
- template: templates/matrix.yml
|
|
||||||
parameters:
|
|
||||||
nameFormat: Test {0}
|
|
||||||
testFormat: devel/sanity/{0}
|
|
||||||
targets:
|
|
||||||
- test: 1
|
|
||||||
- test: 2
|
|
||||||
- test: 3
|
|
||||||
- test: 4
|
|
||||||
- test: extra
|
|
||||||
- stage: Sanity_2_13
|
|
||||||
displayName: Sanity 2.13
|
|
||||||
dependsOn: []
|
|
||||||
jobs:
|
|
||||||
- template: templates/matrix.yml
|
|
||||||
parameters:
|
|
||||||
nameFormat: Test {0}
|
|
||||||
testFormat: 2.13/sanity/{0}
|
|
||||||
targets:
|
|
||||||
- test: 1
|
|
||||||
- test: 2
|
|
||||||
- test: 3
|
|
||||||
- test: 4
|
|
||||||
- stage: Sanity_2_12
|
|
||||||
displayName: Sanity 2.12
|
|
||||||
dependsOn: []
|
|
||||||
jobs:
|
|
||||||
- template: templates/matrix.yml
|
|
||||||
parameters:
|
|
||||||
nameFormat: Test {0}
|
|
||||||
testFormat: 2.12/sanity/{0}
|
|
||||||
targets:
|
|
||||||
- test: 1
|
|
||||||
- test: 2
|
|
||||||
- test: 3
|
|
||||||
- test: 4
|
|
||||||
- stage: Sanity_2_11
|
|
||||||
displayName: Sanity 2.11
|
|
||||||
dependsOn: []
|
|
||||||
jobs:
|
|
||||||
- template: templates/matrix.yml
|
|
||||||
parameters:
|
|
||||||
nameFormat: Test {0}
|
|
||||||
testFormat: 2.11/sanity/{0}
|
|
||||||
targets:
|
|
||||||
- test: 1
|
|
||||||
- test: 2
|
|
||||||
- test: 3
|
|
||||||
- test: 4
|
|
||||||
### Units
|
|
||||||
- stage: Units_devel
|
|
||||||
displayName: Units devel
|
|
||||||
dependsOn: []
|
|
||||||
jobs:
|
|
||||||
- template: templates/matrix.yml
|
|
||||||
parameters:
|
|
||||||
nameFormat: Python {0}
|
|
||||||
testFormat: devel/units/{0}/1
|
|
||||||
targets:
|
|
||||||
- test: 2.7
|
|
||||||
- test: 3.5
|
|
||||||
- test: 3.6
|
|
||||||
- test: 3.7
|
|
||||||
- test: 3.8
|
|
||||||
- test: 3.9
|
|
||||||
- test: '3.10'
|
|
||||||
- stage: Units_2_13
|
|
||||||
displayName: Units 2.13
|
|
||||||
dependsOn: []
|
|
||||||
jobs:
|
|
||||||
- template: templates/matrix.yml
|
|
||||||
parameters:
|
|
||||||
nameFormat: Python {0}
|
|
||||||
testFormat: 2.13/units/{0}/1
|
|
||||||
targets:
|
|
||||||
- test: 2.7
|
|
||||||
- test: 3.6
|
|
||||||
- test: 3.8
|
|
||||||
- test: 3.9
|
|
||||||
- stage: Units_2_12
|
|
||||||
displayName: Units 2.12
|
|
||||||
dependsOn: []
|
|
||||||
jobs:
|
|
||||||
- template: templates/matrix.yml
|
|
||||||
parameters:
|
|
||||||
nameFormat: Python {0}
|
|
||||||
testFormat: 2.12/units/{0}/1
|
|
||||||
targets:
|
|
||||||
- test: 2.6
|
|
||||||
- test: 3.5
|
|
||||||
- test: 3.8
|
|
||||||
- stage: Units_2_11
|
|
||||||
displayName: Units 2.11
|
|
||||||
dependsOn: []
|
|
||||||
jobs:
|
|
||||||
- template: templates/matrix.yml
|
|
||||||
parameters:
|
|
||||||
nameFormat: Python {0}
|
|
||||||
testFormat: 2.11/units/{0}/1
|
|
||||||
targets:
|
|
||||||
- test: 2.6
|
|
||||||
- test: 2.7
|
|
||||||
- test: 3.5
|
|
||||||
- test: 3.6
|
|
||||||
- test: 3.9
|
|
||||||
|
|
||||||
## Remote
|
|
||||||
- stage: Remote_devel
|
|
||||||
displayName: Remote devel
|
|
||||||
dependsOn: []
|
|
||||||
jobs:
|
|
||||||
- template: templates/matrix.yml
|
|
||||||
parameters:
|
|
||||||
testFormat: devel/{0}
|
|
||||||
targets:
|
|
||||||
- name: macOS 12.0
|
|
||||||
test: macos/12.0
|
|
||||||
- name: RHEL 7.9
|
|
||||||
test: rhel/7.9
|
|
||||||
- name: RHEL 8.5
|
|
||||||
test: rhel/8.5
|
|
||||||
- name: FreeBSD 12.3
|
|
||||||
test: freebsd/12.3
|
|
||||||
- name: FreeBSD 13.0
|
|
||||||
test: freebsd/13.0
|
|
||||||
groups:
|
|
||||||
- 1
|
|
||||||
- 2
|
|
||||||
- 3
|
|
||||||
- stage: Remote_2_13
|
|
||||||
displayName: Remote 2.13
|
|
||||||
dependsOn: []
|
|
||||||
jobs:
|
|
||||||
- template: templates/matrix.yml
|
|
||||||
parameters:
|
|
||||||
testFormat: 2.13/{0}
|
|
||||||
targets:
|
|
||||||
- name: macOS 12.0
|
|
||||||
test: macos/12.0
|
|
||||||
- name: RHEL 8.5
|
|
||||||
test: rhel/8.5
|
|
||||||
groups:
|
|
||||||
- 1
|
|
||||||
- 2
|
|
||||||
- 3
|
|
||||||
- stage: Remote_2_12
|
|
||||||
displayName: Remote 2.12
|
|
||||||
dependsOn: []
|
|
||||||
jobs:
|
|
||||||
- template: templates/matrix.yml
|
|
||||||
parameters:
|
|
||||||
testFormat: 2.12/{0}
|
|
||||||
targets:
|
|
||||||
- name: macOS 11.1
|
|
||||||
test: macos/11.1
|
|
||||||
- name: RHEL 8.4
|
|
||||||
test: rhel/8.4
|
|
||||||
- name: FreeBSD 13.0
|
|
||||||
test: freebsd/13.0
|
|
||||||
groups:
|
|
||||||
- 1
|
|
||||||
- 2
|
|
||||||
- 3
|
|
||||||
- stage: Remote_2_11
|
|
||||||
displayName: Remote 2.11
|
|
||||||
dependsOn: []
|
|
||||||
jobs:
|
|
||||||
- template: templates/matrix.yml
|
|
||||||
parameters:
|
|
||||||
testFormat: 2.11/{0}
|
|
||||||
targets:
|
|
||||||
- name: RHEL 7.9
|
|
||||||
test: rhel/7.9
|
|
||||||
- name: RHEL 8.3
|
|
||||||
test: rhel/8.3
|
|
||||||
#- name: FreeBSD 12.2
|
|
||||||
# test: freebsd/12.2
|
|
||||||
groups:
|
|
||||||
- 1
|
|
||||||
- 2
|
|
||||||
- 3
|
|
||||||
|
|
||||||
### Docker
|
|
||||||
- stage: Docker_devel
|
|
||||||
displayName: Docker devel
|
|
||||||
dependsOn: []
|
|
||||||
jobs:
|
|
||||||
- template: templates/matrix.yml
|
|
||||||
parameters:
|
|
||||||
testFormat: devel/linux/{0}
|
|
||||||
targets:
|
|
||||||
- name: CentOS 7
|
|
||||||
test: centos7
|
|
||||||
- name: Fedora 34
|
|
||||||
test: fedora34
|
|
||||||
- name: Fedora 35
|
|
||||||
test: fedora35
|
|
||||||
- name: openSUSE 15
|
|
||||||
test: opensuse15
|
|
||||||
- name: Ubuntu 18.04
|
|
||||||
test: ubuntu1804
|
|
||||||
- name: Ubuntu 20.04
|
|
||||||
test: ubuntu2004
|
|
||||||
- name: Alpine 3
|
|
||||||
test: alpine3
|
|
||||||
groups:
|
|
||||||
- 1
|
|
||||||
- 2
|
|
||||||
- 3
|
|
||||||
- stage: Docker_2_13
|
|
||||||
displayName: Docker 2.13
|
|
||||||
dependsOn: []
|
|
||||||
jobs:
|
|
||||||
- template: templates/matrix.yml
|
|
||||||
parameters:
|
|
||||||
testFormat: 2.13/linux/{0}
|
|
||||||
targets:
|
|
||||||
- name: Fedora 35
|
|
||||||
test: fedora35
|
|
||||||
- name: openSUSE 15 py2
|
|
||||||
test: opensuse15py2
|
|
||||||
- name: Alpine 3
|
|
||||||
test: alpine3
|
|
||||||
groups:
|
|
||||||
- 1
|
|
||||||
- 2
|
|
||||||
- 3
|
|
||||||
- stage: Docker_2_12
|
|
||||||
displayName: Docker 2.12
|
|
||||||
dependsOn: []
|
|
||||||
jobs:
|
|
||||||
- template: templates/matrix.yml
|
|
||||||
parameters:
|
|
||||||
testFormat: 2.12/linux/{0}
|
|
||||||
targets:
|
|
||||||
- name: CentOS 6
|
|
||||||
test: centos6
|
|
||||||
- name: Fedora 34
|
|
||||||
test: fedora34
|
|
||||||
- name: Ubuntu 20.04
|
|
||||||
test: ubuntu2004
|
|
||||||
groups:
|
|
||||||
- 1
|
|
||||||
- 2
|
|
||||||
- 3
|
|
||||||
- stage: Docker_2_11
|
|
||||||
displayName: Docker 2.11
|
|
||||||
dependsOn: []
|
|
||||||
jobs:
|
|
||||||
- template: templates/matrix.yml
|
|
||||||
parameters:
|
|
||||||
testFormat: 2.11/linux/{0}
|
|
||||||
targets:
|
|
||||||
- name: Fedora 32
|
|
||||||
test: fedora32
|
|
||||||
- name: Fedora 33
|
|
||||||
test: fedora33
|
|
||||||
- name: Alpine 3
|
|
||||||
test: alpine3
|
|
||||||
groups:
|
|
||||||
- 1
|
|
||||||
- 2
|
|
||||||
- 3
|
|
||||||
|
|
||||||
### Community Docker
|
|
||||||
- stage: Docker_community_devel
|
|
||||||
displayName: Docker (community images) devel
|
|
||||||
dependsOn: []
|
|
||||||
jobs:
|
|
||||||
- template: templates/matrix.yml
|
|
||||||
parameters:
|
|
||||||
testFormat: devel/linux-community/{0}
|
|
||||||
targets:
|
|
||||||
- name: Debian Bullseye
|
|
||||||
test: debian-bullseye/3.9
|
|
||||||
- name: ArchLinux
|
|
||||||
test: archlinux/3.10
|
|
||||||
- name: CentOS Stream 8
|
|
||||||
test: centos-stream8/3.8
|
|
||||||
groups:
|
|
||||||
- 1
|
|
||||||
- 2
|
|
||||||
- 3
|
|
||||||
|
|
||||||
### Cloud
|
|
||||||
- stage: Cloud_devel
|
|
||||||
displayName: Cloud devel
|
|
||||||
dependsOn: []
|
|
||||||
jobs:
|
|
||||||
- template: templates/matrix.yml
|
|
||||||
parameters:
|
|
||||||
nameFormat: Python {0}
|
|
||||||
testFormat: devel/cloud/{0}/1
|
|
||||||
targets:
|
|
||||||
- test: 2.7
|
|
||||||
- test: '3.10'
|
|
||||||
- stage: Cloud_2_13
|
|
||||||
displayName: Cloud 2.13
|
|
||||||
dependsOn: []
|
|
||||||
jobs:
|
|
||||||
- template: templates/matrix.yml
|
|
||||||
parameters:
|
|
||||||
nameFormat: Python {0}
|
|
||||||
testFormat: 2.13/cloud/{0}/1
|
|
||||||
targets:
|
|
||||||
- test: 3.9
|
|
||||||
- stage: Cloud_2_12
|
|
||||||
displayName: Cloud 2.12
|
|
||||||
dependsOn: []
|
|
||||||
jobs:
|
|
||||||
- template: templates/matrix.yml
|
|
||||||
parameters:
|
|
||||||
nameFormat: Python {0}
|
|
||||||
testFormat: 2.12/cloud/{0}/1
|
|
||||||
targets:
|
|
||||||
- test: 3.8
|
|
||||||
- stage: Cloud_2_11
|
|
||||||
displayName: Cloud 2.11
|
|
||||||
dependsOn: []
|
|
||||||
jobs:
|
|
||||||
- template: templates/matrix.yml
|
|
||||||
parameters:
|
|
||||||
nameFormat: Python {0}
|
|
||||||
testFormat: 2.11/cloud/{0}/1
|
|
||||||
targets:
|
|
||||||
- test: 2.7
|
|
||||||
- test: 3.5
|
|
||||||
|
|
||||||
- stage: Summary
|
|
||||||
condition: succeededOrFailed()
|
|
||||||
dependsOn:
|
|
||||||
- Sanity_devel
|
|
||||||
- Sanity_2_11
|
|
||||||
- Sanity_2_12
|
|
||||||
- Sanity_2_13
|
|
||||||
- Units_devel
|
|
||||||
- Units_2_11
|
|
||||||
- Units_2_12
|
|
||||||
- Units_2_13
|
|
||||||
- Remote_devel
|
|
||||||
- Remote_2_11
|
|
||||||
- Remote_2_12
|
|
||||||
- Remote_2_13
|
|
||||||
- Docker_devel
|
|
||||||
- Docker_2_11
|
|
||||||
- Docker_2_12
|
|
||||||
- Docker_2_13
|
|
||||||
- Docker_community_devel
|
|
||||||
- Cloud_devel
|
|
||||||
- Cloud_2_11
|
|
||||||
- Cloud_2_12
|
|
||||||
- Cloud_2_13
|
|
||||||
jobs:
|
|
||||||
- template: templates/coverage.yml
|
|
||||||
@@ -1,20 +0,0 @@
|
|||||||
#!/usr/bin/env bash
|
|
||||||
# Aggregate code coverage results for later processing.
|
|
||||||
|
|
||||||
set -o pipefail -eu
|
|
||||||
|
|
||||||
agent_temp_directory="$1"
|
|
||||||
|
|
||||||
PATH="${PWD}/bin:${PATH}"
|
|
||||||
|
|
||||||
mkdir "${agent_temp_directory}/coverage/"
|
|
||||||
|
|
||||||
options=(--venv --venv-system-site-packages --color -v)
|
|
||||||
|
|
||||||
ansible-test coverage combine --group-by command --export "${agent_temp_directory}/coverage/" "${options[@]}"
|
|
||||||
|
|
||||||
if ansible-test coverage analyze targets generate --help >/dev/null 2>&1; then
|
|
||||||
# Only analyze coverage if the installed version of ansible-test supports it.
|
|
||||||
# Doing so allows this script to work unmodified for multiple Ansible versions.
|
|
||||||
ansible-test coverage analyze targets generate "${agent_temp_directory}/coverage/coverage-analyze-targets.json" "${options[@]}"
|
|
||||||
fi
|
|
||||||
@@ -1,60 +0,0 @@
|
|||||||
#!/usr/bin/env python
|
|
||||||
"""
|
|
||||||
Combine coverage data from multiple jobs, keeping the data only from the most recent attempt from each job.
|
|
||||||
Coverage artifacts must be named using the format: "Coverage $(System.JobAttempt) {StableUniqueNameForEachJob}"
|
|
||||||
The recommended coverage artifact name format is: Coverage $(System.JobAttempt) $(System.StageDisplayName) $(System.JobDisplayName)
|
|
||||||
Keep in mind that Azure Pipelines does not enforce unique job display names (only names).
|
|
||||||
It is up to pipeline authors to avoid name collisions when deviating from the recommended format.
|
|
||||||
"""
|
|
||||||
|
|
||||||
from __future__ import (absolute_import, division, print_function)
|
|
||||||
__metaclass__ = type
|
|
||||||
|
|
||||||
import os
|
|
||||||
import re
|
|
||||||
import shutil
|
|
||||||
import sys
|
|
||||||
|
|
||||||
|
|
||||||
def main():
|
|
||||||
"""Main program entry point."""
|
|
||||||
source_directory = sys.argv[1]
|
|
||||||
|
|
||||||
if '/ansible_collections/' in os.getcwd():
|
|
||||||
output_path = "tests/output"
|
|
||||||
else:
|
|
||||||
output_path = "test/results"
|
|
||||||
|
|
||||||
destination_directory = os.path.join(output_path, 'coverage')
|
|
||||||
|
|
||||||
if not os.path.exists(destination_directory):
|
|
||||||
os.makedirs(destination_directory)
|
|
||||||
|
|
||||||
jobs = {}
|
|
||||||
count = 0
|
|
||||||
|
|
||||||
for name in os.listdir(source_directory):
|
|
||||||
match = re.search('^Coverage (?P<attempt>[0-9]+) (?P<label>.+)$', name)
|
|
||||||
label = match.group('label')
|
|
||||||
attempt = int(match.group('attempt'))
|
|
||||||
jobs[label] = max(attempt, jobs.get(label, 0))
|
|
||||||
|
|
||||||
for label, attempt in jobs.items():
|
|
||||||
name = 'Coverage {attempt} {label}'.format(label=label, attempt=attempt)
|
|
||||||
source = os.path.join(source_directory, name)
|
|
||||||
source_files = os.listdir(source)
|
|
||||||
|
|
||||||
for source_file in source_files:
|
|
||||||
source_path = os.path.join(source, source_file)
|
|
||||||
destination_path = os.path.join(destination_directory, source_file + '.' + label)
|
|
||||||
print('"%s" -> "%s"' % (source_path, destination_path))
|
|
||||||
shutil.copyfile(source_path, destination_path)
|
|
||||||
count += 1
|
|
||||||
|
|
||||||
print('Coverage file count: %d' % count)
|
|
||||||
print('##vso[task.setVariable variable=coverageFileCount]%d' % count)
|
|
||||||
print('##vso[task.setVariable variable=outputPath]%s' % output_path)
|
|
||||||
|
|
||||||
|
|
||||||
if __name__ == '__main__':
|
|
||||||
main()
|
|
||||||
@@ -1,24 +0,0 @@
|
|||||||
#!/usr/bin/env bash
|
|
||||||
# Check the test results and set variables for use in later steps.
|
|
||||||
|
|
||||||
set -o pipefail -eu
|
|
||||||
|
|
||||||
if [[ "$PWD" =~ /ansible_collections/ ]]; then
|
|
||||||
output_path="tests/output"
|
|
||||||
else
|
|
||||||
output_path="test/results"
|
|
||||||
fi
|
|
||||||
|
|
||||||
echo "##vso[task.setVariable variable=outputPath]${output_path}"
|
|
||||||
|
|
||||||
if compgen -G "${output_path}"'/junit/*.xml' > /dev/null; then
|
|
||||||
echo "##vso[task.setVariable variable=haveTestResults]true"
|
|
||||||
fi
|
|
||||||
|
|
||||||
if compgen -G "${output_path}"'/bot/ansible-test-*' > /dev/null; then
|
|
||||||
echo "##vso[task.setVariable variable=haveBotResults]true"
|
|
||||||
fi
|
|
||||||
|
|
||||||
if compgen -G "${output_path}"'/coverage/*' > /dev/null; then
|
|
||||||
echo "##vso[task.setVariable variable=haveCoverageData]true"
|
|
||||||
fi
|
|
||||||
@@ -1,101 +0,0 @@
|
|||||||
#!/usr/bin/env python
|
|
||||||
"""
|
|
||||||
Upload code coverage reports to codecov.io.
|
|
||||||
Multiple coverage files from multiple languages are accepted and aggregated after upload.
|
|
||||||
Python coverage, as well as PowerShell and Python stubs can all be uploaded.
|
|
||||||
"""
|
|
||||||
|
|
||||||
import argparse
|
|
||||||
import dataclasses
|
|
||||||
import pathlib
|
|
||||||
import shutil
|
|
||||||
import subprocess
|
|
||||||
import tempfile
|
|
||||||
import typing as t
|
|
||||||
import urllib.request
|
|
||||||
|
|
||||||
|
|
||||||
@dataclasses.dataclass(frozen=True)
|
|
||||||
class CoverageFile:
|
|
||||||
name: str
|
|
||||||
path: pathlib.Path
|
|
||||||
flags: t.List[str]
|
|
||||||
|
|
||||||
|
|
||||||
@dataclasses.dataclass(frozen=True)
|
|
||||||
class Args:
|
|
||||||
dry_run: bool
|
|
||||||
path: pathlib.Path
|
|
||||||
|
|
||||||
|
|
||||||
def parse_args() -> Args:
|
|
||||||
parser = argparse.ArgumentParser()
|
|
||||||
parser.add_argument('-n', '--dry-run', action='store_true')
|
|
||||||
parser.add_argument('path', type=pathlib.Path)
|
|
||||||
|
|
||||||
args = parser.parse_args()
|
|
||||||
|
|
||||||
# Store arguments in a typed dataclass
|
|
||||||
fields = dataclasses.fields(Args)
|
|
||||||
kwargs = {field.name: getattr(args, field.name) for field in fields}
|
|
||||||
|
|
||||||
return Args(**kwargs)
|
|
||||||
|
|
||||||
|
|
||||||
def process_files(directory: pathlib.Path) -> t.Tuple[CoverageFile, ...]:
|
|
||||||
processed = []
|
|
||||||
for file in directory.joinpath('reports').glob('coverage*.xml'):
|
|
||||||
name = file.stem.replace('coverage=', '')
|
|
||||||
|
|
||||||
# Get flags from name
|
|
||||||
flags = name.replace('-powershell', '').split('=') # Drop '-powershell' suffix
|
|
||||||
flags = [flag if not flag.startswith('stub') else flag.split('-')[0] for flag in flags] # Remove "-01" from stub files
|
|
||||||
|
|
||||||
processed.append(CoverageFile(name, file, flags))
|
|
||||||
|
|
||||||
return tuple(processed)
|
|
||||||
|
|
||||||
|
|
||||||
def upload_files(codecov_bin: pathlib.Path, files: t.Tuple[CoverageFile, ...], dry_run: bool = False) -> None:
|
|
||||||
for file in files:
|
|
||||||
cmd = [
|
|
||||||
str(codecov_bin),
|
|
||||||
'--name', file.name,
|
|
||||||
'--file', str(file.path),
|
|
||||||
]
|
|
||||||
for flag in file.flags:
|
|
||||||
cmd.extend(['--flags', flag])
|
|
||||||
|
|
||||||
if dry_run:
|
|
||||||
print(f'DRY-RUN: Would run command: {cmd}')
|
|
||||||
continue
|
|
||||||
|
|
||||||
subprocess.run(cmd, check=True)
|
|
||||||
|
|
||||||
|
|
||||||
def download_file(url: str, dest: pathlib.Path, flags: int, dry_run: bool = False) -> None:
|
|
||||||
if dry_run:
|
|
||||||
print(f'DRY-RUN: Would download {url} to {dest} and set mode to {flags:o}')
|
|
||||||
return
|
|
||||||
|
|
||||||
with urllib.request.urlopen(url) as resp:
|
|
||||||
with dest.open('w+b') as f:
|
|
||||||
# Read data in chunks rather than all at once
|
|
||||||
shutil.copyfileobj(resp, f, 64 * 1024)
|
|
||||||
|
|
||||||
dest.chmod(flags)
|
|
||||||
|
|
||||||
|
|
||||||
def main():
|
|
||||||
args = parse_args()
|
|
||||||
url = 'https://ansible-ci-files.s3.amazonaws.com/codecov/linux/codecov'
|
|
||||||
with tempfile.TemporaryDirectory(prefix='codecov-') as tmpdir:
|
|
||||||
codecov_bin = pathlib.Path(tmpdir) / 'codecov'
|
|
||||||
download_file(url, codecov_bin, 0o755, args.dry_run)
|
|
||||||
|
|
||||||
files = process_files(args.path)
|
|
||||||
upload_files(codecov_bin, files, args.dry_run)
|
|
||||||
|
|
||||||
|
|
||||||
if __name__ == '__main__':
|
|
||||||
main()
|
|
||||||
@@ -1,15 +0,0 @@
|
|||||||
#!/usr/bin/env bash
|
|
||||||
# Generate code coverage reports for uploading to Azure Pipelines and codecov.io.
|
|
||||||
|
|
||||||
set -o pipefail -eu
|
|
||||||
|
|
||||||
PATH="${PWD}/bin:${PATH}"
|
|
||||||
|
|
||||||
if ! ansible-test --help >/dev/null 2>&1; then
|
|
||||||
# Install the devel version of ansible-test for generating code coverage reports.
|
|
||||||
# This is only used by Ansible Collections, which are typically tested against multiple Ansible versions (in separate jobs).
|
|
||||||
# Since a version of ansible-test is required that can work the output from multiple older releases, the devel version is used.
|
|
||||||
pip install https://github.com/ansible/ansible/archive/devel.tar.gz --disable-pip-version-check
|
|
||||||
fi
|
|
||||||
|
|
||||||
ansible-test coverage xml --group-by command --stub --venv --venv-system-site-packages --color -v
|
|
||||||
@@ -1,34 +0,0 @@
|
|||||||
#!/usr/bin/env bash
|
|
||||||
# Configure the test environment and run the tests.
|
|
||||||
|
|
||||||
set -o pipefail -eu
|
|
||||||
|
|
||||||
entry_point="$1"
|
|
||||||
test="$2"
|
|
||||||
read -r -a coverage_branches <<< "$3" # space separated list of branches to run code coverage on for scheduled builds
|
|
||||||
|
|
||||||
export COMMIT_MESSAGE
|
|
||||||
export COMPLETE
|
|
||||||
export COVERAGE
|
|
||||||
export IS_PULL_REQUEST
|
|
||||||
|
|
||||||
if [ "${SYSTEM_PULLREQUEST_TARGETBRANCH:-}" ]; then
|
|
||||||
IS_PULL_REQUEST=true
|
|
||||||
COMMIT_MESSAGE=$(git log --format=%B -n 1 HEAD^2)
|
|
||||||
else
|
|
||||||
IS_PULL_REQUEST=
|
|
||||||
COMMIT_MESSAGE=$(git log --format=%B -n 1 HEAD)
|
|
||||||
fi
|
|
||||||
|
|
||||||
COMPLETE=
|
|
||||||
COVERAGE=
|
|
||||||
|
|
||||||
if [ "${BUILD_REASON}" = "Schedule" ]; then
|
|
||||||
COMPLETE=yes
|
|
||||||
|
|
||||||
if printf '%s\n' "${coverage_branches[@]}" | grep -q "^${BUILD_SOURCEBRANCHNAME}$"; then
|
|
||||||
COVERAGE=yes
|
|
||||||
fi
|
|
||||||
fi
|
|
||||||
|
|
||||||
"${entry_point}" "${test}" 2>&1 | "$(dirname "$0")/time-command.py"
|
|
||||||
@@ -1,25 +0,0 @@
|
|||||||
#!/usr/bin/env python
|
|
||||||
"""Prepends a relative timestamp to each input line from stdin and writes it to stdout."""
|
|
||||||
|
|
||||||
from __future__ import (absolute_import, division, print_function)
|
|
||||||
__metaclass__ = type
|
|
||||||
|
|
||||||
import sys
|
|
||||||
import time
|
|
||||||
|
|
||||||
|
|
||||||
def main():
|
|
||||||
"""Main program entry point."""
|
|
||||||
start = time.time()
|
|
||||||
|
|
||||||
sys.stdin.reconfigure(errors='surrogateescape')
|
|
||||||
sys.stdout.reconfigure(errors='surrogateescape')
|
|
||||||
|
|
||||||
for line in sys.stdin:
|
|
||||||
seconds = time.time() - start
|
|
||||||
sys.stdout.write('%02d:%02d %s' % (seconds // 60, seconds % 60, line))
|
|
||||||
sys.stdout.flush()
|
|
||||||
|
|
||||||
|
|
||||||
if __name__ == '__main__':
|
|
||||||
main()
|
|
||||||
@@ -1,39 +0,0 @@
|
|||||||
# This template adds a job for processing code coverage data.
|
|
||||||
# It will upload results to Azure Pipelines and codecov.io.
|
|
||||||
# Use it from a job stage that completes after all other jobs have completed.
|
|
||||||
# This can be done by placing it in a separate summary stage that runs after the test stage(s) have completed.
|
|
||||||
|
|
||||||
jobs:
|
|
||||||
- job: Coverage
|
|
||||||
displayName: Code Coverage
|
|
||||||
container: default
|
|
||||||
workspace:
|
|
||||||
clean: all
|
|
||||||
steps:
|
|
||||||
- checkout: self
|
|
||||||
fetchDepth: $(fetchDepth)
|
|
||||||
path: $(checkoutPath)
|
|
||||||
- task: DownloadPipelineArtifact@2
|
|
||||||
displayName: Download Coverage Data
|
|
||||||
inputs:
|
|
||||||
path: coverage/
|
|
||||||
patterns: "Coverage */*=coverage.combined"
|
|
||||||
- bash: .azure-pipelines/scripts/combine-coverage.py coverage/
|
|
||||||
displayName: Combine Coverage Data
|
|
||||||
- bash: .azure-pipelines/scripts/report-coverage.sh
|
|
||||||
displayName: Generate Coverage Report
|
|
||||||
condition: gt(variables.coverageFileCount, 0)
|
|
||||||
- task: PublishCodeCoverageResults@1
|
|
||||||
inputs:
|
|
||||||
codeCoverageTool: Cobertura
|
|
||||||
# Azure Pipelines only accepts a single coverage data file.
|
|
||||||
# That means only Python or PowerShell coverage can be uploaded, but not both.
|
|
||||||
# Set the "pipelinesCoverage" variable to determine which type is uploaded.
|
|
||||||
# Use "coverage" for Python and "coverage-powershell" for PowerShell.
|
|
||||||
summaryFileLocation: "$(outputPath)/reports/$(pipelinesCoverage).xml"
|
|
||||||
displayName: Publish to Azure Pipelines
|
|
||||||
condition: gt(variables.coverageFileCount, 0)
|
|
||||||
- bash: .azure-pipelines/scripts/publish-codecov.py "$(outputPath)"
|
|
||||||
displayName: Publish to codecov.io
|
|
||||||
condition: gt(variables.coverageFileCount, 0)
|
|
||||||
continueOnError: true
|
|
||||||
@@ -1,55 +0,0 @@
|
|||||||
# This template uses the provided targets and optional groups to generate a matrix which is then passed to the test template.
|
|
||||||
# If this matrix template does not provide the required functionality, consider using the test template directly instead.
|
|
||||||
|
|
||||||
parameters:
|
|
||||||
# A required list of dictionaries, one per test target.
|
|
||||||
# Each item in the list must contain a "test" or "name" key.
|
|
||||||
# Both may be provided. If one is omitted, the other will be used.
|
|
||||||
- name: targets
|
|
||||||
type: object
|
|
||||||
|
|
||||||
# An optional list of values which will be used to multiply the targets list into a matrix.
|
|
||||||
# Values can be strings or numbers.
|
|
||||||
- name: groups
|
|
||||||
type: object
|
|
||||||
default: []
|
|
||||||
|
|
||||||
# An optional format string used to generate the job name.
|
|
||||||
# - {0} is the name of an item in the targets list.
|
|
||||||
- name: nameFormat
|
|
||||||
type: string
|
|
||||||
default: "{0}"
|
|
||||||
|
|
||||||
# An optional format string used to generate the test name.
|
|
||||||
# - {0} is the name of an item in the targets list.
|
|
||||||
- name: testFormat
|
|
||||||
type: string
|
|
||||||
default: "{0}"
|
|
||||||
|
|
||||||
# An optional format string used to add the group to the job name.
|
|
||||||
# {0} is the formatted name of an item in the targets list.
|
|
||||||
# {{1}} is the group -- be sure to include the double "{{" and "}}".
|
|
||||||
- name: nameGroupFormat
|
|
||||||
type: string
|
|
||||||
default: "{0} - {{1}}"
|
|
||||||
|
|
||||||
# An optional format string used to add the group to the test name.
|
|
||||||
# {0} is the formatted test of an item in the targets list.
|
|
||||||
# {{1}} is the group -- be sure to include the double "{{" and "}}".
|
|
||||||
- name: testGroupFormat
|
|
||||||
type: string
|
|
||||||
default: "{0}/{{1}}"
|
|
||||||
|
|
||||||
jobs:
|
|
||||||
- template: test.yml
|
|
||||||
parameters:
|
|
||||||
jobs:
|
|
||||||
- ${{ if eq(length(parameters.groups), 0) }}:
|
|
||||||
- ${{ each target in parameters.targets }}:
|
|
||||||
- name: ${{ format(parameters.nameFormat, coalesce(target.name, target.test)) }}
|
|
||||||
test: ${{ format(parameters.testFormat, coalesce(target.test, target.name)) }}
|
|
||||||
- ${{ if not(eq(length(parameters.groups), 0)) }}:
|
|
||||||
- ${{ each group in parameters.groups }}:
|
|
||||||
- ${{ each target in parameters.targets }}:
|
|
||||||
- name: ${{ format(format(parameters.nameGroupFormat, parameters.nameFormat), coalesce(target.name, target.test), group) }}
|
|
||||||
test: ${{ format(format(parameters.testGroupFormat, parameters.testFormat), coalesce(target.test, target.name), group) }}
|
|
||||||
@@ -1,45 +0,0 @@
|
|||||||
# This template uses the provided list of jobs to create test one or more test jobs.
|
|
||||||
# It can be used directly if needed, or through the matrix template.
|
|
||||||
|
|
||||||
parameters:
|
|
||||||
# A required list of dictionaries, one per test job.
|
|
||||||
# Each item in the list must contain a "job" and "name" key.
|
|
||||||
- name: jobs
|
|
||||||
type: object
|
|
||||||
|
|
||||||
jobs:
|
|
||||||
- ${{ each job in parameters.jobs }}:
|
|
||||||
- job: test_${{ replace(replace(replace(job.test, '/', '_'), '.', '_'), '-', '_') }}
|
|
||||||
displayName: ${{ job.name }}
|
|
||||||
container: default
|
|
||||||
workspace:
|
|
||||||
clean: all
|
|
||||||
steps:
|
|
||||||
- checkout: self
|
|
||||||
fetchDepth: $(fetchDepth)
|
|
||||||
path: $(checkoutPath)
|
|
||||||
- bash: .azure-pipelines/scripts/run-tests.sh "$(entryPoint)" "${{ job.test }}" "$(coverageBranches)"
|
|
||||||
displayName: Run Tests
|
|
||||||
- bash: .azure-pipelines/scripts/process-results.sh
|
|
||||||
condition: succeededOrFailed()
|
|
||||||
displayName: Process Results
|
|
||||||
- bash: .azure-pipelines/scripts/aggregate-coverage.sh "$(Agent.TempDirectory)"
|
|
||||||
condition: eq(variables.haveCoverageData, 'true')
|
|
||||||
displayName: Aggregate Coverage Data
|
|
||||||
- task: PublishTestResults@2
|
|
||||||
condition: eq(variables.haveTestResults, 'true')
|
|
||||||
inputs:
|
|
||||||
testResultsFiles: "$(outputPath)/junit/*.xml"
|
|
||||||
displayName: Publish Test Results
|
|
||||||
- task: PublishPipelineArtifact@1
|
|
||||||
condition: eq(variables.haveBotResults, 'true')
|
|
||||||
displayName: Publish Bot Results
|
|
||||||
inputs:
|
|
||||||
targetPath: "$(outputPath)/bot/"
|
|
||||||
artifactName: "Bot $(System.JobAttempt) $(System.StageDisplayName) $(System.JobDisplayName)"
|
|
||||||
- task: PublishPipelineArtifact@1
|
|
||||||
condition: eq(variables.haveCoverageData, 'true')
|
|
||||||
displayName: Publish Coverage Data
|
|
||||||
inputs:
|
|
||||||
targetPath: "$(Agent.TempDirectory)/coverage/"
|
|
||||||
artifactName: "Coverage $(System.JobAttempt) $(System.StageDisplayName) $(System.JobDisplayName)"
|
|
||||||
1519
.github/BOTMETA.yml
vendored
1519
.github/BOTMETA.yml
vendored
File diff suppressed because it is too large
Load Diff
149
.github/ISSUE_TEMPLATE/bug_report.yml
vendored
149
.github/ISSUE_TEMPLATE/bug_report.yml
vendored
@@ -1,149 +0,0 @@
|
|||||||
---
|
|
||||||
name: Bug report
|
|
||||||
description: Create a report to help us improve
|
|
||||||
|
|
||||||
body:
|
|
||||||
- type: markdown
|
|
||||||
attributes:
|
|
||||||
value: |
|
|
||||||
⚠
|
|
||||||
Verify first that your issue is not [already reported on GitHub][issue search].
|
|
||||||
Also test if the latest release and devel branch are affected too.
|
|
||||||
*Complete **all** sections as described, this form is processed automatically.*
|
|
||||||
|
|
||||||
[issue search]: https://github.com/ansible-collections/community.general/search?q=is%3Aissue&type=issues
|
|
||||||
|
|
||||||
|
|
||||||
- type: textarea
|
|
||||||
attributes:
|
|
||||||
label: Summary
|
|
||||||
description: Explain the problem briefly below.
|
|
||||||
placeholder: >-
|
|
||||||
When I try to do X with the collection from the main branch on GitHub, Y
|
|
||||||
breaks in a way Z under the env E. Here are all the details I know
|
|
||||||
about this problem...
|
|
||||||
validations:
|
|
||||||
required: true
|
|
||||||
|
|
||||||
- type: dropdown
|
|
||||||
attributes:
|
|
||||||
label: Issue Type
|
|
||||||
# FIXME: Once GitHub allows defining the default choice, update this
|
|
||||||
options:
|
|
||||||
- Bug Report
|
|
||||||
validations:
|
|
||||||
required: true
|
|
||||||
|
|
||||||
- type: textarea
|
|
||||||
attributes:
|
|
||||||
# For smaller collections we could use a multi-select and hardcode the list
|
|
||||||
# May generate this list via GitHub action and walking files under https://github.com/ansible-collections/community.general/tree/main/plugins
|
|
||||||
# Select from list, filter as you type (`mysql` would only show the 3 mysql components)
|
|
||||||
# OR freeform - doesn't seem to be supported in adaptivecards
|
|
||||||
label: Component Name
|
|
||||||
description: >-
|
|
||||||
Write the short name of the module, plugin, task or feature below,
|
|
||||||
*use your best guess if unsure*.
|
|
||||||
placeholder: dnf, apt, yum, pip, user etc.
|
|
||||||
validations:
|
|
||||||
required: true
|
|
||||||
|
|
||||||
- type: textarea
|
|
||||||
attributes:
|
|
||||||
label: Ansible Version
|
|
||||||
description: >-
|
|
||||||
Paste verbatim output from `ansible --version` between
|
|
||||||
tripple backticks.
|
|
||||||
value: |
|
|
||||||
```console (paste below)
|
|
||||||
$ ansible --version
|
|
||||||
|
|
||||||
```
|
|
||||||
validations:
|
|
||||||
required: true
|
|
||||||
|
|
||||||
- type: textarea
|
|
||||||
attributes:
|
|
||||||
label: Community.general Version
|
|
||||||
description: >-
|
|
||||||
Paste verbatim output from "ansible-galaxy collection list community.general"
|
|
||||||
between tripple backticks.
|
|
||||||
value: |
|
|
||||||
```console (paste below)
|
|
||||||
$ ansible-galaxy collection list community.general
|
|
||||||
|
|
||||||
```
|
|
||||||
validations:
|
|
||||||
required: true
|
|
||||||
|
|
||||||
- type: textarea
|
|
||||||
attributes:
|
|
||||||
label: Configuration
|
|
||||||
description: >-
|
|
||||||
If this issue has an example piece of YAML that can help to reproduce this problem, please provide it.
|
|
||||||
This can be a piece of YAML from, e.g., an automation, script, scene or configuration.
|
|
||||||
Paste verbatim output from `ansible-config dump --only-changed` between quotes
|
|
||||||
value: |
|
|
||||||
```console (paste below)
|
|
||||||
$ ansible-config dump --only-changed
|
|
||||||
|
|
||||||
```
|
|
||||||
|
|
||||||
|
|
||||||
- type: textarea
|
|
||||||
attributes:
|
|
||||||
label: OS / Environment
|
|
||||||
description: >-
|
|
||||||
Provide all relevant information below, e.g. target OS versions,
|
|
||||||
network device firmware, etc.
|
|
||||||
placeholder: RHEL 8, CentOS Stream etc.
|
|
||||||
validations:
|
|
||||||
required: false
|
|
||||||
|
|
||||||
|
|
||||||
- type: textarea
|
|
||||||
attributes:
|
|
||||||
label: Steps to Reproduce
|
|
||||||
description: |
|
|
||||||
Describe exactly how to reproduce the problem, using a minimal test-case. It would *really* help us understand your problem if you could also pased any playbooks, configs and commands you used.
|
|
||||||
|
|
||||||
**HINT:** You can paste https://gist.github.com links for larger files.
|
|
||||||
value: |
|
|
||||||
<!--- Paste example playbooks or commands between quotes below -->
|
|
||||||
```yaml (paste below)
|
|
||||||
|
|
||||||
```
|
|
||||||
validations:
|
|
||||||
required: true
|
|
||||||
|
|
||||||
- type: textarea
|
|
||||||
attributes:
|
|
||||||
label: Expected Results
|
|
||||||
description: >-
|
|
||||||
Describe what you expected to happen when running the steps above.
|
|
||||||
placeholder: >-
|
|
||||||
I expected X to happen because I assumed Y.
|
|
||||||
that it did not.
|
|
||||||
validations:
|
|
||||||
required: true
|
|
||||||
|
|
||||||
- type: textarea
|
|
||||||
attributes:
|
|
||||||
label: Actual Results
|
|
||||||
description: |
|
|
||||||
Describe what actually happened. If possible run with extra verbosity (`-vvvv`).
|
|
||||||
|
|
||||||
Paste verbatim command output between quotes.
|
|
||||||
value: |
|
|
||||||
```console (paste below)
|
|
||||||
|
|
||||||
```
|
|
||||||
- type: checkboxes
|
|
||||||
attributes:
|
|
||||||
label: Code of Conduct
|
|
||||||
description: |
|
|
||||||
Read the [Ansible Code of Conduct](https://docs.ansible.com/ansible/latest/community/code_of_conduct.html?utm_medium=github&utm_source=issue_form--ansible-collections) first.
|
|
||||||
options:
|
|
||||||
- label: I agree to follow the Ansible Code of Conduct
|
|
||||||
required: true
|
|
||||||
...
|
|
||||||
27
.github/ISSUE_TEMPLATE/config.yml
vendored
27
.github/ISSUE_TEMPLATE/config.yml
vendored
@@ -1,27 +0,0 @@
|
|||||||
---
|
|
||||||
# Ref: https://help.github.com/en/github/building-a-strong-community/configuring-issue-templates-for-your-repository#configuring-the-template-chooser
|
|
||||||
blank_issues_enabled: false # default: true
|
|
||||||
contact_links:
|
|
||||||
- name: Security bug report
|
|
||||||
url: https://docs.ansible.com/ansible-core/devel/community/reporting_bugs_and_features.html?utm_medium=github&utm_source=issue_template_chooser_ansible_collections
|
|
||||||
about: |
|
|
||||||
Please learn how to report security vulnerabilities here.
|
|
||||||
|
|
||||||
For all security related bugs, email security@ansible.com
|
|
||||||
instead of using this issue tracker and you will receive
|
|
||||||
a prompt response.
|
|
||||||
|
|
||||||
For more information, see
|
|
||||||
https://docs.ansible.com/ansible/latest/community/reporting_bugs_and_features.html
|
|
||||||
- name: Ansible Code of Conduct
|
|
||||||
url: https://docs.ansible.com/ansible/latest/community/code_of_conduct.html?utm_medium=github&utm_source=issue_template_chooser_ansible_collections
|
|
||||||
about: Be nice to other members of the community.
|
|
||||||
- name: Talks to the community
|
|
||||||
url: https://docs.ansible.com/ansible/latest/community/communication.html?utm_medium=github&utm_source=issue_template_chooser#mailing-list-information
|
|
||||||
about: Please ask and answer usage questions here
|
|
||||||
- name: Working groups
|
|
||||||
url: https://github.com/ansible/community/wiki
|
|
||||||
about: Interested in improving a specific area? Become a part of a working group!
|
|
||||||
- name: For Enterprise
|
|
||||||
url: https://www.ansible.com/products/engine?utm_medium=github&utm_source=issue_template_chooser_ansible_collections
|
|
||||||
about: Red Hat offers support for the Ansible Automation Platform
|
|
||||||
125
.github/ISSUE_TEMPLATE/documentation_report.yml
vendored
125
.github/ISSUE_TEMPLATE/documentation_report.yml
vendored
@@ -1,125 +0,0 @@
|
|||||||
---
|
|
||||||
name: Documentation Report
|
|
||||||
description: Ask us about docs
|
|
||||||
# NOTE: issue body is enabled to allow screenshots
|
|
||||||
|
|
||||||
body:
|
|
||||||
- type: markdown
|
|
||||||
attributes:
|
|
||||||
value: |
|
|
||||||
⚠
|
|
||||||
Verify first that your issue is not [already reported on GitHub][issue search].
|
|
||||||
Also test if the latest release and devel branch are affected too.
|
|
||||||
*Complete **all** sections as described, this form is processed automatically.*
|
|
||||||
|
|
||||||
[issue search]: https://github.com/ansible-collections/community.general/search?q=is%3Aissue&type=issues
|
|
||||||
|
|
||||||
|
|
||||||
- type: textarea
|
|
||||||
attributes:
|
|
||||||
label: Summary
|
|
||||||
description: |
|
|
||||||
Explain the problem briefly below, add suggestions to wording or structure.
|
|
||||||
|
|
||||||
**HINT:** Did you know the documentation has an `Edit on GitHub` link on every page?
|
|
||||||
placeholder: >-
|
|
||||||
I was reading the Collection documentation of version X and I'm having
|
|
||||||
problems understanding Y. It would be very helpful if that got
|
|
||||||
rephrased as Z.
|
|
||||||
validations:
|
|
||||||
required: true
|
|
||||||
|
|
||||||
- type: dropdown
|
|
||||||
attributes:
|
|
||||||
label: Issue Type
|
|
||||||
# FIXME: Once GitHub allows defining the default choice, update this
|
|
||||||
options:
|
|
||||||
- Documentation Report
|
|
||||||
validations:
|
|
||||||
required: true
|
|
||||||
|
|
||||||
- type: input
|
|
||||||
attributes:
|
|
||||||
label: Component Name
|
|
||||||
description: >-
|
|
||||||
Write the short name of the rst file, module, plugin, task or
|
|
||||||
feature below, *use your best guess if unsure*.
|
|
||||||
placeholder: mysql_user
|
|
||||||
validations:
|
|
||||||
required: true
|
|
||||||
|
|
||||||
- type: textarea
|
|
||||||
attributes:
|
|
||||||
label: Ansible Version
|
|
||||||
description: >-
|
|
||||||
Paste verbatim output from `ansible --version` between
|
|
||||||
tripple backticks.
|
|
||||||
value: |
|
|
||||||
```console (paste below)
|
|
||||||
$ ansible --version
|
|
||||||
|
|
||||||
```
|
|
||||||
validations:
|
|
||||||
required: false
|
|
||||||
|
|
||||||
- type: textarea
|
|
||||||
attributes:
|
|
||||||
label: Community.general Version
|
|
||||||
description: >-
|
|
||||||
Paste verbatim output from "ansible-galaxy collection list community.general"
|
|
||||||
between tripple backticks.
|
|
||||||
value: |
|
|
||||||
```console (paste below)
|
|
||||||
$ ansible-galaxy collection list community.general
|
|
||||||
|
|
||||||
```
|
|
||||||
validations:
|
|
||||||
required: true
|
|
||||||
|
|
||||||
- type: textarea
|
|
||||||
attributes:
|
|
||||||
label: Configuration
|
|
||||||
description: >-
|
|
||||||
Paste verbatim output from `ansible-config dump --only-changed` between quotes.
|
|
||||||
value: |
|
|
||||||
```console (paste below)
|
|
||||||
$ ansible-config dump --only-changed
|
|
||||||
|
|
||||||
```
|
|
||||||
validations:
|
|
||||||
required: false
|
|
||||||
|
|
||||||
- type: textarea
|
|
||||||
attributes:
|
|
||||||
label: OS / Environment
|
|
||||||
description: >-
|
|
||||||
Provide all relevant information below, e.g. OS version,
|
|
||||||
browser, etc.
|
|
||||||
placeholder: Fedora 33, Firefox etc.
|
|
||||||
validations:
|
|
||||||
required: false
|
|
||||||
|
|
||||||
- type: textarea
|
|
||||||
attributes:
|
|
||||||
label: Additional Information
|
|
||||||
description: |
|
|
||||||
Describe how this improves the documentation, e.g. before/after situation or screenshots.
|
|
||||||
|
|
||||||
**Tip:** It's not possible to upload the screenshot via this field directly but you can use the last textarea in this form to attach them.
|
|
||||||
|
|
||||||
**HINT:** You can paste https://gist.github.com links for larger files.
|
|
||||||
placeholder: >-
|
|
||||||
When the improvement is applied, it makes it more straightforward
|
|
||||||
to understand X.
|
|
||||||
validations:
|
|
||||||
required: false
|
|
||||||
|
|
||||||
- type: checkboxes
|
|
||||||
attributes:
|
|
||||||
label: Code of Conduct
|
|
||||||
description: |
|
|
||||||
Read the [Ansible Code of Conduct](https://docs.ansible.com/ansible/latest/community/code_of_conduct.html?utm_medium=github&utm_source=issue_form--ansible-collections) first.
|
|
||||||
options:
|
|
||||||
- label: I agree to follow the Ansible Code of Conduct
|
|
||||||
required: true
|
|
||||||
...
|
|
||||||
69
.github/ISSUE_TEMPLATE/feature_request.yml
vendored
69
.github/ISSUE_TEMPLATE/feature_request.yml
vendored
@@ -1,69 +0,0 @@
|
|||||||
---
|
|
||||||
name: Feature request
|
|
||||||
description: Suggest an idea for this project
|
|
||||||
|
|
||||||
body:
|
|
||||||
- type: markdown
|
|
||||||
attributes:
|
|
||||||
value: |
|
|
||||||
⚠
|
|
||||||
Verify first that your issue is not [already reported on GitHub][issue search].
|
|
||||||
Also test if the latest release and devel branch are affected too.
|
|
||||||
*Complete **all** sections as described, this form is processed automatically.*
|
|
||||||
|
|
||||||
[issue search]: https://github.com/ansible-collections/community.general/search?q=is%3Aissue&type=issues
|
|
||||||
|
|
||||||
|
|
||||||
- type: textarea
|
|
||||||
attributes:
|
|
||||||
label: Summary
|
|
||||||
description: Describe the new feature/improvement briefly below.
|
|
||||||
placeholder: >-
|
|
||||||
I am trying to do X with the collection from the main branch on GitHub and
|
|
||||||
I think that implementing a feature Y would be very helpful for me and
|
|
||||||
every other user of community.general because of Z.
|
|
||||||
validations:
|
|
||||||
required: true
|
|
||||||
|
|
||||||
- type: dropdown
|
|
||||||
attributes:
|
|
||||||
label: Issue Type
|
|
||||||
# FIXME: Once GitHub allows defining the default choice, update this
|
|
||||||
options:
|
|
||||||
- Feature Idea
|
|
||||||
validations:
|
|
||||||
required: true
|
|
||||||
|
|
||||||
- type: input
|
|
||||||
attributes:
|
|
||||||
label: Component Name
|
|
||||||
description: >-
|
|
||||||
Write the short name of the module, plugin, task or feature below,
|
|
||||||
*use your best guess if unsure*.
|
|
||||||
placeholder: dnf, apt, yum, pip, user etc.
|
|
||||||
validations:
|
|
||||||
required: true
|
|
||||||
|
|
||||||
- type: textarea
|
|
||||||
attributes:
|
|
||||||
label: Additional Information
|
|
||||||
description: |
|
|
||||||
Describe how the feature would be used, why it is needed and what it would solve.
|
|
||||||
|
|
||||||
**HINT:** You can paste https://gist.github.com links for larger files.
|
|
||||||
value: |
|
|
||||||
<!--- Paste example playbooks or commands between quotes below -->
|
|
||||||
```yaml (paste below)
|
|
||||||
|
|
||||||
```
|
|
||||||
validations:
|
|
||||||
required: false
|
|
||||||
- type: checkboxes
|
|
||||||
attributes:
|
|
||||||
label: Code of Conduct
|
|
||||||
description: |
|
|
||||||
Read the [Ansible Code of Conduct](https://docs.ansible.com/ansible/latest/community/code_of_conduct.html?utm_medium=github&utm_source=issue_form--ansible-collections) first.
|
|
||||||
options:
|
|
||||||
- label: I agree to follow the Ansible Code of Conduct
|
|
||||||
required: true
|
|
||||||
...
|
|
||||||
7
.github/dependabot.yml
vendored
7
.github/dependabot.yml
vendored
@@ -1,7 +0,0 @@
|
|||||||
---
|
|
||||||
version: 2
|
|
||||||
updates:
|
|
||||||
- package-ecosystem: "github-actions"
|
|
||||||
directory: "/"
|
|
||||||
schedule:
|
|
||||||
interval: "weekly"
|
|
||||||
15
.github/workflows/codeql-analysis.yml
vendored
15
.github/workflows/codeql-analysis.yml
vendored
@@ -4,21 +4,14 @@ on:
|
|||||||
schedule:
|
schedule:
|
||||||
- cron: '26 19 * * 1'
|
- cron: '26 19 * * 1'
|
||||||
|
|
||||||
permissions:
|
|
||||||
contents: read
|
|
||||||
|
|
||||||
jobs:
|
jobs:
|
||||||
CodeQL-Build:
|
CodeQL-Build:
|
||||||
|
|
||||||
permissions:
|
|
||||||
actions: read # for github/codeql-action/init to get workflow details
|
|
||||||
contents: read # for actions/checkout to fetch code
|
|
||||||
security-events: write # for github/codeql-action/autobuild to send a status report
|
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
|
|
||||||
steps:
|
steps:
|
||||||
- name: Checkout repository
|
- name: Checkout repository
|
||||||
uses: actions/checkout@v3
|
uses: actions/checkout@v2
|
||||||
with:
|
with:
|
||||||
# We must fetch at least the immediate parents so that if this is
|
# We must fetch at least the immediate parents so that if this is
|
||||||
# a pull request then we can checkout the head.
|
# a pull request then we can checkout the head.
|
||||||
@@ -31,7 +24,7 @@ jobs:
|
|||||||
|
|
||||||
# Initializes the CodeQL tools for scanning.
|
# Initializes the CodeQL tools for scanning.
|
||||||
- name: Initialize CodeQL
|
- name: Initialize CodeQL
|
||||||
uses: github/codeql-action/init@v2
|
uses: github/codeql-action/init@v1
|
||||||
# Override language selection by uncommenting this and choosing your languages
|
# Override language selection by uncommenting this and choosing your languages
|
||||||
# with:
|
# with:
|
||||||
# languages: go, javascript, csharp, python, cpp, java
|
# languages: go, javascript, csharp, python, cpp, java
|
||||||
@@ -39,7 +32,7 @@ jobs:
|
|||||||
# Autobuild attempts to build any compiled languages (C/C++, C#, or Java).
|
# Autobuild attempts to build any compiled languages (C/C++, C#, or Java).
|
||||||
# If this step fails, then you should remove it and run the build manually (see below)
|
# If this step fails, then you should remove it and run the build manually (see below)
|
||||||
- name: Autobuild
|
- name: Autobuild
|
||||||
uses: github/codeql-action/autobuild@v2
|
uses: github/codeql-action/autobuild@v1
|
||||||
|
|
||||||
# ℹ️ Command-line programs to run using the OS shell.
|
# ℹ️ Command-line programs to run using the OS shell.
|
||||||
# 📚 https://git.io/JvXDl
|
# 📚 https://git.io/JvXDl
|
||||||
@@ -53,4 +46,4 @@ jobs:
|
|||||||
# make release
|
# make release
|
||||||
|
|
||||||
- name: Perform CodeQL Analysis
|
- name: Perform CodeQL Analysis
|
||||||
uses: github/codeql-action/analyze@v2
|
uses: github/codeql-action/analyze@v1
|
||||||
|
|||||||
171
.gitignore
vendored
171
.gitignore
vendored
@@ -1,6 +1,6 @@
|
|||||||
|
|
||||||
# Created by https://www.toptal.com/developers/gitignore/api/vim,git,macos,linux,pydev,emacs,dotenv,python,windows,webstorm,pycharm+all,jupyternotebooks
|
# Created by https://www.gitignore.io/api/git,linux,pydev,python,windows,pycharm+all,jupyternotebook,vim,webstorm,emacs,dotenv
|
||||||
# Edit at https://www.toptal.com/developers/gitignore?templates=vim,git,macos,linux,pydev,emacs,dotenv,python,windows,webstorm,pycharm+all,jupyternotebooks
|
# Edit at https://www.gitignore.io/?templates=git,linux,pydev,python,windows,pycharm+all,jupyternotebook,vim,webstorm,emacs,dotenv
|
||||||
|
|
||||||
### dotenv ###
|
### dotenv ###
|
||||||
.env
|
.env
|
||||||
@@ -71,19 +71,7 @@ flycheck_*.el
|
|||||||
*_LOCAL_*.txt
|
*_LOCAL_*.txt
|
||||||
*_REMOTE_*.txt
|
*_REMOTE_*.txt
|
||||||
|
|
||||||
### JupyterNotebooks ###
|
#!! ERROR: jupyternotebook is undefined. Use list command to see defined gitignore types !!#
|
||||||
# gitignore template for Jupyter Notebooks
|
|
||||||
# website: http://jupyter.org/
|
|
||||||
|
|
||||||
.ipynb_checkpoints
|
|
||||||
*/.ipynb_checkpoints/*
|
|
||||||
|
|
||||||
# IPython
|
|
||||||
profile_default/
|
|
||||||
ipython_config.py
|
|
||||||
|
|
||||||
# Remove previous ipynb_checkpoints
|
|
||||||
# git rm -r .ipynb_checkpoints/
|
|
||||||
|
|
||||||
### Linux ###
|
### Linux ###
|
||||||
|
|
||||||
@@ -99,41 +87,8 @@ ipython_config.py
|
|||||||
# .nfs files are created when an open file is removed but is still being accessed
|
# .nfs files are created when an open file is removed but is still being accessed
|
||||||
.nfs*
|
.nfs*
|
||||||
|
|
||||||
### macOS ###
|
|
||||||
# General
|
|
||||||
.DS_Store
|
|
||||||
.AppleDouble
|
|
||||||
.LSOverride
|
|
||||||
|
|
||||||
# Icon must end with two \r
|
|
||||||
Icon
|
|
||||||
|
|
||||||
|
|
||||||
# Thumbnails
|
|
||||||
._*
|
|
||||||
|
|
||||||
# Files that might appear in the root of a volume
|
|
||||||
.DocumentRevisions-V100
|
|
||||||
.fseventsd
|
|
||||||
.Spotlight-V100
|
|
||||||
.TemporaryItems
|
|
||||||
.Trashes
|
|
||||||
.VolumeIcon.icns
|
|
||||||
.com.apple.timemachine.donotpresent
|
|
||||||
|
|
||||||
# Directories potentially created on remote AFP share
|
|
||||||
.AppleDB
|
|
||||||
.AppleDesktop
|
|
||||||
Network Trash Folder
|
|
||||||
Temporary Items
|
|
||||||
.apdisk
|
|
||||||
|
|
||||||
### macOS Patch ###
|
|
||||||
# iCloud generated files
|
|
||||||
*.icloud
|
|
||||||
|
|
||||||
### PyCharm+all ###
|
### PyCharm+all ###
|
||||||
# Covers JetBrains IDEs: IntelliJ, RubyMine, PhpStorm, AppCode, PyCharm, CLion, Android Studio, WebStorm and Rider
|
# Covers JetBrains IDEs: IntelliJ, RubyMine, PhpStorm, AppCode, PyCharm, CLion, Android Studio and WebStorm
|
||||||
# Reference: https://intellij-support.jetbrains.com/hc/en-us/articles/206544839
|
# Reference: https://intellij-support.jetbrains.com/hc/en-us/articles/206544839
|
||||||
|
|
||||||
# User-specific stuff
|
# User-specific stuff
|
||||||
@@ -143,9 +98,6 @@ Temporary Items
|
|||||||
.idea/**/dictionaries
|
.idea/**/dictionaries
|
||||||
.idea/**/shelf
|
.idea/**/shelf
|
||||||
|
|
||||||
# AWS User-specific
|
|
||||||
.idea/**/aws.xml
|
|
||||||
|
|
||||||
# Generated files
|
# Generated files
|
||||||
.idea/**/contentModel.xml
|
.idea/**/contentModel.xml
|
||||||
|
|
||||||
@@ -166,9 +118,6 @@ Temporary Items
|
|||||||
# When using Gradle or Maven with auto-import, you should exclude module files,
|
# When using Gradle or Maven with auto-import, you should exclude module files,
|
||||||
# since they will be recreated, and may cause churn. Uncomment if using
|
# since they will be recreated, and may cause churn. Uncomment if using
|
||||||
# auto-import.
|
# auto-import.
|
||||||
# .idea/artifacts
|
|
||||||
# .idea/compiler.xml
|
|
||||||
# .idea/jarRepositories.xml
|
|
||||||
# .idea/modules.xml
|
# .idea/modules.xml
|
||||||
# .idea/*.iml
|
# .idea/*.iml
|
||||||
# .idea/modules
|
# .idea/modules
|
||||||
@@ -196,9 +145,6 @@ atlassian-ide-plugin.xml
|
|||||||
# Cursive Clojure plugin
|
# Cursive Clojure plugin
|
||||||
.idea/replstate.xml
|
.idea/replstate.xml
|
||||||
|
|
||||||
# SonarLint plugin
|
|
||||||
.idea/sonarlint/
|
|
||||||
|
|
||||||
# Crashlytics plugin (for Android Studio and IntelliJ)
|
# Crashlytics plugin (for Android Studio and IntelliJ)
|
||||||
com_crashlytics_export_strings.xml
|
com_crashlytics_export_strings.xml
|
||||||
crashlytics.properties
|
crashlytics.properties
|
||||||
@@ -212,13 +158,20 @@ fabric.properties
|
|||||||
.idea/caches/build_file_checksums.ser
|
.idea/caches/build_file_checksums.ser
|
||||||
|
|
||||||
### PyCharm+all Patch ###
|
### PyCharm+all Patch ###
|
||||||
# Ignore everything but code style settings and run configurations
|
# Ignores the whole .idea folder and all .iml files
|
||||||
# that are supposed to be shared within teams.
|
# See https://github.com/joeblau/gitignore.io/issues/186 and https://github.com/joeblau/gitignore.io/issues/360
|
||||||
|
|
||||||
.idea/*
|
.idea/
|
||||||
|
|
||||||
!.idea/codeStyles
|
# Reason: https://github.com/joeblau/gitignore.io/issues/186#issuecomment-249601023
|
||||||
!.idea/runConfigurations
|
|
||||||
|
*.iml
|
||||||
|
modules.xml
|
||||||
|
.idea/misc.xml
|
||||||
|
*.ipr
|
||||||
|
|
||||||
|
# Sonarlint plugin
|
||||||
|
.idea/sonarlint
|
||||||
|
|
||||||
### pydev ###
|
### pydev ###
|
||||||
.pydevproject
|
.pydevproject
|
||||||
@@ -245,6 +198,7 @@ parts/
|
|||||||
sdist/
|
sdist/
|
||||||
var/
|
var/
|
||||||
wheels/
|
wheels/
|
||||||
|
pip-wheel-metadata/
|
||||||
share/python-wheels/
|
share/python-wheels/
|
||||||
*.egg-info/
|
*.egg-info/
|
||||||
.installed.cfg
|
.installed.cfg
|
||||||
@@ -271,25 +225,13 @@ htmlcov/
|
|||||||
nosetests.xml
|
nosetests.xml
|
||||||
coverage.xml
|
coverage.xml
|
||||||
*.cover
|
*.cover
|
||||||
*.py,cover
|
|
||||||
.hypothesis/
|
.hypothesis/
|
||||||
.pytest_cache/
|
.pytest_cache/
|
||||||
cover/
|
|
||||||
|
|
||||||
# Translations
|
# Translations
|
||||||
*.mo
|
*.mo
|
||||||
*.pot
|
*.pot
|
||||||
|
|
||||||
# Django stuff:
|
|
||||||
*.log
|
|
||||||
local_settings.py
|
|
||||||
db.sqlite3
|
|
||||||
db.sqlite3-journal
|
|
||||||
|
|
||||||
# Flask stuff:
|
|
||||||
instance/
|
|
||||||
.webassets-cache
|
|
||||||
|
|
||||||
# Scrapy stuff:
|
# Scrapy stuff:
|
||||||
.scrapy
|
.scrapy
|
||||||
|
|
||||||
@@ -297,17 +239,10 @@ instance/
|
|||||||
docs/_build/
|
docs/_build/
|
||||||
|
|
||||||
# PyBuilder
|
# PyBuilder
|
||||||
.pybuilder/
|
|
||||||
target/
|
target/
|
||||||
|
|
||||||
# Jupyter Notebook
|
|
||||||
|
|
||||||
# IPython
|
|
||||||
|
|
||||||
# pyenv
|
# pyenv
|
||||||
# For a library or package, you might want to ignore these files since the code is
|
.python-version
|
||||||
# intended to run in multiple environments; otherwise, check them in:
|
|
||||||
# .python-version
|
|
||||||
|
|
||||||
# pipenv
|
# pipenv
|
||||||
# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
|
# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
|
||||||
@@ -316,39 +251,12 @@ target/
|
|||||||
# install all needed dependencies.
|
# install all needed dependencies.
|
||||||
#Pipfile.lock
|
#Pipfile.lock
|
||||||
|
|
||||||
# poetry
|
# celery beat schedule file
|
||||||
# Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control.
|
|
||||||
# This is especially recommended for binary packages to ensure reproducibility, and is more
|
|
||||||
# commonly ignored for libraries.
|
|
||||||
# https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control
|
|
||||||
#poetry.lock
|
|
||||||
|
|
||||||
# pdm
|
|
||||||
# Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control.
|
|
||||||
#pdm.lock
|
|
||||||
# pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it
|
|
||||||
# in version control.
|
|
||||||
# https://pdm.fming.dev/#use-with-ide
|
|
||||||
.pdm.toml
|
|
||||||
|
|
||||||
# PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm
|
|
||||||
__pypackages__/
|
|
||||||
|
|
||||||
# Celery stuff
|
|
||||||
celerybeat-schedule
|
celerybeat-schedule
|
||||||
celerybeat.pid
|
|
||||||
|
|
||||||
# SageMath parsed files
|
# SageMath parsed files
|
||||||
*.sage.py
|
*.sage.py
|
||||||
|
|
||||||
# Environments
|
|
||||||
.venv
|
|
||||||
env/
|
|
||||||
venv/
|
|
||||||
ENV/
|
|
||||||
env.bak/
|
|
||||||
venv.bak/
|
|
||||||
|
|
||||||
# Spyder project settings
|
# Spyder project settings
|
||||||
.spyderproject
|
.spyderproject
|
||||||
.spyproject
|
.spyproject
|
||||||
@@ -356,6 +264,10 @@ venv.bak/
|
|||||||
# Rope project settings
|
# Rope project settings
|
||||||
.ropeproject
|
.ropeproject
|
||||||
|
|
||||||
|
# Mr Developer
|
||||||
|
.mr.developer.cfg
|
||||||
|
.project
|
||||||
|
|
||||||
# mkdocs documentation
|
# mkdocs documentation
|
||||||
/site
|
/site
|
||||||
|
|
||||||
@@ -367,23 +279,9 @@ dmypy.json
|
|||||||
# Pyre type checker
|
# Pyre type checker
|
||||||
.pyre/
|
.pyre/
|
||||||
|
|
||||||
# pytype static type analyzer
|
|
||||||
.pytype/
|
|
||||||
|
|
||||||
# Cython debug symbols
|
|
||||||
cython_debug/
|
|
||||||
|
|
||||||
# PyCharm
|
|
||||||
# JetBrains specific template is maintained in a separate JetBrains.gitignore that can
|
|
||||||
# be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore
|
|
||||||
# and can be added to the global gitignore or merged into this file. For a more nuclear
|
|
||||||
# option (not recommended) you can uncomment the following to ignore the entire idea folder.
|
|
||||||
#.idea/
|
|
||||||
|
|
||||||
### Vim ###
|
### Vim ###
|
||||||
# Swap
|
# Swap
|
||||||
[._]*.s[a-v][a-z]
|
[._]*.s[a-v][a-z]
|
||||||
!*.svg # comment out if you don't need vector files
|
|
||||||
[._]*.sw[a-p]
|
[._]*.sw[a-p]
|
||||||
[._]s[a-rt-v][a-z]
|
[._]s[a-rt-v][a-z]
|
||||||
[._]ss[a-gi-z]
|
[._]ss[a-gi-z]
|
||||||
@@ -401,13 +299,11 @@ tags
|
|||||||
[._]*.un~
|
[._]*.un~
|
||||||
|
|
||||||
### WebStorm ###
|
### WebStorm ###
|
||||||
# Covers JetBrains IDEs: IntelliJ, RubyMine, PhpStorm, AppCode, PyCharm, CLion, Android Studio, WebStorm and Rider
|
# Covers JetBrains IDEs: IntelliJ, RubyMine, PhpStorm, AppCode, PyCharm, CLion, Android Studio and WebStorm
|
||||||
# Reference: https://intellij-support.jetbrains.com/hc/en-us/articles/206544839
|
# Reference: https://intellij-support.jetbrains.com/hc/en-us/articles/206544839
|
||||||
|
|
||||||
# User-specific stuff
|
# User-specific stuff
|
||||||
|
|
||||||
# AWS User-specific
|
|
||||||
|
|
||||||
# Generated files
|
# Generated files
|
||||||
|
|
||||||
# Sensitive or high-churn files
|
# Sensitive or high-churn files
|
||||||
@@ -418,9 +314,6 @@ tags
|
|||||||
# When using Gradle or Maven with auto-import, you should exclude module files,
|
# When using Gradle or Maven with auto-import, you should exclude module files,
|
||||||
# since they will be recreated, and may cause churn. Uncomment if using
|
# since they will be recreated, and may cause churn. Uncomment if using
|
||||||
# auto-import.
|
# auto-import.
|
||||||
# .idea/artifacts
|
|
||||||
# .idea/compiler.xml
|
|
||||||
# .idea/jarRepositories.xml
|
|
||||||
# .idea/modules.xml
|
# .idea/modules.xml
|
||||||
# .idea/*.iml
|
# .idea/*.iml
|
||||||
# .idea/modules
|
# .idea/modules
|
||||||
@@ -441,8 +334,6 @@ tags
|
|||||||
|
|
||||||
# Cursive Clojure plugin
|
# Cursive Clojure plugin
|
||||||
|
|
||||||
# SonarLint plugin
|
|
||||||
|
|
||||||
# Crashlytics plugin (for Android Studio and IntelliJ)
|
# Crashlytics plugin (for Android Studio and IntelliJ)
|
||||||
|
|
||||||
# Editor-based Rest Client
|
# Editor-based Rest Client
|
||||||
@@ -458,27 +349,15 @@ tags
|
|||||||
# *.ipr
|
# *.ipr
|
||||||
|
|
||||||
# Sonarlint plugin
|
# Sonarlint plugin
|
||||||
# https://plugins.jetbrains.com/plugin/7973-sonarlint
|
|
||||||
.idea/**/sonarlint/
|
.idea/**/sonarlint/
|
||||||
|
|
||||||
# SonarQube Plugin
|
# SonarQube Plugin
|
||||||
# https://plugins.jetbrains.com/plugin/7238-sonarqube-community-plugin
|
|
||||||
.idea/**/sonarIssues.xml
|
.idea/**/sonarIssues.xml
|
||||||
|
|
||||||
# Markdown Navigator plugin
|
# Markdown Navigator plugin
|
||||||
# https://plugins.jetbrains.com/plugin/7896-markdown-navigator-enhanced
|
|
||||||
.idea/**/markdown-navigator.xml
|
.idea/**/markdown-navigator.xml
|
||||||
.idea/**/markdown-navigator-enh.xml
|
|
||||||
.idea/**/markdown-navigator/
|
.idea/**/markdown-navigator/
|
||||||
|
|
||||||
# Cache file creation bug
|
|
||||||
# See https://youtrack.jetbrains.com/issue/JBR-2257
|
|
||||||
.idea/$CACHE_FILE$
|
|
||||||
|
|
||||||
# CodeStream plugin
|
|
||||||
# https://plugins.jetbrains.com/plugin/12206-codestream
|
|
||||||
.idea/codestream.xml
|
|
||||||
|
|
||||||
### Windows ###
|
### Windows ###
|
||||||
# Windows thumbnail cache files
|
# Windows thumbnail cache files
|
||||||
Thumbs.db
|
Thumbs.db
|
||||||
@@ -505,4 +384,4 @@ $RECYCLE.BIN/
|
|||||||
# Windows shortcuts
|
# Windows shortcuts
|
||||||
*.lnk
|
*.lnk
|
||||||
|
|
||||||
# End of https://www.toptal.com/developers/gitignore/api/vim,git,macos,linux,pydev,emacs,dotenv,python,windows,webstorm,pycharm+all,jupyternotebooks
|
# End of https://www.gitignore.io/api/git,linux,pydev,python,windows,pycharm+all,jupyternotebook,vim,webstorm,emacs,dotenv
|
||||||
|
|||||||
@@ -1,19 +0,0 @@
|
|||||||
---
|
|
||||||
repos:
|
|
||||||
- repo: https://github.com/pre-commit/pre-commit-hooks
|
|
||||||
rev: v4.0.1
|
|
||||||
hooks:
|
|
||||||
- id: trailing-whitespace
|
|
||||||
- id: end-of-file-fixer
|
|
||||||
- id: mixed-line-ending
|
|
||||||
args: [--fix=lf]
|
|
||||||
- id: fix-encoding-pragma
|
|
||||||
- id: check-ast
|
|
||||||
- id: check-merge-conflict
|
|
||||||
- id: check-symlinks
|
|
||||||
- repo: https://github.com/pre-commit/pygrep-hooks
|
|
||||||
rev: v1.9.0
|
|
||||||
hooks:
|
|
||||||
- id: rst-backticks
|
|
||||||
types: [file]
|
|
||||||
files: changelogs/fragments/.*\.(yml|yaml)$
|
|
||||||
987
CHANGELOG.rst
987
CHANGELOG.rst
File diff suppressed because it is too large
Load Diff
143
CONTRIBUTING.md
143
CONTRIBUTING.md
@@ -1,143 +0,0 @@
|
|||||||
# Contributing
|
|
||||||
|
|
||||||
We follow [Ansible Code of Conduct](https://docs.ansible.com/ansible/latest/community/code_of_conduct.html) in all our contributions and interactions within this repository.
|
|
||||||
|
|
||||||
If you are a committer, also refer to the [collection's committer guidelines](https://github.com/ansible-collections/community.general/blob/main/commit-rights.md).
|
|
||||||
|
|
||||||
## Issue tracker
|
|
||||||
|
|
||||||
Whether you are looking for an opportunity to contribute or you found a bug and already know how to solve it, please go to the [issue tracker](https://github.com/ansible-collections/community.general/issues).
|
|
||||||
There you can find feature ideas to implement, reports about bugs to solve, or submit an issue to discuss your idea before implementing it which can help choose a right direction at the beginning of your work and potentially save a lot of time and effort.
|
|
||||||
Also somebody may already have started discussing or working on implementing the same or a similar idea,
|
|
||||||
so you can cooperate to create a better solution together.
|
|
||||||
|
|
||||||
* If you are interested in starting with an easy issue, look for [issues with an `easyfix` label](https://github.com/ansible-collections/community.general/labels/easyfix).
|
|
||||||
* Often issues that are waiting for contributors to pick up have [the `waiting_on_contributor` label](https://github.com/ansible-collections/community.general/labels/waiting_on_contributor).
|
|
||||||
|
|
||||||
## Open pull requests
|
|
||||||
|
|
||||||
Look through currently [open pull requests](https://github.com/ansible-collections/community.general/pulls).
|
|
||||||
You can help by reviewing them. Reviews help move pull requests to merge state. Some good pull requests cannot be merged only due to a lack of reviews. And it is always worth saying that good reviews are often more valuable than pull requests themselves.
|
|
||||||
Note that reviewing does not only mean code review, but also offering comments on new interfaces added to existing plugins/modules, interfaces of new plugins/modules, improving language (not everyone is a native english speaker), or testing bugfixes and new features!
|
|
||||||
|
|
||||||
Also, consider taking up a valuable, reviewed, but abandoned pull request which you could politely ask the original authors to complete yourself.
|
|
||||||
|
|
||||||
* Try committing your changes with an informative but short commit message.
|
|
||||||
* Do not squash your commits and force-push to your branch if not needed. Reviews of your pull request are much easier with individual commits to comprehend the pull request history. All commits of your pull request branch will be squashed into one commit by GitHub upon merge.
|
|
||||||
* Do not add merge commits to your PR. The bot will complain and you will have to rebase ([instructions for rebasing](https://docs.ansible.com/ansible/latest/dev_guide/developing_rebasing.html)) to remove them before your PR can be merged. To avoid that git automatically does merges during pulls, you can configure it to do rebases instead by running `git config pull.rebase true` inside the repository checkout.
|
|
||||||
* Make sure your PR includes a [changelog fragment](https://docs.ansible.com/ansible/devel/community/development_process.html#changelogs-how-to). (You must not include a fragment for new modules or new plugins, except for test and filter plugins. Also you shouldn't include one for docs-only changes. If you're not sure, simply don't include one, we'll tell you whether one is needed or not :) )
|
|
||||||
* Avoid reformatting unrelated parts of the codebase in your PR. These types of changes will likely be requested for reversion, create additional work for reviewers, and may cause approval to be delayed.
|
|
||||||
|
|
||||||
You can also read [our Quick-start development guide](https://github.com/ansible/community-docs/blob/main/create_pr_quick_start_guide.rst).
|
|
||||||
|
|
||||||
## Test pull requests
|
|
||||||
|
|
||||||
If you want to test a PR locally, refer to [our testing guide](https://github.com/ansible/community-docs/blob/main/test_pr_locally_guide.rst) for instructions on how do it quickly.
|
|
||||||
|
|
||||||
If you find any inconsistencies or places in this document which can be improved, feel free to raise an issue or pull request to fix it.
|
|
||||||
|
|
||||||
## Run sanity, unit or integration tests locally
|
|
||||||
|
|
||||||
You have to check out the repository into a specific path structure to be able to run `ansible-test`. The path to the git checkout must end with `.../ansible_collections/community/general`. Please see [our testing guide](https://github.com/ansible/community-docs/blob/main/test_pr_locally_guide.rst) for instructions on how to check out the repository into a correct path structure. The short version of these instructions is:
|
|
||||||
|
|
||||||
```.bash
|
|
||||||
mkdir -p ~/dev/ansible_collections/community
|
|
||||||
git clone https://github.com/ansible-collections/community.general.git ~/dev/ansible_collections/community/general
|
|
||||||
cd ~/dev/ansible_collections/community/general
|
|
||||||
```
|
|
||||||
|
|
||||||
Then you can run `ansible-test` (which is a part of [ansible-core](https://pypi.org/project/ansible-core/)) inside the checkout. The following example commands expect that you have installed Docker or Podman. Note that Podman has only been supported by more recent ansible-core releases. If you are using Docker, the following will work with Ansible 2.9+.
|
|
||||||
|
|
||||||
The following commands show how to run sanity tests:
|
|
||||||
|
|
||||||
```.bash
|
|
||||||
# Run sanity tests for all files in the collection:
|
|
||||||
ansible-test sanity --docker -v
|
|
||||||
|
|
||||||
# Run sanity tests for the given files and directories:
|
|
||||||
ansible-test sanity --docker -v plugins/modules/system/pids.py tests/integration/targets/pids/
|
|
||||||
```
|
|
||||||
|
|
||||||
The following commands show how to run unit tests:
|
|
||||||
|
|
||||||
```.bash
|
|
||||||
# Run all unit tests:
|
|
||||||
ansible-test units --docker -v
|
|
||||||
|
|
||||||
# Run all unit tests for one Python version (a lot faster):
|
|
||||||
ansible-test units --docker -v --python 3.8
|
|
||||||
|
|
||||||
# Run a specific unit test (for the nmcli module) for one Python version:
|
|
||||||
ansible-test units --docker -v --python 3.8 tests/unit/plugins/modules/net_tools/test_nmcli.py
|
|
||||||
```
|
|
||||||
|
|
||||||
The following commands show how to run integration tests:
|
|
||||||
|
|
||||||
```.bash
|
|
||||||
# Run integration tests for the interfaces_files module in a Docker container using the
|
|
||||||
# fedora35 operating system image (the supported images depend on your ansible-core version):
|
|
||||||
ansible-test integration --docker fedora35 -v interfaces_file
|
|
||||||
|
|
||||||
# Run integration tests for the flattened lookup **without any isolation**:
|
|
||||||
ansible-test integration -v lookup_flattened
|
|
||||||
```
|
|
||||||
|
|
||||||
If you are unsure about the integration test target name for a module or plugin, you can take a look in `tests/integration/targets/`. Tests for plugins have the plugin type prepended.
|
|
||||||
|
|
||||||
## Creating new modules or plugins
|
|
||||||
|
|
||||||
Creating new modules and plugins requires a bit more work than other Pull Requests.
|
|
||||||
|
|
||||||
1. Please make sure that your new module or plugin is of interest to a larger audience. Very specialized modules or plugins that
|
|
||||||
can only be used by very few people should better be added to more specialized collections.
|
|
||||||
|
|
||||||
2. Please do not add more than one plugin/module in one PR, especially if it is the first plugin/module you are contributing.
|
|
||||||
That makes it easier for reviewers, and increases the chance that your PR will get merged. If you plan to contribute a group
|
|
||||||
of plugins/modules (say, more than a module and a corresponding ``_info`` module), please mention that in the first PR. In
|
|
||||||
such cases, you also have to think whether it is better to publish the group of plugins/modules in a new collection.
|
|
||||||
|
|
||||||
3. When creating a new module or plugin, please make sure that you follow various guidelines:
|
|
||||||
|
|
||||||
- Follow [development conventions](https://docs.ansible.com/ansible/devel/dev_guide/developing_modules_best_practices.html);
|
|
||||||
- Follow [documentation standards](https://docs.ansible.com/ansible/devel/dev_guide/developing_modules_documenting.html) and
|
|
||||||
the [Ansible style guide](https://docs.ansible.com/ansible/devel/dev_guide/style_guide/index.html#style-guide);
|
|
||||||
- Make sure your modules and plugins are [GPL-3.0-or-later](https://www.gnu.org/licenses/gpl-3.0-standalone.html) licensed
|
|
||||||
(new module_utils can also be [BSD-2-clause](https://opensource.org/licenses/BSD-2-Clause) licensed);
|
|
||||||
- Make sure that new plugins and modules have tests (unit tests, integration tests, or both); it is preferable to have some tests
|
|
||||||
which run in CI.
|
|
||||||
|
|
||||||
4. For modules and action plugins, make sure to create your module/plugin in the correct subdirectory, and add a redirect entry
|
|
||||||
in `meta/runtime.yml`. For example, for the `aerospike_migrations` module located in
|
|
||||||
`plugins/modules/database/aerospike/aerospike_migrations.py`, you need to create the following entry:
|
|
||||||
```.yaml
|
|
||||||
aerospike_migrations:
|
|
||||||
redirect: community.general.database.aerospike.aerospike_migrations
|
|
||||||
```
|
|
||||||
Here, the relative path `database/aerospike/` is inserted into the module's FQCN (Fully Qualified Collection Name) after the
|
|
||||||
collection's name and before the module's name. This must not be done for other plugin types but modules and action plugins!
|
|
||||||
|
|
||||||
- Action plugins need to be accompanied by a module, even if the module file only contains documentation
|
|
||||||
(`DOCUMENTATION`, `EXAMPLES` and `RETURN`). The module must have the same name and directory path in `plugins/modules/`
|
|
||||||
than the action plugin has in `plugins/action/`.
|
|
||||||
|
|
||||||
5. Make sure to add a BOTMETA entry for your new module/plugin in `.github/BOTMETA.yml`. Search for other plugins/modules in the
|
|
||||||
same directory to see how entries could look. You should list all authors either as `maintainers` or under `ignore`. People
|
|
||||||
listed as `maintainers` will be pinged for new issues and PRs that modify the module/plugin or its tests.
|
|
||||||
|
|
||||||
When you add a new plugin/module, we expect that you perform maintainer duty for at least some time after contributing it.
|
|
||||||
|
|
||||||
## pre-commit
|
|
||||||
|
|
||||||
To help ensure high-quality contributions this repository includes a [pre-commit](https://pre-commit.com) configuration which
|
|
||||||
corrects and tests against common issues that would otherwise cause CI to fail. To begin using these pre-commit hooks see
|
|
||||||
the [Installation](#installation) section below.
|
|
||||||
|
|
||||||
This is optional and not required to contribute to this repository.
|
|
||||||
|
|
||||||
### Installation
|
|
||||||
|
|
||||||
Follow the [instructions](https://pre-commit.com/#install) provided with pre-commit and run `pre-commit install` under the repository base. If for any reason you would like to disable the pre-commit hooks run `pre-commit uninstall`.
|
|
||||||
|
|
||||||
This is optional to run it locally.
|
|
||||||
|
|
||||||
You can trigger it locally with `pre-commit run --all-files` or even to run only for a given file `pre-commit run --files YOUR_FILE`.
|
|
||||||
91
README.md
91
README.md
@@ -1,25 +1,12 @@
|
|||||||
# Community General Collection
|
# Community General Collection
|
||||||
|
|
||||||
[](https://dev.azure.com/ansible/community.general/_build?definitionId=31)
|
[](https://app.shippable.com/github/ansible-collections/community.general/dashboard) [](https://codecov.io/gh/ansible-collections/community.general)
|
||||||
[](https://codecov.io/gh/ansible-collections/community.general)
|
|
||||||
|
|
||||||
This repository contains the `community.general` Ansible Collection. The collection is a part of the Ansible package and includes many modules and plugins supported by Ansible community which are not part of more specialized community collections.
|
This repo contains the `community.general` Ansible Collection. The collection includes many modules and plugins supported by Ansible community which are not part of more specialized community collections.
|
||||||
|
|
||||||
You can find [documentation for this collection on the Ansible docs site](https://docs.ansible.com/ansible/latest/collections/community/general/).
|
|
||||||
|
|
||||||
Please note that this collection does **not** support Windows targets. Only connection plugins included in this collection might support Windows targets, and will explicitly mention that in their documentation if they do so.
|
|
||||||
|
|
||||||
## Code of Conduct
|
|
||||||
|
|
||||||
We follow [Ansible Code of Conduct](https://docs.ansible.com/ansible/latest/community/code_of_conduct.html) in all our interactions within this project.
|
|
||||||
|
|
||||||
If you encounter abusive behavior violating the [Ansible Code of Conduct](https://docs.ansible.com/ansible/latest/community/code_of_conduct.html), please refer to the [policy violations](https://docs.ansible.com/ansible/latest/community/code_of_conduct.html#policy-violations) section of the Code of Conduct for information on how to raise a complaint.
|
|
||||||
|
|
||||||
## Tested with Ansible
|
## Tested with Ansible
|
||||||
|
|
||||||
Tested with the current ansible-core 2.11, ansible-core 2.12, ansible-core 2.13 releases and the current development version of ansible-core. Ansible-core versions before 2.11.0 are not supported. This includes all ansible-base 2.10 and Ansible 2.9 releases.
|
Tested with the current Ansible 2.9 and 2.10 releases and the current development version of Ansible. Ansible versions before 2.9.10 are not supported.
|
||||||
|
|
||||||
Parts of this collection will not work with ansible-core 2.11 on Python 3.12+.
|
|
||||||
|
|
||||||
## External requirements
|
## External requirements
|
||||||
|
|
||||||
@@ -27,13 +14,11 @@ Some modules and plugins require external libraries. Please check the requiremen
|
|||||||
|
|
||||||
## Included content
|
## Included content
|
||||||
|
|
||||||
Please check the included content on the [Ansible Galaxy page for this collection](https://galaxy.ansible.com/community/general) or the [documentation on the Ansible docs site](https://docs.ansible.com/ansible/latest/collections/community/general/).
|
Please check the included content on the [Ansible Galaxy page for this collection](https://galaxy.ansible.com/community/general).
|
||||||
|
|
||||||
## Using this collection
|
## Using this collection
|
||||||
|
|
||||||
This collection is shipped with the Ansible package. So if you have it installed, no more action is required.
|
Before using the General community collection, you need to install the collection with the `ansible-galaxy` CLI:
|
||||||
|
|
||||||
If you have a minimal installation (only Ansible Core installed) or you want to use the latest version of the collection along with the whole Ansible package, you need to install the collection from [Ansible Galaxy](https://galaxy.ansible.com/community/general) manually with the `ansible-galaxy` command-line tool:
|
|
||||||
|
|
||||||
ansible-galaxy collection install community.general
|
ansible-galaxy collection install community.general
|
||||||
|
|
||||||
@@ -44,79 +29,49 @@ collections:
|
|||||||
- name: community.general
|
- name: community.general
|
||||||
```
|
```
|
||||||
|
|
||||||
Note that if you install the collection manually, it will not be upgraded automatically when you upgrade the Ansible package. To upgrade the collection to the latest available version, run the following command:
|
|
||||||
|
|
||||||
```bash
|
|
||||||
ansible-galaxy collection install community.general --upgrade
|
|
||||||
```
|
|
||||||
|
|
||||||
You can also install a specific version of the collection, for example, if you need to downgrade when something is broken in the latest version (please report an issue in this repository). Use the following syntax where `X.Y.Z` can be any [available version](https://galaxy.ansible.com/community/general):
|
|
||||||
|
|
||||||
```bash
|
|
||||||
ansible-galaxy collection install community.general:==X.Y.Z
|
|
||||||
```
|
|
||||||
|
|
||||||
See [Ansible Using collections](https://docs.ansible.com/ansible/latest/user_guide/collections_using.html) for more details.
|
See [Ansible Using collections](https://docs.ansible.com/ansible/latest/user_guide/collections_using.html) for more details.
|
||||||
|
|
||||||
## Contributing to this collection
|
## Contributing to this collection
|
||||||
|
|
||||||
The content of this collection is made by good people just like you, a community of individuals collaborating on making the world better through developing automation software.
|
If you want to develop new content for this collection or improve what is already here, the easiest way to work on the collection is to clone it into one of the configured [`COLLECTIONS_PATH`](https://docs.ansible.com/ansible/latest/reference_appendices/config.html#collections-paths), and work on it there.
|
||||||
|
|
||||||
We are actively accepting new contributors.
|
|
||||||
|
|
||||||
All types of contributions are very welcome.
|
|
||||||
|
|
||||||
You don't know how to start? Refer to our [contribution guide](https://github.com/ansible-collections/community.general/blob/main/CONTRIBUTING.md)!
|
|
||||||
|
|
||||||
The current maintainers are listed in the [commit-rights.md](https://github.com/ansible-collections/community.general/blob/main/commit-rights.md#people) file. If you have questions or need help, feel free to mention them in the proposals.
|
|
||||||
|
|
||||||
You can find more information in the [developer guide for collections](https://docs.ansible.com/ansible/devel/dev_guide/developing_collections.html#contributing-to-collections), and in the [Ansible Community Guide](https://docs.ansible.com/ansible/latest/community/index.html).
|
You can find more information in the [developer guide for collections](https://docs.ansible.com/ansible/devel/dev_guide/developing_collections.html#contributing-to-collections), and in the [Ansible Community Guide](https://docs.ansible.com/ansible/latest/community/index.html).
|
||||||
|
|
||||||
Also for some notes specific to this collection see [our CONTRIBUTING documentation](https://github.com/ansible-collections/community.general/blob/main/CONTRIBUTING.md).
|
|
||||||
|
|
||||||
### Running tests
|
### Running tests
|
||||||
|
|
||||||
See [here](https://docs.ansible.com/ansible/devel/dev_guide/developing_collections.html#testing-collections).
|
See [here](https://docs.ansible.com/ansible/devel/dev_guide/developing_collections.html#testing-collections).
|
||||||
|
|
||||||
## Collection maintenance
|
### Communication
|
||||||
|
|
||||||
To learn how to maintain / become a maintainer of this collection, refer to:
|
We have a dedicated Working Group for Ansible development.
|
||||||
|
|
||||||
* [Committer guidelines](https://github.com/ansible-collections/community.general/blob/main/commit-rights.md).
|
You can find other people interested on the following Freenode IRC channels -
|
||||||
* [Maintainer guidelines](https://github.com/ansible/community-docs/blob/main/maintaining.rst).
|
- `#ansible` - For general use questions and support.
|
||||||
|
- `#ansible-devel` - For discussions on developer topics and code related to features or bugs.
|
||||||
It is necessary for maintainers of this collection to be subscribed to:
|
- `#ansible-community` - For discussions on community topics and community meetings.
|
||||||
|
|
||||||
* The collection itself (the `Watch` button → `All Activity` in the upper right corner of the repository's homepage).
|
|
||||||
* The "Changes Impacting Collection Contributors and Maintainers" [issue](https://github.com/ansible-collections/overview/issues/45).
|
|
||||||
|
|
||||||
They also should be subscribed to Ansible's [The Bullhorn newsletter](https://docs.ansible.com/ansible/devel/community/communication.html#the-bullhorn).
|
|
||||||
|
|
||||||
## Communication
|
|
||||||
|
|
||||||
We announce important development changes and releases through Ansible's [The Bullhorn newsletter](https://eepurl.com/gZmiEP). If you are a collection developer, be sure you are subscribed.
|
|
||||||
|
|
||||||
Join us in the `#ansible` (general use questions and support), `#ansible-community` (community and collection development questions), and other [IRC channels](https://docs.ansible.com/ansible/devel/community/communication.html#irc-channels) on [Libera.chat](https://libera.chat).
|
|
||||||
|
|
||||||
We take part in the global quarterly [Ansible Contributor Summit](https://github.com/ansible/community/wiki/Contributor-Summit) virtually or in-person. Track [The Bullhorn newsletter](https://eepurl.com/gZmiEP) and join us.
|
|
||||||
|
|
||||||
For more information about communities, meetings and agendas see [Community Wiki](https://github.com/ansible/community/wiki/Community).
|
For more information about communities, meetings and agendas see [Community Wiki](https://github.com/ansible/community/wiki/Community).
|
||||||
|
|
||||||
For more information about communication, refer to Ansible's the [Communication guide](https://docs.ansible.com/ansible/devel/community/communication.html).
|
For more information about [communication](https://docs.ansible.com/ansible/latest/community/communication.html)
|
||||||
|
|
||||||
## Publishing New Version
|
### Publishing New Version
|
||||||
|
|
||||||
See the [Releasing guidelines](https://github.com/ansible/community-docs/blob/main/releasing_collections.rst) to learn how to release this collection.
|
Basic instructions without release branches:
|
||||||
|
|
||||||
|
1. Create `changelogs/fragments/<version>.yml` with `release_summary:` section (which must be a string, not a list).
|
||||||
|
2. Run `antsibull-changelog release --collection-flatmap yes`
|
||||||
|
3. Make sure `CHANGELOG.rst` and `changelogs/changelog.yaml` are added to git, and the deleted fragments have been removed.
|
||||||
|
4. Tag the commit with `<version>`. Push changes and tag to the main repository.
|
||||||
|
|
||||||
## Release notes
|
## Release notes
|
||||||
|
|
||||||
See the [changelog](https://github.com/ansible-collections/community.general/blob/stable-5/CHANGELOG.rst).
|
See the [changelog](https://github.com/ansible-collections/community.general/blob/main/CHANGELOG.rst).
|
||||||
|
|
||||||
## Roadmap
|
## Roadmap
|
||||||
|
|
||||||
In general, we plan to release a major version every six months, and minor versions every two months. Major versions can contain breaking changes, while minor versions only contain new features and bugfixes.
|
See [this issue](https://github.com/ansible-collections/community.general/issues/582) for information on releasing, versioning and deprecation.
|
||||||
|
|
||||||
See [this issue](https://github.com/ansible-collections/community.general/issues/582) for information on releasing, versioning, and deprecation.
|
In general, we plan to release a major version every six months, and minor versions every two months. Major versions can contain breaking changes, while minor versions only contain new features and bugfixes.
|
||||||
|
|
||||||
## More information
|
## More information
|
||||||
|
|
||||||
|
|||||||
File diff suppressed because it is too large
Load Diff
@@ -1,74 +0,0 @@
|
|||||||
Committers Guidelines for community.general
|
|
||||||
===========================================
|
|
||||||
|
|
||||||
This document is based on the [Ansible committer guidelines](https://github.com/ansible/ansible/blob/b57444af14062ec96e0af75fdfc2098c74fe2d9a/docs/docsite/rst/community/committer_guidelines.rst) ([latest version](https://docs.ansible.com/ansible/devel/community/committer_guidelines.html)).
|
|
||||||
|
|
||||||
These are the guidelines for people with commit privileges on the Ansible Community General Collection GitHub repository. Please read the guidelines before you commit.
|
|
||||||
|
|
||||||
These guidelines apply to everyone. At the same time, this is NOT a process document. So just use good judgment. You have been given commit access because we trust your judgment.
|
|
||||||
|
|
||||||
That said, use the trust wisely.
|
|
||||||
|
|
||||||
If you abuse the trust and break components and builds, and so on, the trust level falls and you may be asked not to commit or you may lose your commit privileges.
|
|
||||||
|
|
||||||
Our workflow on GitHub
|
|
||||||
----------------------
|
|
||||||
|
|
||||||
As a committer, you may already know this, but our workflow forms a lot of our team policies. Please ensure you are aware of the following workflow steps:
|
|
||||||
|
|
||||||
* Fork the repository upon which you want to do some work to your own personal repository
|
|
||||||
* Work on the specific branch upon which you need to commit
|
|
||||||
* Create a Pull Request back to the collection repository and await reviews
|
|
||||||
* Adjust code as necessary based on the Comments provided
|
|
||||||
* Ask someone from the other committers to do a final review and merge
|
|
||||||
|
|
||||||
Sometimes, committers merge their own pull requests. This section is a set of guidelines. If you are changing a comma in a doc or making a very minor change, you can use your best judgement. This is another trust thing. The process is critical for any major change, but for little things or getting something done quickly, use your best judgement and make sure people on the team are aware of your work.
|
|
||||||
|
|
||||||
Roles
|
|
||||||
-----
|
|
||||||
* Release managers: Merge pull requests to `stable-X` branches, create tags to do releases.
|
|
||||||
* Committers: Fine to do PRs for most things, but we should have a timebox. Hanging PRs may merge on the judgement of these devs.
|
|
||||||
* Module maintainers: Module maintainers own specific modules and have indirect commit access through the current module PR mechanisms. This is primary [ansibullbot](https://github.com/ansibullbot)'s `shipit` mechanism.
|
|
||||||
|
|
||||||
General rules
|
|
||||||
-------------
|
|
||||||
Individuals with direct commit access to this collection repository are entrusted with powers that allow them to do a broad variety of things--probably more than we can write down. Rather than rules, treat these as general *guidelines*, individuals with this power are expected to use their best judgement.
|
|
||||||
|
|
||||||
* Do NOTs:
|
|
||||||
|
|
||||||
- Do not commit directly.
|
|
||||||
- Do not merge your own PRs. Someone else should have a chance to review and approve the PR merge. You have a small amount of leeway here for very minor changes.
|
|
||||||
- Do not forget about non-standard / alternate environments. Consider the alternatives. Yes, people have bad/unusual/strange environments (like binaries from multiple init systems installed), but they are the ones who need us the most.
|
|
||||||
- Do not drag your community team members down. Discuss the technical merits of any pull requests you review. Avoid negativity and personal comments. For more guidance on being a good community member, read the [Ansible Community Code of Conduct](https://docs.ansible.com/ansible/latest/community/code_of_conduct.html).
|
|
||||||
- Do not forget about the maintenance burden. High-maintenance features may not be worth adding.
|
|
||||||
- Do not break playbooks. Always keep backwards compatibility in mind.
|
|
||||||
- Do not forget to keep it simple. Complexity breeds all kinds of problems.
|
|
||||||
- Do not merge to branches other than `main`, especially not to `stable-X`, if you do not have explicit permission to do so.
|
|
||||||
- Do not create tags. Tags are used in the release process, and should only be created by the people responsible for managing the stable branches.
|
|
||||||
|
|
||||||
* Do:
|
|
||||||
|
|
||||||
- Squash, avoid merges whenever possible, use GitHub's squash commits or cherry pick if needed (bisect thanks you).
|
|
||||||
- Be active. Committers who have no activity on the project (through merges, triage, commits, and so on) will have their permissions suspended.
|
|
||||||
- Consider backwards compatibility (goes back to "do not break existing playbooks").
|
|
||||||
- Write tests. PRs with tests are looked at with more priority than PRs without tests that should have them included. While not all changes require tests, be sure to add them for bug fixes or functionality changes.
|
|
||||||
- Discuss with other committers, specially when you are unsure of something.
|
|
||||||
- Document! If your PR is a new feature or a change to behavior, make sure you've updated all associated documentation or have notified the right people to do so.
|
|
||||||
- Consider scope, sometimes a fix can be generalized.
|
|
||||||
- Keep it simple, then things are maintainable, debuggable and intelligible.
|
|
||||||
|
|
||||||
Committers are expected to continue to follow the same community and contribution guidelines followed by the rest of the Ansible community.
|
|
||||||
|
|
||||||
|
|
||||||
People
|
|
||||||
------
|
|
||||||
|
|
||||||
Individuals who have been asked to become a part of this group have generally been contributing in significant ways to the community.general collection for some time. Should they agree, they are requested to add their names and GitHub IDs to this file, in the section below, through a pull request. Doing so indicates that these individuals agree to act in the ways that their fellow committers trust that they will act.
|
|
||||||
|
|
||||||
| Name | GitHub ID | IRC Nick | Other |
|
|
||||||
| ------------------- | -------------------- | ------------------ | -------------------- |
|
|
||||||
| Alexei Znamensky | russoz | russoz | |
|
|
||||||
| Andrew Klychkov | andersson007 | andersson007_ | |
|
|
||||||
| Andrew Pantuso | Ajpantuso | ajpantuso | |
|
|
||||||
| Felix Fontein | felixfontein | felixfontein | |
|
|
||||||
| John R Barker | gundalow | gundalow | |
|
|
||||||
@@ -1,6 +0,0 @@
|
|||||||
---
|
|
||||||
sections:
|
|
||||||
- title: Guides
|
|
||||||
toctree:
|
|
||||||
- filter_guide
|
|
||||||
- test_guide
|
|
||||||
@@ -1,13 +0,0 @@
|
|||||||
list1:
|
|
||||||
- name: foo
|
|
||||||
extra: true
|
|
||||||
- name: bar
|
|
||||||
extra: false
|
|
||||||
- name: meh
|
|
||||||
extra: true
|
|
||||||
|
|
||||||
list2:
|
|
||||||
- name: foo
|
|
||||||
path: /foo
|
|
||||||
- name: baz
|
|
||||||
path: /baz
|
|
||||||
@@ -1,19 +0,0 @@
|
|||||||
list1:
|
|
||||||
- name: myname01
|
|
||||||
param01:
|
|
||||||
x: default_value
|
|
||||||
y: default_value
|
|
||||||
list:
|
|
||||||
- default_value
|
|
||||||
- name: myname02
|
|
||||||
param01: [1, 1, 2, 3]
|
|
||||||
|
|
||||||
list2:
|
|
||||||
- name: myname01
|
|
||||||
param01:
|
|
||||||
y: patch_value
|
|
||||||
z: patch_value
|
|
||||||
list:
|
|
||||||
- patch_value
|
|
||||||
- name: myname02
|
|
||||||
param01: [3, 4, 4, {key: value}]
|
|
||||||
@@ -1,10 +0,0 @@
|
|||||||
---
|
|
||||||
- name: 1. Merge two lists by common attribute 'name'
|
|
||||||
include_vars:
|
|
||||||
dir: example-001_vars
|
|
||||||
- debug:
|
|
||||||
var: list3
|
|
||||||
when: debug|d(false)|bool
|
|
||||||
- template:
|
|
||||||
src: list3.out.j2
|
|
||||||
dest: example-001.out
|
|
||||||
@@ -1 +0,0 @@
|
|||||||
../default-common.yml
|
|
||||||
@@ -1,2 +0,0 @@
|
|||||||
list3: "{{ list1|
|
|
||||||
community.general.lists_mergeby(list2, 'name') }}"
|
|
||||||
@@ -1,10 +0,0 @@
|
|||||||
---
|
|
||||||
- name: 2. Merge two lists by common attribute 'name'
|
|
||||||
include_vars:
|
|
||||||
dir: example-002_vars
|
|
||||||
- debug:
|
|
||||||
var: list3
|
|
||||||
when: debug|d(false)|bool
|
|
||||||
- template:
|
|
||||||
src: list3.out.j2
|
|
||||||
dest: example-002.out
|
|
||||||
@@ -1 +0,0 @@
|
|||||||
../default-common.yml
|
|
||||||
@@ -1,2 +0,0 @@
|
|||||||
list3: "{{ [list1, list2]|
|
|
||||||
community.general.lists_mergeby('name') }}"
|
|
||||||
@@ -1,10 +0,0 @@
|
|||||||
---
|
|
||||||
- name: 3. Merge recursive by 'name', replace lists (default)
|
|
||||||
include_vars:
|
|
||||||
dir: example-003_vars
|
|
||||||
- debug:
|
|
||||||
var: list3
|
|
||||||
when: debug|d(false)|bool
|
|
||||||
- template:
|
|
||||||
src: list3.out.j2
|
|
||||||
dest: example-003.out
|
|
||||||
@@ -1 +0,0 @@
|
|||||||
../default-recursive-true.yml
|
|
||||||
@@ -1,3 +0,0 @@
|
|||||||
list3: "{{ [list1, list2]|
|
|
||||||
community.general.lists_mergeby('name',
|
|
||||||
recursive=true) }}"
|
|
||||||
@@ -1,10 +0,0 @@
|
|||||||
---
|
|
||||||
- name: 4. Merge recursive by 'name', keep lists
|
|
||||||
include_vars:
|
|
||||||
dir: example-004_vars
|
|
||||||
- debug:
|
|
||||||
var: list3
|
|
||||||
when: debug|d(false)|bool
|
|
||||||
- template:
|
|
||||||
src: list3.out.j2
|
|
||||||
dest: example-004.out
|
|
||||||
@@ -1 +0,0 @@
|
|||||||
../default-recursive-true.yml
|
|
||||||
@@ -1,4 +0,0 @@
|
|||||||
list3: "{{ [list1, list2]|
|
|
||||||
community.general.lists_mergeby('name',
|
|
||||||
recursive=true,
|
|
||||||
list_merge='keep') }}"
|
|
||||||
@@ -1,10 +0,0 @@
|
|||||||
---
|
|
||||||
- name: 5. Merge recursive by 'name', append lists
|
|
||||||
include_vars:
|
|
||||||
dir: example-005_vars
|
|
||||||
- debug:
|
|
||||||
var: list3
|
|
||||||
when: debug|d(false)|bool
|
|
||||||
- template:
|
|
||||||
src: list3.out.j2
|
|
||||||
dest: example-005.out
|
|
||||||
@@ -1 +0,0 @@
|
|||||||
../default-recursive-true.yml
|
|
||||||
@@ -1,4 +0,0 @@
|
|||||||
list3: "{{ [list1, list2]|
|
|
||||||
community.general.lists_mergeby('name',
|
|
||||||
recursive=true,
|
|
||||||
list_merge='append') }}"
|
|
||||||
@@ -1,10 +0,0 @@
|
|||||||
---
|
|
||||||
- name: 6. Merge recursive by 'name', prepend lists
|
|
||||||
include_vars:
|
|
||||||
dir: example-006_vars
|
|
||||||
- debug:
|
|
||||||
var: list3
|
|
||||||
when: debug|d(false)|bool
|
|
||||||
- template:
|
|
||||||
src: list3.out.j2
|
|
||||||
dest: example-006.out
|
|
||||||
@@ -1 +0,0 @@
|
|||||||
../default-recursive-true.yml
|
|
||||||
@@ -1,4 +0,0 @@
|
|||||||
list3: "{{ [list1, list2]|
|
|
||||||
community.general.lists_mergeby('name',
|
|
||||||
recursive=true,
|
|
||||||
list_merge='prepend') }}"
|
|
||||||
@@ -1,10 +0,0 @@
|
|||||||
---
|
|
||||||
- name: 7. Merge recursive by 'name', append lists 'remove present'
|
|
||||||
include_vars:
|
|
||||||
dir: example-007_vars
|
|
||||||
- debug:
|
|
||||||
var: list3
|
|
||||||
when: debug|d(false)|bool
|
|
||||||
- template:
|
|
||||||
src: list3.out.j2
|
|
||||||
dest: example-007.out
|
|
||||||
@@ -1 +0,0 @@
|
|||||||
../default-recursive-true.yml
|
|
||||||
@@ -1,4 +0,0 @@
|
|||||||
list3: "{{ [list1, list2]|
|
|
||||||
community.general.lists_mergeby('name',
|
|
||||||
recursive=true,
|
|
||||||
list_merge='append_rp') }}"
|
|
||||||
@@ -1,10 +0,0 @@
|
|||||||
---
|
|
||||||
- name: 8. Merge recursive by 'name', prepend lists 'remove present'
|
|
||||||
include_vars:
|
|
||||||
dir: example-008_vars
|
|
||||||
- debug:
|
|
||||||
var: list3
|
|
||||||
when: debug|d(false)|bool
|
|
||||||
- template:
|
|
||||||
src: list3.out.j2
|
|
||||||
dest: example-008.out
|
|
||||||
@@ -1 +0,0 @@
|
|||||||
../default-recursive-true.yml
|
|
||||||
@@ -1,4 +0,0 @@
|
|||||||
list3: "{{ [list1, list2]|
|
|
||||||
community.general.lists_mergeby('name',
|
|
||||||
recursive=true,
|
|
||||||
list_merge='prepend_rp') }}"
|
|
||||||
@@ -1,50 +0,0 @@
|
|||||||
---
|
|
||||||
examples:
|
|
||||||
- label: 'In the example below the lists are merged by the attribute ``name``:'
|
|
||||||
file: example-001_vars/list3.yml
|
|
||||||
lang: 'yaml+jinja'
|
|
||||||
- label: 'This produces:'
|
|
||||||
file: example-001.out
|
|
||||||
lang: 'yaml'
|
|
||||||
- label: 'It is possible to use a list of lists as an input of the filter:'
|
|
||||||
file: example-002_vars/list3.yml
|
|
||||||
lang: 'yaml+jinja'
|
|
||||||
- label: 'This produces the same result as in the previous example:'
|
|
||||||
file: example-002.out
|
|
||||||
lang: 'yaml'
|
|
||||||
- label: 'Example ``list_merge=replace`` (default):'
|
|
||||||
file: example-003_vars/list3.yml
|
|
||||||
lang: 'yaml+jinja'
|
|
||||||
- label: 'This produces:'
|
|
||||||
file: example-003.out
|
|
||||||
lang: 'yaml'
|
|
||||||
- label: 'Example ``list_merge=keep``:'
|
|
||||||
file: example-004_vars/list3.yml
|
|
||||||
lang: 'yaml+jinja'
|
|
||||||
- label: 'This produces:'
|
|
||||||
file: example-004.out
|
|
||||||
lang: 'yaml'
|
|
||||||
- label: 'Example ``list_merge=append``:'
|
|
||||||
file: example-005_vars/list3.yml
|
|
||||||
lang: 'yaml+jinja'
|
|
||||||
- label: 'This produces:'
|
|
||||||
file: example-005.out
|
|
||||||
lang: 'yaml'
|
|
||||||
- label: 'Example ``list_merge=prepend``:'
|
|
||||||
file: example-006_vars/list3.yml
|
|
||||||
lang: 'yaml+jinja'
|
|
||||||
- label: 'This produces:'
|
|
||||||
file: example-006.out
|
|
||||||
lang: 'yaml'
|
|
||||||
- label: 'Example ``list_merge=append_rp``:'
|
|
||||||
file: example-007_vars/list3.yml
|
|
||||||
lang: 'yaml+jinja'
|
|
||||||
- label: 'This produces:'
|
|
||||||
file: example-007.out
|
|
||||||
lang: 'yaml'
|
|
||||||
- label: 'Example ``list_merge=prepend_rp``:'
|
|
||||||
file: example-008_vars/list3.yml
|
|
||||||
lang: 'yaml+jinja'
|
|
||||||
- label: 'This produces:'
|
|
||||||
file: example-008.out
|
|
||||||
lang: 'yaml'
|
|
||||||
@@ -1,8 +0,0 @@
|
|||||||
{% for i in examples %}
|
|
||||||
{{ i.label }}
|
|
||||||
|
|
||||||
.. code-block:: {{ i.lang }}
|
|
||||||
|
|
||||||
{{ lookup('file', i.file)|indent(2) }}
|
|
||||||
|
|
||||||
{% endfor %}
|
|
||||||
@@ -1,57 +0,0 @@
|
|||||||
Merging lists of dictionaries
|
|
||||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
|
||||||
|
|
||||||
If you have two or more lists of dictionaries and want to combine them into a list of merged dictionaries, where the dictionaries are merged by an attribute, you can use the ``lists_mergeby`` filter.
|
|
||||||
|
|
||||||
.. note:: The output of the examples in this section uses the YAML callback plugin. Quoting: "Ansible output that can be quite a bit easier to read than the default JSON formatting." See :ref:`the documentation for the community.general.yaml callback plugin <ansible_collections.community.general.yaml_callback>`.
|
|
||||||
|
|
||||||
Let us use the lists below in the following examples:
|
|
||||||
|
|
||||||
.. code-block:: yaml
|
|
||||||
|
|
||||||
{{ lookup('file', 'default-common.yml')|indent(2) }}
|
|
||||||
|
|
||||||
{% for i in examples[0:2] %}
|
|
||||||
{{ i.label }}
|
|
||||||
|
|
||||||
.. code-block:: {{ i.lang }}
|
|
||||||
|
|
||||||
{{ lookup('file', i.file)|indent(2) }}
|
|
||||||
|
|
||||||
{% endfor %}
|
|
||||||
|
|
||||||
.. versionadded:: 2.0.0
|
|
||||||
|
|
||||||
{% for i in examples[2:4] %}
|
|
||||||
{{ i.label }}
|
|
||||||
|
|
||||||
.. code-block:: {{ i.lang }}
|
|
||||||
|
|
||||||
{{ lookup('file', i.file)|indent(2) }}
|
|
||||||
|
|
||||||
{% endfor %}
|
|
||||||
|
|
||||||
The filter also accepts two optional parameters: ``recursive`` and ``list_merge``. These parameters are only supported when used with ansible-base 2.10 or ansible-core, but not with Ansible 2.9. This is available since community.general 4.4.0.
|
|
||||||
|
|
||||||
**recursive**
|
|
||||||
Is a boolean, defaults to ``False``. Should the ``community.general.lists_mergeby`` recursively merge nested hashes. Note: It does not depend on the value of the ``hash_behaviour`` setting in ``ansible.cfg``.
|
|
||||||
|
|
||||||
**list_merge**
|
|
||||||
Is a string, its possible values are ``replace`` (default), ``keep``, ``append``, ``prepend``, ``append_rp`` or ``prepend_rp``. It modifies the behaviour of ``community.general.lists_mergeby`` when the hashes to merge contain arrays/lists.
|
|
||||||
|
|
||||||
The examples below set ``recursive=true`` and display the differences among all six options of ``list_merge``. Functionality of the parameters is exactly the same as in the filter ``combine``. See :ref:`Combining hashes/dictionaries <combine_filter>` to learn details about these options.
|
|
||||||
|
|
||||||
Let us use the lists below in the following examples:
|
|
||||||
|
|
||||||
.. code-block:: yaml
|
|
||||||
|
|
||||||
{{ lookup('file', 'default-recursive-true.yml')|indent(2) }}
|
|
||||||
|
|
||||||
{% for i in examples[4:16] %}
|
|
||||||
{{ i.label }}
|
|
||||||
|
|
||||||
.. code-block:: {{ i.lang }}
|
|
||||||
|
|
||||||
{{ lookup('file', i.file)|indent(2) }}
|
|
||||||
|
|
||||||
{% endfor %}
|
|
||||||
@@ -1,2 +0,0 @@
|
|||||||
list3:
|
|
||||||
{{ list3|to_nice_yaml(indent=0) }}
|
|
||||||
@@ -1,59 +0,0 @@
|
|||||||
---
|
|
||||||
|
|
||||||
# - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
|
|
||||||
# 1) Run all examples and create example-XXX.out
|
|
||||||
# shell> ansible-playbook playbook.yml -e examples=true
|
|
||||||
#
|
|
||||||
# 2) Optionally, for testing, create examples_all.rst
|
|
||||||
# shell> ansible-playbook playbook.yml -e examples_all=true
|
|
||||||
#
|
|
||||||
# 3) Create docs REST files
|
|
||||||
# shell> ansible-playbook playbook.yml -e merging_lists_of_dictionaries=true
|
|
||||||
#
|
|
||||||
# Notes:
|
|
||||||
# * Use YAML callback, e.g. set ANSIBLE_STDOUT_CALLBACK=community.general.yaml
|
|
||||||
# * Use sphinx-view to render and review the REST files
|
|
||||||
# shell> sphinx-view <path_to_helper>/examples_all.rst
|
|
||||||
# * Proofread and copy completed docs *.rst files into the directory rst.
|
|
||||||
# * Then delete the *.rst and *.out files from this directory. Do not
|
|
||||||
# add *.rst and *.out in this directory to the version control.
|
|
||||||
#
|
|
||||||
# - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
|
|
||||||
# community.general/docs/docsite/helper/lists_mergeby/playbook.yml
|
|
||||||
|
|
||||||
- hosts: localhost
|
|
||||||
gather_facts: false
|
|
||||||
tasks:
|
|
||||||
|
|
||||||
- block:
|
|
||||||
- import_tasks: example-001.yml
|
|
||||||
tags: t001
|
|
||||||
- import_tasks: example-002.yml
|
|
||||||
tags: t002
|
|
||||||
- import_tasks: example-003.yml
|
|
||||||
tags: t003
|
|
||||||
- import_tasks: example-004.yml
|
|
||||||
tags: t004
|
|
||||||
- import_tasks: example-005.yml
|
|
||||||
tags: t005
|
|
||||||
- import_tasks: example-006.yml
|
|
||||||
tags: t006
|
|
||||||
- import_tasks: example-007.yml
|
|
||||||
tags: t007
|
|
||||||
- import_tasks: example-008.yml
|
|
||||||
tags: t008
|
|
||||||
when: examples|d(false)|bool
|
|
||||||
|
|
||||||
- block:
|
|
||||||
- include_vars: examples.yml
|
|
||||||
- template:
|
|
||||||
src: examples_all.rst.j2
|
|
||||||
dest: examples_all.rst
|
|
||||||
when: examples_all|d(false)|bool
|
|
||||||
|
|
||||||
- block:
|
|
||||||
- include_vars: examples.yml
|
|
||||||
- template:
|
|
||||||
src: filter_guide_abstract_informations_merging_lists_of_dictionaries.rst.j2
|
|
||||||
dest: filter_guide_abstract_informations_merging_lists_of_dictionaries.rst
|
|
||||||
when: merging_lists_of_dictionaries|d(false)|bool
|
|
||||||
@@ -1,23 +0,0 @@
|
|||||||
---
|
|
||||||
edit_on_github:
|
|
||||||
repository: ansible-collections/community.general
|
|
||||||
branch: main
|
|
||||||
path_prefix: ''
|
|
||||||
|
|
||||||
extra_links:
|
|
||||||
- description: Submit a bug report
|
|
||||||
url: https://github.com/ansible-collections/community.general/issues/new?assignees=&labels=&template=bug_report.yml
|
|
||||||
- description: Request a feature
|
|
||||||
url: https://github.com/ansible-collections/community.general/issues/new?assignees=&labels=&template=feature_request.yml
|
|
||||||
|
|
||||||
communication:
|
|
||||||
matrix_rooms:
|
|
||||||
- topic: General usage and support questions
|
|
||||||
room: '#users:ansible.im'
|
|
||||||
irc_channels:
|
|
||||||
- topic: General usage and support questions
|
|
||||||
network: Libera
|
|
||||||
channel: '#ansible'
|
|
||||||
mailing_lists:
|
|
||||||
- topic: Ansible Project List
|
|
||||||
url: https://groups.google.com/g/ansible-project
|
|
||||||
@@ -1,19 +0,0 @@
|
|||||||
|
|
||||||
.. _ansible_collections.community.general.docsite.filter_guide:
|
|
||||||
|
|
||||||
community.general Filter Guide
|
|
||||||
==============================
|
|
||||||
|
|
||||||
The :ref:`community.general collection <plugins_in_community.general>` offers several useful filter plugins.
|
|
||||||
|
|
||||||
.. toctree::
|
|
||||||
:maxdepth: 2
|
|
||||||
|
|
||||||
filter_guide_paths
|
|
||||||
filter_guide_abstract_informations
|
|
||||||
filter_guide_working_with_times
|
|
||||||
filter_guide_working_with_versions
|
|
||||||
filter_guide_creating_identifiers
|
|
||||||
filter_guide_conversions
|
|
||||||
filter_guide_selecting_json_data
|
|
||||||
filter_guide_working_with_unicode
|
|
||||||
@@ -1,10 +0,0 @@
|
|||||||
Abstract transformations
|
|
||||||
------------------------
|
|
||||||
|
|
||||||
.. toctree::
|
|
||||||
:maxdepth: 1
|
|
||||||
|
|
||||||
filter_guide_abstract_informations_dictionaries
|
|
||||||
filter_guide_abstract_informations_grouping
|
|
||||||
filter_guide_abstract_informations_merging_lists_of_dictionaries
|
|
||||||
filter_guide_abstract_informations_counting_elements_in_sequence
|
|
||||||
@@ -1,77 +0,0 @@
|
|||||||
Counting elements in a sequence
|
|
||||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
|
||||||
|
|
||||||
The ``community.general.counter`` filter plugin allows you to count (hashable) elements in a sequence. Elements are returned as dictionary keys and their counts are stored as dictionary values.
|
|
||||||
|
|
||||||
.. code-block:: yaml+jinja
|
|
||||||
|
|
||||||
- name: Count character occurrences in a string
|
|
||||||
debug:
|
|
||||||
msg: "{{ 'abccbaabca' | community.general.counter }}"
|
|
||||||
|
|
||||||
- name: Count items in a list
|
|
||||||
debug:
|
|
||||||
msg: "{{ ['car', 'car', 'bike', 'plane', 'bike'] | community.general.counter }}"
|
|
||||||
|
|
||||||
This produces:
|
|
||||||
|
|
||||||
.. code-block:: ansible-output
|
|
||||||
|
|
||||||
TASK [Count character occurrences in a string] ********************************************
|
|
||||||
ok: [localhost] => {
|
|
||||||
"msg": {
|
|
||||||
"a": 4,
|
|
||||||
"b": 3,
|
|
||||||
"c": 3
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
TASK [Count items in a list] **************************************************************
|
|
||||||
ok: [localhost] => {
|
|
||||||
"msg": {
|
|
||||||
"bike": 2,
|
|
||||||
"car": 2,
|
|
||||||
"plane": 1
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
This plugin is useful for selecting resources based on current allocation:
|
|
||||||
|
|
||||||
.. code-block:: yaml+jinja
|
|
||||||
|
|
||||||
- name: Get ID of SCSI controller(s) with less than 4 disks attached and choose the one with the least disks
|
|
||||||
debug:
|
|
||||||
msg: >-
|
|
||||||
{{
|
|
||||||
( disks | dict2items | map(attribute='value.adapter') | list
|
|
||||||
| community.general.counter | dict2items
|
|
||||||
| rejectattr('value', '>=', 4) | sort(attribute='value') | first
|
|
||||||
).key
|
|
||||||
}}
|
|
||||||
vars:
|
|
||||||
disks:
|
|
||||||
sda:
|
|
||||||
adapter: scsi_1
|
|
||||||
sdb:
|
|
||||||
adapter: scsi_1
|
|
||||||
sdc:
|
|
||||||
adapter: scsi_1
|
|
||||||
sdd:
|
|
||||||
adapter: scsi_1
|
|
||||||
sde:
|
|
||||||
adapter: scsi_2
|
|
||||||
sdf:
|
|
||||||
adapter: scsi_3
|
|
||||||
sdg:
|
|
||||||
adapter: scsi_3
|
|
||||||
|
|
||||||
This produces:
|
|
||||||
|
|
||||||
.. code-block:: ansible-output
|
|
||||||
|
|
||||||
TASK [Get ID of SCSI controller(s) with less than 4 disks attached and choose the one with the least disks]
|
|
||||||
ok: [localhost] => {
|
|
||||||
"msg": "scsi_2"
|
|
||||||
}
|
|
||||||
|
|
||||||
.. versionadded:: 4.3.0
|
|
||||||
@@ -1,119 +0,0 @@
|
|||||||
Dictionaries
|
|
||||||
^^^^^^^^^^^^
|
|
||||||
|
|
||||||
You can use the ``dict_kv`` filter to create a single-entry dictionary with ``value | community.general.dict_kv(key)``:
|
|
||||||
|
|
||||||
.. code-block:: yaml+jinja
|
|
||||||
|
|
||||||
- name: Create a single-entry dictionary
|
|
||||||
debug:
|
|
||||||
msg: "{{ myvar | community.general.dict_kv('thatsmyvar') }}"
|
|
||||||
vars:
|
|
||||||
myvar: myvalue
|
|
||||||
|
|
||||||
- name: Create a list of dictionaries where the 'server' field is taken from a list
|
|
||||||
debug:
|
|
||||||
msg: >-
|
|
||||||
{{ myservers | map('community.general.dict_kv', 'server')
|
|
||||||
| map('combine', common_config) }}
|
|
||||||
vars:
|
|
||||||
common_config:
|
|
||||||
type: host
|
|
||||||
database: all
|
|
||||||
myservers:
|
|
||||||
- server1
|
|
||||||
- server2
|
|
||||||
|
|
||||||
This produces:
|
|
||||||
|
|
||||||
.. code-block:: ansible-output
|
|
||||||
|
|
||||||
TASK [Create a single-entry dictionary] **************************************************
|
|
||||||
ok: [localhost] => {
|
|
||||||
"msg": {
|
|
||||||
"thatsmyvar": "myvalue"
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
TASK [Create a list of dictionaries where the 'server' field is taken from a list] *******
|
|
||||||
ok: [localhost] => {
|
|
||||||
"msg": [
|
|
||||||
{
|
|
||||||
"database": "all",
|
|
||||||
"server": "server1",
|
|
||||||
"type": "host"
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"database": "all",
|
|
||||||
"server": "server2",
|
|
||||||
"type": "host"
|
|
||||||
}
|
|
||||||
]
|
|
||||||
}
|
|
||||||
|
|
||||||
.. versionadded:: 2.0.0
|
|
||||||
|
|
||||||
If you need to convert a list of key-value pairs to a dictionary, you can use the ``dict`` function. Unfortunately, this function cannot be used with ``map``. For this, the ``community.general.dict`` filter can be used:
|
|
||||||
|
|
||||||
.. code-block:: yaml+jinja
|
|
||||||
|
|
||||||
- name: Create a dictionary with the dict function
|
|
||||||
debug:
|
|
||||||
msg: "{{ dict([[1, 2], ['a', 'b']]) }}"
|
|
||||||
|
|
||||||
- name: Create a dictionary with the community.general.dict filter
|
|
||||||
debug:
|
|
||||||
msg: "{{ [[1, 2], ['a', 'b']] | community.general.dict }}"
|
|
||||||
|
|
||||||
- name: Create a list of dictionaries with map and the community.general.dict filter
|
|
||||||
debug:
|
|
||||||
msg: >-
|
|
||||||
{{ values | map('zip', ['k1', 'k2', 'k3'])
|
|
||||||
| map('map', 'reverse')
|
|
||||||
| map('community.general.dict') }}
|
|
||||||
vars:
|
|
||||||
values:
|
|
||||||
- - foo
|
|
||||||
- 23
|
|
||||||
- a
|
|
||||||
- - bar
|
|
||||||
- 42
|
|
||||||
- b
|
|
||||||
|
|
||||||
This produces:
|
|
||||||
|
|
||||||
.. code-block:: ansible-output
|
|
||||||
|
|
||||||
TASK [Create a dictionary with the dict function] ****************************************
|
|
||||||
ok: [localhost] => {
|
|
||||||
"msg": {
|
|
||||||
"1": 2,
|
|
||||||
"a": "b"
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
TASK [Create a dictionary with the community.general.dict filter] ************************
|
|
||||||
ok: [localhost] => {
|
|
||||||
"msg": {
|
|
||||||
"1": 2,
|
|
||||||
"a": "b"
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
TASK [Create a list of dictionaries with map and the community.general.dict filter] ******
|
|
||||||
ok: [localhost] => {
|
|
||||||
"msg": [
|
|
||||||
{
|
|
||||||
"k1": "foo",
|
|
||||||
"k2": 23,
|
|
||||||
"k3": "a"
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"k1": "bar",
|
|
||||||
"k2": 42,
|
|
||||||
"k3": "b"
|
|
||||||
}
|
|
||||||
]
|
|
||||||
}
|
|
||||||
|
|
||||||
.. versionadded:: 3.0.0
|
|
||||||
@@ -1,98 +0,0 @@
|
|||||||
Grouping
|
|
||||||
^^^^^^^^
|
|
||||||
|
|
||||||
If you have a list of dictionaries, the Jinja2 ``groupby`` filter allows to group the list by an attribute. This results in a list of ``(grouper, list)`` namedtuples, where ``list`` contains all dictionaries where the selected attribute equals ``grouper``. If you know that for every ``grouper``, there will be at most one entry in that list, you can use the ``community.general.groupby_as_dict`` filter to convert the original list into a dictionary which maps ``grouper`` to the corresponding dictionary.
|
|
||||||
|
|
||||||
One example is ``ansible_facts.mounts``, which is a list of dictionaries where each has one ``device`` element to indicate the device which is mounted. Therefore, ``ansible_facts.mounts | community.general.groupby_as_dict('device')`` is a dictionary mapping a device to the mount information:
|
|
||||||
|
|
||||||
.. code-block:: yaml+jinja
|
|
||||||
|
|
||||||
- name: Output mount facts grouped by device name
|
|
||||||
debug:
|
|
||||||
var: ansible_facts.mounts | community.general.groupby_as_dict('device')
|
|
||||||
|
|
||||||
- name: Output mount facts grouped by mount point
|
|
||||||
debug:
|
|
||||||
var: ansible_facts.mounts | community.general.groupby_as_dict('mount')
|
|
||||||
|
|
||||||
This produces:
|
|
||||||
|
|
||||||
.. code-block:: ansible-output
|
|
||||||
|
|
||||||
TASK [Output mount facts grouped by device name] ******************************************
|
|
||||||
ok: [localhost] => {
|
|
||||||
"ansible_facts.mounts | community.general.groupby_as_dict('device')": {
|
|
||||||
"/dev/sda1": {
|
|
||||||
"block_available": 2000,
|
|
||||||
"block_size": 4096,
|
|
||||||
"block_total": 2345,
|
|
||||||
"block_used": 345,
|
|
||||||
"device": "/dev/sda1",
|
|
||||||
"fstype": "ext4",
|
|
||||||
"inode_available": 500,
|
|
||||||
"inode_total": 512,
|
|
||||||
"inode_used": 12,
|
|
||||||
"mount": "/boot",
|
|
||||||
"options": "rw,relatime,data=ordered",
|
|
||||||
"size_available": 56821,
|
|
||||||
"size_total": 543210,
|
|
||||||
"uuid": "ab31cade-d9c1-484d-8482-8a4cbee5241a"
|
|
||||||
},
|
|
||||||
"/dev/sda2": {
|
|
||||||
"block_available": 1234,
|
|
||||||
"block_size": 4096,
|
|
||||||
"block_total": 12345,
|
|
||||||
"block_used": 11111,
|
|
||||||
"device": "/dev/sda2",
|
|
||||||
"fstype": "ext4",
|
|
||||||
"inode_available": 1111,
|
|
||||||
"inode_total": 1234,
|
|
||||||
"inode_used": 123,
|
|
||||||
"mount": "/",
|
|
||||||
"options": "rw,relatime",
|
|
||||||
"size_available": 42143,
|
|
||||||
"size_total": 543210,
|
|
||||||
"uuid": "abcdef01-2345-6789-0abc-def012345678"
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
TASK [Output mount facts grouped by mount point] ******************************************
|
|
||||||
ok: [localhost] => {
|
|
||||||
"ansible_facts.mounts | community.general.groupby_as_dict('mount')": {
|
|
||||||
"/": {
|
|
||||||
"block_available": 1234,
|
|
||||||
"block_size": 4096,
|
|
||||||
"block_total": 12345,
|
|
||||||
"block_used": 11111,
|
|
||||||
"device": "/dev/sda2",
|
|
||||||
"fstype": "ext4",
|
|
||||||
"inode_available": 1111,
|
|
||||||
"inode_total": 1234,
|
|
||||||
"inode_used": 123,
|
|
||||||
"mount": "/",
|
|
||||||
"options": "rw,relatime",
|
|
||||||
"size_available": 42143,
|
|
||||||
"size_total": 543210,
|
|
||||||
"uuid": "bdf50b7d-4859-40af-8665-c637ee7a7808"
|
|
||||||
},
|
|
||||||
"/boot": {
|
|
||||||
"block_available": 2000,
|
|
||||||
"block_size": 4096,
|
|
||||||
"block_total": 2345,
|
|
||||||
"block_used": 345,
|
|
||||||
"device": "/dev/sda1",
|
|
||||||
"fstype": "ext4",
|
|
||||||
"inode_available": 500,
|
|
||||||
"inode_total": 512,
|
|
||||||
"inode_used": 12,
|
|
||||||
"mount": "/boot",
|
|
||||||
"options": "rw,relatime,data=ordered",
|
|
||||||
"size_available": 56821,
|
|
||||||
"size_total": 543210,
|
|
||||||
"uuid": "ab31cade-d9c1-484d-8482-8a4cbee5241a"
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
.. versionadded:: 3.0.0
|
|
||||||
@@ -1,292 +0,0 @@
|
|||||||
Merging lists of dictionaries
|
|
||||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
|
||||||
|
|
||||||
If you have two or more lists of dictionaries and want to combine them into a list of merged dictionaries, where the dictionaries are merged by an attribute, you can use the ``lists_mergeby`` filter.
|
|
||||||
|
|
||||||
.. note:: The output of the examples in this section uses the YAML callback plugin. Quoting: "Ansible output that can be quite a bit easier to read than the default JSON formatting." See :ref:`the documentation for the community.general.yaml callback plugin <ansible_collections.community.general.yaml_callback>`.
|
|
||||||
|
|
||||||
Let us use the lists below in the following examples:
|
|
||||||
|
|
||||||
.. code-block:: yaml
|
|
||||||
|
|
||||||
list1:
|
|
||||||
- name: foo
|
|
||||||
extra: true
|
|
||||||
- name: bar
|
|
||||||
extra: false
|
|
||||||
- name: meh
|
|
||||||
extra: true
|
|
||||||
|
|
||||||
list2:
|
|
||||||
- name: foo
|
|
||||||
path: /foo
|
|
||||||
- name: baz
|
|
||||||
path: /baz
|
|
||||||
|
|
||||||
In the example below the lists are merged by the attribute ``name``:
|
|
||||||
|
|
||||||
.. code-block:: yaml+jinja
|
|
||||||
|
|
||||||
list3: "{{ list1|
|
|
||||||
community.general.lists_mergeby(list2, 'name') }}"
|
|
||||||
|
|
||||||
This produces:
|
|
||||||
|
|
||||||
.. code-block:: yaml
|
|
||||||
|
|
||||||
list3:
|
|
||||||
- extra: false
|
|
||||||
name: bar
|
|
||||||
- name: baz
|
|
||||||
path: /baz
|
|
||||||
- extra: true
|
|
||||||
name: foo
|
|
||||||
path: /foo
|
|
||||||
- extra: true
|
|
||||||
name: meh
|
|
||||||
|
|
||||||
|
|
||||||
.. versionadded:: 2.0.0
|
|
||||||
|
|
||||||
It is possible to use a list of lists as an input of the filter:
|
|
||||||
|
|
||||||
.. code-block:: yaml+jinja
|
|
||||||
|
|
||||||
list3: "{{ [list1, list2]|
|
|
||||||
community.general.lists_mergeby('name') }}"
|
|
||||||
|
|
||||||
This produces the same result as in the previous example:
|
|
||||||
|
|
||||||
.. code-block:: yaml
|
|
||||||
|
|
||||||
list3:
|
|
||||||
- extra: false
|
|
||||||
name: bar
|
|
||||||
- name: baz
|
|
||||||
path: /baz
|
|
||||||
- extra: true
|
|
||||||
name: foo
|
|
||||||
path: /foo
|
|
||||||
- extra: true
|
|
||||||
name: meh
|
|
||||||
|
|
||||||
|
|
||||||
The filter also accepts two optional parameters: ``recursive`` and ``list_merge``. These parameters are only supported when used with ansible-base 2.10 or ansible-core, but not with Ansible 2.9. This is available since community.general 4.4.0.
|
|
||||||
|
|
||||||
**recursive**
|
|
||||||
Is a boolean, defaults to ``False``. Should the ``community.general.lists_mergeby`` recursively merge nested hashes. Note: It does not depend on the value of the ``hash_behaviour`` setting in ``ansible.cfg``.
|
|
||||||
|
|
||||||
**list_merge**
|
|
||||||
Is a string, its possible values are ``replace`` (default), ``keep``, ``append``, ``prepend``, ``append_rp`` or ``prepend_rp``. It modifies the behaviour of ``community.general.lists_mergeby`` when the hashes to merge contain arrays/lists.
|
|
||||||
|
|
||||||
The examples below set ``recursive=true`` and display the differences among all six options of ``list_merge``. Functionality of the parameters is exactly the same as in the filter ``combine``. See :ref:`Combining hashes/dictionaries <combine_filter>` to learn details about these options.
|
|
||||||
|
|
||||||
Let us use the lists below in the following examples:
|
|
||||||
|
|
||||||
.. code-block:: yaml
|
|
||||||
|
|
||||||
list1:
|
|
||||||
- name: myname01
|
|
||||||
param01:
|
|
||||||
x: default_value
|
|
||||||
y: default_value
|
|
||||||
list:
|
|
||||||
- default_value
|
|
||||||
- name: myname02
|
|
||||||
param01: [1, 1, 2, 3]
|
|
||||||
|
|
||||||
list2:
|
|
||||||
- name: myname01
|
|
||||||
param01:
|
|
||||||
y: patch_value
|
|
||||||
z: patch_value
|
|
||||||
list:
|
|
||||||
- patch_value
|
|
||||||
- name: myname02
|
|
||||||
param01: [3, 4, 4, {key: value}]
|
|
||||||
|
|
||||||
Example ``list_merge=replace`` (default):
|
|
||||||
|
|
||||||
.. code-block:: yaml+jinja
|
|
||||||
|
|
||||||
list3: "{{ [list1, list2]|
|
|
||||||
community.general.lists_mergeby('name',
|
|
||||||
recursive=true) }}"
|
|
||||||
|
|
||||||
This produces:
|
|
||||||
|
|
||||||
.. code-block:: yaml
|
|
||||||
|
|
||||||
list3:
|
|
||||||
- name: myname01
|
|
||||||
param01:
|
|
||||||
list:
|
|
||||||
- patch_value
|
|
||||||
x: default_value
|
|
||||||
y: patch_value
|
|
||||||
z: patch_value
|
|
||||||
- name: myname02
|
|
||||||
param01:
|
|
||||||
- 3
|
|
||||||
- 4
|
|
||||||
- 4
|
|
||||||
- key: value
|
|
||||||
|
|
||||||
Example ``list_merge=keep``:
|
|
||||||
|
|
||||||
.. code-block:: yaml+jinja
|
|
||||||
|
|
||||||
list3: "{{ [list1, list2]|
|
|
||||||
community.general.lists_mergeby('name',
|
|
||||||
recursive=true,
|
|
||||||
list_merge='keep') }}"
|
|
||||||
|
|
||||||
This produces:
|
|
||||||
|
|
||||||
.. code-block:: yaml
|
|
||||||
|
|
||||||
list3:
|
|
||||||
- name: myname01
|
|
||||||
param01:
|
|
||||||
list:
|
|
||||||
- default_value
|
|
||||||
x: default_value
|
|
||||||
y: patch_value
|
|
||||||
z: patch_value
|
|
||||||
- name: myname02
|
|
||||||
param01:
|
|
||||||
- 1
|
|
||||||
- 1
|
|
||||||
- 2
|
|
||||||
- 3
|
|
||||||
|
|
||||||
Example ``list_merge=append``:
|
|
||||||
|
|
||||||
.. code-block:: yaml+jinja
|
|
||||||
|
|
||||||
list3: "{{ [list1, list2]|
|
|
||||||
community.general.lists_mergeby('name',
|
|
||||||
recursive=true,
|
|
||||||
list_merge='append') }}"
|
|
||||||
|
|
||||||
This produces:
|
|
||||||
|
|
||||||
.. code-block:: yaml
|
|
||||||
|
|
||||||
list3:
|
|
||||||
- name: myname01
|
|
||||||
param01:
|
|
||||||
list:
|
|
||||||
- default_value
|
|
||||||
- patch_value
|
|
||||||
x: default_value
|
|
||||||
y: patch_value
|
|
||||||
z: patch_value
|
|
||||||
- name: myname02
|
|
||||||
param01:
|
|
||||||
- 1
|
|
||||||
- 1
|
|
||||||
- 2
|
|
||||||
- 3
|
|
||||||
- 3
|
|
||||||
- 4
|
|
||||||
- 4
|
|
||||||
- key: value
|
|
||||||
|
|
||||||
Example ``list_merge=prepend``:
|
|
||||||
|
|
||||||
.. code-block:: yaml+jinja
|
|
||||||
|
|
||||||
list3: "{{ [list1, list2]|
|
|
||||||
community.general.lists_mergeby('name',
|
|
||||||
recursive=true,
|
|
||||||
list_merge='prepend') }}"
|
|
||||||
|
|
||||||
This produces:
|
|
||||||
|
|
||||||
.. code-block:: yaml
|
|
||||||
|
|
||||||
list3:
|
|
||||||
- name: myname01
|
|
||||||
param01:
|
|
||||||
list:
|
|
||||||
- patch_value
|
|
||||||
- default_value
|
|
||||||
x: default_value
|
|
||||||
y: patch_value
|
|
||||||
z: patch_value
|
|
||||||
- name: myname02
|
|
||||||
param01:
|
|
||||||
- 3
|
|
||||||
- 4
|
|
||||||
- 4
|
|
||||||
- key: value
|
|
||||||
- 1
|
|
||||||
- 1
|
|
||||||
- 2
|
|
||||||
- 3
|
|
||||||
|
|
||||||
Example ``list_merge=append_rp``:
|
|
||||||
|
|
||||||
.. code-block:: yaml+jinja
|
|
||||||
|
|
||||||
list3: "{{ [list1, list2]|
|
|
||||||
community.general.lists_mergeby('name',
|
|
||||||
recursive=true,
|
|
||||||
list_merge='append_rp') }}"
|
|
||||||
|
|
||||||
This produces:
|
|
||||||
|
|
||||||
.. code-block:: yaml
|
|
||||||
|
|
||||||
list3:
|
|
||||||
- name: myname01
|
|
||||||
param01:
|
|
||||||
list:
|
|
||||||
- default_value
|
|
||||||
- patch_value
|
|
||||||
x: default_value
|
|
||||||
y: patch_value
|
|
||||||
z: patch_value
|
|
||||||
- name: myname02
|
|
||||||
param01:
|
|
||||||
- 1
|
|
||||||
- 1
|
|
||||||
- 2
|
|
||||||
- 3
|
|
||||||
- 4
|
|
||||||
- 4
|
|
||||||
- key: value
|
|
||||||
|
|
||||||
Example ``list_merge=prepend_rp``:
|
|
||||||
|
|
||||||
.. code-block:: yaml+jinja
|
|
||||||
|
|
||||||
list3: "{{ [list1, list2]|
|
|
||||||
community.general.lists_mergeby('name',
|
|
||||||
recursive=true,
|
|
||||||
list_merge='prepend_rp') }}"
|
|
||||||
|
|
||||||
This produces:
|
|
||||||
|
|
||||||
.. code-block:: yaml
|
|
||||||
|
|
||||||
list3:
|
|
||||||
- name: myname01
|
|
||||||
param01:
|
|
||||||
list:
|
|
||||||
- patch_value
|
|
||||||
- default_value
|
|
||||||
x: default_value
|
|
||||||
y: patch_value
|
|
||||||
z: patch_value
|
|
||||||
- name: myname02
|
|
||||||
param01:
|
|
||||||
- 3
|
|
||||||
- 4
|
|
||||||
- 4
|
|
||||||
- key: value
|
|
||||||
- 1
|
|
||||||
- 1
|
|
||||||
- 2
|
|
||||||
|
|
||||||
@@ -1,108 +0,0 @@
|
|||||||
Conversions
|
|
||||||
-----------
|
|
||||||
|
|
||||||
Parsing CSV files
|
|
||||||
^^^^^^^^^^^^^^^^^
|
|
||||||
|
|
||||||
Ansible offers the :ref:`community.general.read_csv module <ansible_collections.community.general.read_csv_module>` to read CSV files. Sometimes you need to convert strings to CSV files instead. For this, the ``from_csv`` filter exists.
|
|
||||||
|
|
||||||
.. code-block:: yaml+jinja
|
|
||||||
|
|
||||||
- name: "Parse CSV from string"
|
|
||||||
debug:
|
|
||||||
msg: "{{ csv_string | community.general.from_csv }}"
|
|
||||||
vars:
|
|
||||||
csv_string: |
|
|
||||||
foo,bar,baz
|
|
||||||
1,2,3
|
|
||||||
you,this,then
|
|
||||||
|
|
||||||
This produces:
|
|
||||||
|
|
||||||
.. code-block:: ansible-output
|
|
||||||
|
|
||||||
TASK [Parse CSV from string] **************************************************************
|
|
||||||
ok: [localhost] => {
|
|
||||||
"msg": [
|
|
||||||
{
|
|
||||||
"bar": "2",
|
|
||||||
"baz": "3",
|
|
||||||
"foo": "1"
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"bar": "this",
|
|
||||||
"baz": "then",
|
|
||||||
"foo": "you"
|
|
||||||
}
|
|
||||||
]
|
|
||||||
}
|
|
||||||
|
|
||||||
The ``from_csv`` filter has several keyword arguments to control its behavior:
|
|
||||||
|
|
||||||
:dialect: Dialect of the CSV file. Default is ``excel``. Other possible choices are ``excel-tab`` and ``unix``. If one of ``delimiter``, ``skipinitialspace`` or ``strict`` is specified, ``dialect`` is ignored.
|
|
||||||
:fieldnames: A set of column names to use. If not provided, the first line of the CSV is assumed to contain the column names.
|
|
||||||
:delimiter: Sets the delimiter to use. Default depends on the dialect used.
|
|
||||||
:skipinitialspace: Set to ``true`` to ignore space directly after the delimiter. Default depends on the dialect used (usually ``false``).
|
|
||||||
:strict: Set to ``true`` to error out on invalid CSV input.
|
|
||||||
|
|
||||||
.. versionadded: 3.0.0
|
|
||||||
|
|
||||||
Converting to JSON
|
|
||||||
^^^^^^^^^^^^^^^^^^
|
|
||||||
|
|
||||||
`JC <https://pypi.org/project/jc/>`_ is a CLI tool and Python library which allows to interpret output of various CLI programs as JSON. It is also available as a filter in community.general. This filter needs the `jc Python library <https://pypi.org/project/jc/>`_ installed on the controller.
|
|
||||||
|
|
||||||
.. code-block:: yaml+jinja
|
|
||||||
|
|
||||||
- name: Run 'ls' to list files in /
|
|
||||||
command: ls /
|
|
||||||
register: result
|
|
||||||
|
|
||||||
- name: Parse the ls output
|
|
||||||
debug:
|
|
||||||
msg: "{{ result.stdout | community.general.jc('ls') }}"
|
|
||||||
|
|
||||||
This produces:
|
|
||||||
|
|
||||||
.. code-block:: ansible-output
|
|
||||||
|
|
||||||
TASK [Run 'ls' to list files in /] ********************************************************
|
|
||||||
changed: [localhost]
|
|
||||||
|
|
||||||
TASK [Parse the ls output] ****************************************************************
|
|
||||||
ok: [localhost] => {
|
|
||||||
"msg": [
|
|
||||||
{
|
|
||||||
"filename": "bin"
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"filename": "boot"
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"filename": "dev"
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"filename": "etc"
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"filename": "home"
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"filename": "lib"
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"filename": "proc"
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"filename": "root"
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"filename": "run"
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"filename": "tmp"
|
|
||||||
}
|
|
||||||
]
|
|
||||||
}
|
|
||||||
|
|
||||||
.. versionadded: 2.0.0
|
|
||||||
@@ -1,80 +0,0 @@
|
|||||||
Creating identifiers
|
|
||||||
--------------------
|
|
||||||
|
|
||||||
The following filters allow to create identifiers.
|
|
||||||
|
|
||||||
Hashids
|
|
||||||
^^^^^^^
|
|
||||||
|
|
||||||
`Hashids <https://hashids.org/>`_ allow to convert sequences of integers to short unique string identifiers. This filter needs the `hashids Python library <https://pypi.org/project/hashids/>`_ installed on the controller.
|
|
||||||
|
|
||||||
.. code-block:: yaml+jinja
|
|
||||||
|
|
||||||
- name: "Create hashid"
|
|
||||||
debug:
|
|
||||||
msg: "{{ [1234, 5, 6] | community.general.hashids_encode }}"
|
|
||||||
|
|
||||||
- name: "Decode hashid"
|
|
||||||
debug:
|
|
||||||
msg: "{{ 'jm2Cytn' | community.general.hashids_decode }}"
|
|
||||||
|
|
||||||
This produces:
|
|
||||||
|
|
||||||
.. code-block:: ansible-output
|
|
||||||
|
|
||||||
TASK [Create hashid] **********************************************************************
|
|
||||||
ok: [localhost] => {
|
|
||||||
"msg": "jm2Cytn"
|
|
||||||
}
|
|
||||||
|
|
||||||
TASK [Decode hashid] **********************************************************************
|
|
||||||
ok: [localhost] => {
|
|
||||||
"msg": [
|
|
||||||
1234,
|
|
||||||
5,
|
|
||||||
6
|
|
||||||
]
|
|
||||||
}
|
|
||||||
|
|
||||||
The hashids filters accept keyword arguments to allow fine-tuning the hashids generated:
|
|
||||||
|
|
||||||
:salt: String to use as salt when hashing.
|
|
||||||
:alphabet: String of 16 or more unique characters to produce a hash.
|
|
||||||
:min_length: Minimum length of hash produced.
|
|
||||||
|
|
||||||
.. versionadded: 3.0.0
|
|
||||||
|
|
||||||
Random MACs
|
|
||||||
^^^^^^^^^^^
|
|
||||||
|
|
||||||
You can use the ``random_mac`` filter to complete a partial `MAC address <https://en.wikipedia.org/wiki/MAC_address>`_ to a random 6-byte MAC address.
|
|
||||||
|
|
||||||
.. code-block:: yaml+jinja
|
|
||||||
|
|
||||||
- name: "Create a random MAC starting with ff:"
|
|
||||||
debug:
|
|
||||||
msg: "{{ 'FF' | community.general.random_mac }}"
|
|
||||||
|
|
||||||
- name: "Create a random MAC starting with 00:11:22:"
|
|
||||||
debug:
|
|
||||||
msg: "{{ '00:11:22' | community.general.random_mac }}"
|
|
||||||
|
|
||||||
This produces:
|
|
||||||
|
|
||||||
.. code-block:: ansible-output
|
|
||||||
|
|
||||||
TASK [Create a random MAC starting with ff:] **********************************************
|
|
||||||
ok: [localhost] => {
|
|
||||||
"msg": "ff:69:d3:78:7f:b4"
|
|
||||||
}
|
|
||||||
|
|
||||||
TASK [Create a random MAC starting with 00:11:22:] ****************************************
|
|
||||||
ok: [localhost] => {
|
|
||||||
"msg": "00:11:22:71:5d:3b"
|
|
||||||
}
|
|
||||||
|
|
||||||
You can also initialize the random number generator from a seed to create random-but-idempotent MAC addresses:
|
|
||||||
|
|
||||||
.. code-block:: yaml+jinja
|
|
||||||
|
|
||||||
"{{ '52:54:00' | community.general.random_mac(seed=inventory_hostname) }}"
|
|
||||||
@@ -1,14 +0,0 @@
|
|||||||
Paths
|
|
||||||
-----
|
|
||||||
|
|
||||||
The ``path_join`` filter has been added in ansible-base 2.10. If you want to use this filter, but also need to support Ansible 2.9, you can use ``community.general``'s ``path_join`` shim, ``community.general.path_join``. This filter redirects to ``path_join`` for ansible-base 2.10 and ansible-core 2.11 or newer, and re-implements the filter for Ansible 2.9.
|
|
||||||
|
|
||||||
.. code-block:: yaml+jinja
|
|
||||||
|
|
||||||
# ansible-base 2.10 or newer:
|
|
||||||
path: {{ ('/etc', path, 'subdir', file) | path_join }}
|
|
||||||
|
|
||||||
# Also works with Ansible 2.9:
|
|
||||||
path: {{ ('/etc', path, 'subdir', file) | community.general.path_join }}
|
|
||||||
|
|
||||||
.. versionadded:: 3.0.0
|
|
||||||
@@ -1,144 +0,0 @@
|
|||||||
.. _ansible_collections.community.general.docsite.json_query_filter:
|
|
||||||
|
|
||||||
Selecting JSON data: JSON queries
|
|
||||||
---------------------------------
|
|
||||||
|
|
||||||
To select a single element or a data subset from a complex data structure in JSON format (for example, Ansible facts), use the ``json_query`` filter. The ``json_query`` filter lets you query a complex JSON structure and iterate over it using a loop structure.
|
|
||||||
|
|
||||||
.. note:: You must manually install the **jmespath** dependency on the Ansible controller before using this filter. This filter is built upon **jmespath**, and you can use the same syntax. For examples, see `jmespath examples <http://jmespath.org/examples.html>`_.
|
|
||||||
|
|
||||||
Consider this data structure:
|
|
||||||
|
|
||||||
.. code-block:: yaml+jinja
|
|
||||||
|
|
||||||
{
|
|
||||||
"domain_definition": {
|
|
||||||
"domain": {
|
|
||||||
"cluster": [
|
|
||||||
{
|
|
||||||
"name": "cluster1"
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"name": "cluster2"
|
|
||||||
}
|
|
||||||
],
|
|
||||||
"server": [
|
|
||||||
{
|
|
||||||
"name": "server11",
|
|
||||||
"cluster": "cluster1",
|
|
||||||
"port": "8080"
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"name": "server12",
|
|
||||||
"cluster": "cluster1",
|
|
||||||
"port": "8090"
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"name": "server21",
|
|
||||||
"cluster": "cluster2",
|
|
||||||
"port": "9080"
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"name": "server22",
|
|
||||||
"cluster": "cluster2",
|
|
||||||
"port": "9090"
|
|
||||||
}
|
|
||||||
],
|
|
||||||
"library": [
|
|
||||||
{
|
|
||||||
"name": "lib1",
|
|
||||||
"target": "cluster1"
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"name": "lib2",
|
|
||||||
"target": "cluster2"
|
|
||||||
}
|
|
||||||
]
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
To extract all clusters from this structure, you can use the following query:
|
|
||||||
|
|
||||||
.. code-block:: yaml+jinja
|
|
||||||
|
|
||||||
- name: Display all cluster names
|
|
||||||
ansible.builtin.debug:
|
|
||||||
var: item
|
|
||||||
loop: "{{ domain_definition | community.general.json_query('domain.cluster[*].name') }}"
|
|
||||||
|
|
||||||
To extract all server names:
|
|
||||||
|
|
||||||
.. code-block:: yaml+jinja
|
|
||||||
|
|
||||||
- name: Display all server names
|
|
||||||
ansible.builtin.debug:
|
|
||||||
var: item
|
|
||||||
loop: "{{ domain_definition | community.general.json_query('domain.server[*].name') }}"
|
|
||||||
|
|
||||||
To extract ports from cluster1:
|
|
||||||
|
|
||||||
.. code-block:: yaml+jinja
|
|
||||||
|
|
||||||
- name: Display all ports from cluster1
|
|
||||||
ansible.builtin.debug:
|
|
||||||
var: item
|
|
||||||
loop: "{{ domain_definition | community.general.json_query(server_name_cluster1_query) }}"
|
|
||||||
vars:
|
|
||||||
server_name_cluster1_query: "domain.server[?cluster=='cluster1'].port"
|
|
||||||
|
|
||||||
.. note:: You can use a variable to make the query more readable.
|
|
||||||
|
|
||||||
To print out the ports from cluster1 in a comma separated string:
|
|
||||||
|
|
||||||
.. code-block:: yaml+jinja
|
|
||||||
|
|
||||||
- name: Display all ports from cluster1 as a string
|
|
||||||
ansible.builtin.debug:
|
|
||||||
msg: "{{ domain_definition | community.general.json_query('domain.server[?cluster==`cluster1`].port') | join(', ') }}"
|
|
||||||
|
|
||||||
.. note:: In the example above, quoting literals using backticks avoids escaping quotes and maintains readability.
|
|
||||||
|
|
||||||
You can use YAML `single quote escaping <https://yaml.org/spec/current.html#id2534365>`_:
|
|
||||||
|
|
||||||
.. code-block:: yaml+jinja
|
|
||||||
|
|
||||||
- name: Display all ports from cluster1
|
|
||||||
ansible.builtin.debug:
|
|
||||||
var: item
|
|
||||||
loop: "{{ domain_definition | community.general.json_query('domain.server[?cluster==''cluster1''].port') }}"
|
|
||||||
|
|
||||||
.. note:: Escaping single quotes within single quotes in YAML is done by doubling the single quote.
|
|
||||||
|
|
||||||
To get a hash map with all ports and names of a cluster:
|
|
||||||
|
|
||||||
.. code-block:: yaml+jinja
|
|
||||||
|
|
||||||
- name: Display all server ports and names from cluster1
|
|
||||||
ansible.builtin.debug:
|
|
||||||
var: item
|
|
||||||
loop: "{{ domain_definition | community.general.json_query(server_name_cluster1_query) }}"
|
|
||||||
vars:
|
|
||||||
server_name_cluster1_query: "domain.server[?cluster=='cluster2'].{name: name, port: port}"
|
|
||||||
|
|
||||||
To extract ports from all clusters with name starting with 'server1':
|
|
||||||
|
|
||||||
.. code-block:: yaml+jinja
|
|
||||||
|
|
||||||
- name: Display all ports from cluster1
|
|
||||||
ansible.builtin.debug:
|
|
||||||
msg: "{{ domain_definition | to_json | from_json | community.general.json_query(server_name_query) }}"
|
|
||||||
vars:
|
|
||||||
server_name_query: "domain.server[?starts_with(name,'server1')].port"
|
|
||||||
|
|
||||||
To extract ports from all clusters with name containing 'server1':
|
|
||||||
|
|
||||||
.. code-block:: yaml+jinja
|
|
||||||
|
|
||||||
- name: Display all ports from cluster1
|
|
||||||
ansible.builtin.debug:
|
|
||||||
msg: "{{ domain_definition | to_json | from_json | community.general.json_query(server_name_query) }}"
|
|
||||||
vars:
|
|
||||||
server_name_query: "domain.server[?contains(name,'server1')].port"
|
|
||||||
|
|
||||||
.. note:: while using ``starts_with`` and ``contains``, you have to use `` to_json | from_json `` filter for correct parsing of data structure.
|
|
||||||
@@ -1,84 +0,0 @@
|
|||||||
Working with times
|
|
||||||
------------------
|
|
||||||
|
|
||||||
The ``to_time_unit`` filter allows to convert times from a human-readable string to a unit. For example, ``'4h 30min 12second' | community.general.to_time_unit('hour')`` gives the number of hours that correspond to 4 hours, 30 minutes and 12 seconds.
|
|
||||||
|
|
||||||
There are shorthands to directly convert to various units, like ``to_hours``, ``to_minutes``, ``to_seconds``, and so on. The following table lists all units that can be used:
|
|
||||||
|
|
||||||
.. list-table:: Units
|
|
||||||
:widths: 25 25 25 25
|
|
||||||
:header-rows: 1
|
|
||||||
|
|
||||||
* - Unit name
|
|
||||||
- Unit value in seconds
|
|
||||||
- Unit strings for filter
|
|
||||||
- Shorthand filter
|
|
||||||
* - Millisecond
|
|
||||||
- 1/1000 second
|
|
||||||
- ``ms``, ``millisecond``, ``milliseconds``, ``msec``, ``msecs``, ``msecond``, ``mseconds``
|
|
||||||
- ``to_milliseconds``
|
|
||||||
* - Second
|
|
||||||
- 1 second
|
|
||||||
- ``s``, ``sec``, ``secs``, ``second``, ``seconds``
|
|
||||||
- ``to_seconds``
|
|
||||||
* - Minute
|
|
||||||
- 60 seconds
|
|
||||||
- ``m``, ``min``, ``mins``, ``minute``, ``minutes``
|
|
||||||
- ``to_minutes``
|
|
||||||
* - Hour
|
|
||||||
- 60*60 seconds
|
|
||||||
- ``h``, ``hour``, ``hours``
|
|
||||||
- ``to_hours``
|
|
||||||
* - Day
|
|
||||||
- 24*60*60 seconds
|
|
||||||
- ``d``, ``day``, ``days``
|
|
||||||
- ``to_days``
|
|
||||||
* - Week
|
|
||||||
- 7*24*60*60 seconds
|
|
||||||
- ``w``, ``week``, ``weeks``
|
|
||||||
- ``to_weeks``
|
|
||||||
* - Month
|
|
||||||
- 30*24*60*60 seconds
|
|
||||||
- ``mo``, ``month``, ``months``
|
|
||||||
- ``to_months``
|
|
||||||
* - Year
|
|
||||||
- 365*24*60*60 seconds
|
|
||||||
- ``y``, ``year``, ``years``
|
|
||||||
- ``to_years``
|
|
||||||
|
|
||||||
Note that months and years are using a simplified representation: a month is 30 days, and a year is 365 days. If you need different definitions of months or years, you can pass them as keyword arguments. For example, if you want a year to be 365.25 days, and a month to be 30.5 days, you can write ``'11months 4' | community.general.to_years(year=365.25, month=30.5)``. These keyword arguments can be specified to ``to_time_unit`` and to all shorthand filters.
|
|
||||||
|
|
||||||
.. code-block:: yaml+jinja
|
|
||||||
|
|
||||||
- name: Convert string to seconds
|
|
||||||
debug:
|
|
||||||
msg: "{{ '30h 20m 10s 123ms' | community.general.to_time_unit('seconds') }}"
|
|
||||||
|
|
||||||
- name: Convert string to hours
|
|
||||||
debug:
|
|
||||||
msg: "{{ '30h 20m 10s 123ms' | community.general.to_hours }}"
|
|
||||||
|
|
||||||
- name: Convert string to years (using 365.25 days == 1 year)
|
|
||||||
debug:
|
|
||||||
msg: "{{ '400d 15h' | community.general.to_years(year=365.25) }}"
|
|
||||||
|
|
||||||
This produces:
|
|
||||||
|
|
||||||
.. code-block:: ansible-output
|
|
||||||
|
|
||||||
TASK [Convert string to seconds] **********************************************************
|
|
||||||
ok: [localhost] => {
|
|
||||||
"msg": "109210.123"
|
|
||||||
}
|
|
||||||
|
|
||||||
TASK [Convert string to hours] ************************************************************
|
|
||||||
ok: [localhost] => {
|
|
||||||
"msg": "30.336145277778"
|
|
||||||
}
|
|
||||||
|
|
||||||
TASK [Convert string to years (using 365.25 days == 1 year)] ******************************
|
|
||||||
ok: [localhost] => {
|
|
||||||
"msg": "1.096851471595"
|
|
||||||
}
|
|
||||||
|
|
||||||
.. versionadded: 0.2.0
|
|
||||||
@@ -1,30 +0,0 @@
|
|||||||
Working with Unicode
|
|
||||||
---------------------
|
|
||||||
|
|
||||||
`Unicode <https://unicode.org/main.html>`_ makes it possible to produce two strings which may be visually equivalent, but are comprised of distinctly different characters/character sequences. To address this ``Unicode`` defines `normalization forms <https://unicode.org/reports/tr15/>`_ which avoid these distinctions by choosing a unique character sequence for a given visual representation.
|
|
||||||
|
|
||||||
You can use the ``community.general.unicode_normalize`` filter to normalize ``Unicode`` strings within your playbooks.
|
|
||||||
|
|
||||||
.. code-block:: yaml+jinja
|
|
||||||
|
|
||||||
- name: Compare Unicode representations
|
|
||||||
debug:
|
|
||||||
msg: "{{ with_combining_character | community.general.unicode_normalize == without_combining_character }}"
|
|
||||||
vars:
|
|
||||||
with_combining_character: "{{ 'Mayagu\u0308ez' }}"
|
|
||||||
without_combining_character: Mayagüez
|
|
||||||
|
|
||||||
This produces:
|
|
||||||
|
|
||||||
.. code-block:: ansible-output
|
|
||||||
|
|
||||||
TASK [Compare Unicode representations] ********************************************************
|
|
||||||
ok: [localhost] => {
|
|
||||||
"msg": true
|
|
||||||
}
|
|
||||||
|
|
||||||
The ``community.general.unicode_normalize`` filter accepts a keyword argument to select the ``Unicode`` form used to normalize the input string.
|
|
||||||
|
|
||||||
:form: One of ``'NFC'`` (default), ``'NFD'``, ``'NFKC'``, or ``'NFKD'``. See the `Unicode reference <https://unicode.org/reports/tr15/>`_ for more information.
|
|
||||||
|
|
||||||
.. versionadded:: 3.7.0
|
|
||||||
@@ -1,34 +0,0 @@
|
|||||||
Working with versions
|
|
||||||
---------------------
|
|
||||||
|
|
||||||
If you need to sort a list of version numbers, the Jinja ``sort`` filter is problematic. Since it sorts lexicographically, ``2.10`` will come before ``2.9``. To treat version numbers correctly, you can use the ``version_sort`` filter:
|
|
||||||
|
|
||||||
.. code-block:: yaml+jinja
|
|
||||||
|
|
||||||
- name: Sort list by version number
|
|
||||||
debug:
|
|
||||||
var: ansible_versions | community.general.version_sort
|
|
||||||
vars:
|
|
||||||
ansible_versions:
|
|
||||||
- '2.8.0'
|
|
||||||
- '2.11.0'
|
|
||||||
- '2.7.0'
|
|
||||||
- '2.10.0'
|
|
||||||
- '2.9.0'
|
|
||||||
|
|
||||||
This produces:
|
|
||||||
|
|
||||||
.. code-block:: ansible-output
|
|
||||||
|
|
||||||
TASK [Sort list by version number] ********************************************************
|
|
||||||
ok: [localhost] => {
|
|
||||||
"ansible_versions | community.general.version_sort": [
|
|
||||||
"2.7.0",
|
|
||||||
"2.8.0",
|
|
||||||
"2.9.0",
|
|
||||||
"2.10.0",
|
|
||||||
"2.11.0"
|
|
||||||
]
|
|
||||||
}
|
|
||||||
|
|
||||||
.. versionadded: 2.2.0
|
|
||||||
@@ -1,28 +0,0 @@
|
|||||||
.. _ansible_collections.community.general.docsite.test_guide:
|
|
||||||
|
|
||||||
community.general Test (Plugin) Guide
|
|
||||||
=====================================
|
|
||||||
|
|
||||||
The :ref:`community.general collection <plugins_in_community.general>` offers currently one test plugin.
|
|
||||||
|
|
||||||
.. contents:: Topics
|
|
||||||
|
|
||||||
Feature Tests
|
|
||||||
-------------
|
|
||||||
|
|
||||||
The ``a_module`` test allows to check whether a given string refers to an existing module or action plugin. This can be useful in roles, which can use this to ensure that required modules are present ahead of time.
|
|
||||||
|
|
||||||
.. code-block:: yaml+jinja
|
|
||||||
|
|
||||||
- name: Make sure that community.aws.route53 is available
|
|
||||||
assert:
|
|
||||||
that:
|
|
||||||
- >
|
|
||||||
'community.aws.route53' is community.general.a_module
|
|
||||||
|
|
||||||
- name: Make sure that community.general.does_not_exist is not a module or action plugin
|
|
||||||
assert:
|
|
||||||
that:
|
|
||||||
- "'community.general.does_not_exist' is not community.general.a_module"
|
|
||||||
|
|
||||||
.. versionadded:: 4.0.0
|
|
||||||
12
galaxy.yml
12
galaxy.yml
@@ -1,16 +1,20 @@
|
|||||||
namespace: community
|
namespace: community
|
||||||
name: general
|
name: general
|
||||||
version: 5.0.0
|
version: 1.2.0
|
||||||
readme: README.md
|
readme: README.md
|
||||||
authors:
|
authors:
|
||||||
- Ansible (https://github.com/ansible)
|
- Ansible (https://github.com/ansible)
|
||||||
description: null
|
description: null
|
||||||
license_file: COPYING
|
license_file: COPYING
|
||||||
tags: [community]
|
tags: [community]
|
||||||
# NOTE: No dependencies are expected to be added here
|
# NOTE: No more dependencies can be added to this list
|
||||||
# dependencies:
|
dependencies:
|
||||||
|
ansible.netcommon: '>=1.0.0'
|
||||||
|
ansible.posix: '>=1.0.0'
|
||||||
|
community.kubernetes: '>=1.0.0'
|
||||||
|
google.cloud: '>=1.0.0'
|
||||||
repository: https://github.com/ansible-collections/community.general
|
repository: https://github.com/ansible-collections/community.general
|
||||||
documentation: https://docs.ansible.com/ansible/latest/collections/community/general/
|
#documentation: https://github.com/ansible-collection-migration/community.general/tree/main/docs
|
||||||
homepage: https://github.com/ansible-collections/community.general
|
homepage: https://github.com/ansible-collections/community.general
|
||||||
issues: https://github.com/ansible-collections/community.general/issues
|
issues: https://github.com/ansible-collections/community.general/issues
|
||||||
#type: flatmap
|
#type: flatmap
|
||||||
|
|||||||
2106
meta/runtime.yml
2106
meta/runtime.yml
File diff suppressed because it is too large
Load Diff
1
plugins/action/iptables_state.py
Symbolic link
1
plugins/action/iptables_state.py
Symbolic link
@@ -0,0 +1 @@
|
|||||||
|
./system/iptables_state.py
|
||||||
1
plugins/action/shutdown.py
Symbolic link
1
plugins/action/shutdown.py
Symbolic link
@@ -0,0 +1 @@
|
|||||||
|
./system/shutdown.py
|
||||||
@@ -1,4 +1,3 @@
|
|||||||
# -*- coding: utf-8 -*-
|
|
||||||
# Copyright: (c) 2020, quidame <quidame@poivron.org>
|
# Copyright: (c) 2020, quidame <quidame@poivron.org>
|
||||||
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
|
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||||
|
|
||||||
@@ -8,7 +7,7 @@ __metaclass__ = type
|
|||||||
import time
|
import time
|
||||||
|
|
||||||
from ansible.plugins.action import ActionBase
|
from ansible.plugins.action import ActionBase
|
||||||
from ansible.errors import AnsibleActionFail, AnsibleConnectionFailure
|
from ansible.errors import AnsibleError, AnsibleActionFail, AnsibleConnectionFailure
|
||||||
from ansible.utils.vars import merge_hash
|
from ansible.utils.vars import merge_hash
|
||||||
from ansible.utils.display import Display
|
from ansible.utils.display import Display
|
||||||
|
|
||||||
@@ -41,27 +40,19 @@ class ActionModule(ActionBase):
|
|||||||
"(=%s) to 0, and 'async' (=%s) to a value >2 and not greater than "
|
"(=%s) to 0, and 'async' (=%s) to a value >2 and not greater than "
|
||||||
"'ansible_timeout' (=%s) (recommended).")
|
"'ansible_timeout' (=%s) (recommended).")
|
||||||
|
|
||||||
def _async_result(self, async_status_args, task_vars, timeout):
|
def _async_result(self, module_args, task_vars, timeout):
|
||||||
'''
|
'''
|
||||||
Retrieve results of the asynchonous task, and display them in place of
|
Retrieve results of the asynchonous task, and display them in place of
|
||||||
the async wrapper results (those with the ansible_job_id key).
|
the async wrapper results (those with the ansible_job_id key).
|
||||||
'''
|
'''
|
||||||
async_status = self._task.copy()
|
|
||||||
async_status.args = async_status_args
|
|
||||||
async_status.action = 'ansible.builtin.async_status'
|
|
||||||
async_status.async_val = 0
|
|
||||||
async_action = self._shared_loader_obj.action_loader.get(
|
|
||||||
async_status.action, task=async_status, connection=self._connection,
|
|
||||||
play_context=self._play_context, loader=self._loader, templar=self._templar,
|
|
||||||
shared_loader_obj=self._shared_loader_obj)
|
|
||||||
|
|
||||||
if async_status.args['mode'] == 'cleanup':
|
|
||||||
return async_action.run(task_vars=task_vars)
|
|
||||||
|
|
||||||
# At least one iteration is required, even if timeout is 0.
|
# At least one iteration is required, even if timeout is 0.
|
||||||
for dummy in range(max(1, timeout)):
|
for i in range(max(1, timeout)):
|
||||||
async_result = async_action.run(task_vars=task_vars)
|
async_result = self._execute_module(
|
||||||
if async_result.get('finished', 0) == 1:
|
module_name='ansible.builtin.async_status',
|
||||||
|
module_args=module_args,
|
||||||
|
task_vars=task_vars,
|
||||||
|
wrap_async=False)
|
||||||
|
if async_result['finished'] == 1:
|
||||||
break
|
break
|
||||||
time.sleep(min(1, timeout))
|
time.sleep(min(1, timeout))
|
||||||
|
|
||||||
@@ -85,6 +76,7 @@ class ActionModule(ActionBase):
|
|||||||
task_async = self._task.async_val
|
task_async = self._task.async_val
|
||||||
check_mode = self._play_context.check_mode
|
check_mode = self._play_context.check_mode
|
||||||
max_timeout = self._connection._play_context.timeout
|
max_timeout = self._connection._play_context.timeout
|
||||||
|
module_name = self._task.action
|
||||||
module_args = self._task.args
|
module_args = self._task.args
|
||||||
|
|
||||||
if module_args.get('state', None) == 'restored':
|
if module_args.get('state', None) == 'restored':
|
||||||
@@ -106,29 +98,42 @@ class ActionModule(ActionBase):
|
|||||||
task_async,
|
task_async,
|
||||||
max_timeout))
|
max_timeout))
|
||||||
|
|
||||||
# inject the async directory based on the shell option into the
|
# BEGIN snippet from async_status action plugin
|
||||||
# module args
|
env_async_dir = [e for e in self._task.environment if
|
||||||
async_dir = self.get_shell_option('async_dir', default="~/.ansible_async")
|
"ANSIBLE_ASYNC_DIR" in e]
|
||||||
|
if len(env_async_dir) > 0:
|
||||||
|
# for backwards compatibility we need to get the dir from
|
||||||
|
# ANSIBLE_ASYNC_DIR that is defined in the environment. This is
|
||||||
|
# deprecated and will be removed in favour of shell options
|
||||||
|
async_dir = env_async_dir[0]['ANSIBLE_ASYNC_DIR']
|
||||||
|
|
||||||
|
msg = "Setting the async dir from the environment keyword " \
|
||||||
|
"ANSIBLE_ASYNC_DIR is deprecated. Set the async_dir " \
|
||||||
|
"shell option instead"
|
||||||
|
display.deprecated(msg, version='2.0.0',
|
||||||
|
collection_name='community.general') # was Ansible 2.12
|
||||||
|
else:
|
||||||
|
# inject the async directory based on the shell option into the
|
||||||
|
# module args
|
||||||
|
async_dir = self.get_shell_option('async_dir', default="~/.ansible_async")
|
||||||
|
# END snippet from async_status action plugin
|
||||||
|
|
||||||
# Bind the loop max duration to consistent values on both
|
# Bind the loop max duration to consistent values on both
|
||||||
# remote and local sides (if not the same, make the loop
|
# remote and local sides (if not the same, make the loop
|
||||||
# longer on the controller); and set a backup file path.
|
# longer on the controller); and set a backup file path.
|
||||||
module_args['_timeout'] = task_async
|
module_args['_timeout'] = task_async
|
||||||
module_args['_back'] = '%s/iptables.state' % async_dir
|
module_args['_back'] = '%s/iptables.state' % async_dir
|
||||||
async_status_args = dict(mode='status')
|
async_status_args = dict(_async_dir=async_dir)
|
||||||
confirm_cmd = 'rm -f %s' % module_args['_back']
|
confirm_cmd = 'rm -f %s' % module_args['_back']
|
||||||
starter_cmd = 'touch %s.starter' % module_args['_back']
|
|
||||||
remaining_time = max(task_async, max_timeout)
|
remaining_time = max(task_async, max_timeout)
|
||||||
|
|
||||||
# do work!
|
# do work!
|
||||||
result = merge_hash(result, self._execute_module(module_args=module_args, task_vars=task_vars, wrap_async=wrap_async))
|
result = merge_hash(result, self._execute_module(module_args=module_args, task_vars=task_vars, wrap_async=wrap_async))
|
||||||
|
|
||||||
# Then the 3-steps "go ahead or rollback":
|
# Then the 3-steps "go ahead or rollback":
|
||||||
# 1. Catch early errors of the module (in asynchronous task) if any.
|
# - reset connection to ensure a persistent one will not be reused
|
||||||
# Touch a file on the target to signal the module to process now.
|
# - confirm the restored state by removing the backup on the remote
|
||||||
# 2. Reset connection to ensure a persistent one will not be reused.
|
# - retrieve the results of the asynchronous task to return them
|
||||||
# 3. Confirm the restored state by removing the backup on the remote.
|
|
||||||
# Retrieve the results of the asynchronous task to return them.
|
|
||||||
if '_back' in module_args:
|
if '_back' in module_args:
|
||||||
async_status_args['jid'] = result.get('ansible_job_id', None)
|
async_status_args['jid'] = result.get('ansible_job_id', None)
|
||||||
if async_status_args['jid'] is None:
|
if async_status_args['jid'] is None:
|
||||||
@@ -138,20 +143,14 @@ class ActionModule(ActionBase):
|
|||||||
# option type/value, missing required system command, etc.
|
# option type/value, missing required system command, etc.
|
||||||
result = merge_hash(result, self._async_result(async_status_args, task_vars, 0))
|
result = merge_hash(result, self._async_result(async_status_args, task_vars, 0))
|
||||||
|
|
||||||
# The module is aware to not process the main iptables-restore
|
|
||||||
# command before finding (and deleting) the 'starter' cookie on
|
|
||||||
# the host, so the previous query will not reach ssh timeout.
|
|
||||||
dummy = self._low_level_execute_command(starter_cmd, sudoable=self.DEFAULT_SUDOABLE)
|
|
||||||
|
|
||||||
# As the main command is not yet executed on the target, here
|
|
||||||
# 'finished' means 'failed before main command be executed'.
|
|
||||||
if not result['finished']:
|
if not result['finished']:
|
||||||
try:
|
try:
|
||||||
self._connection.reset()
|
self._connection.reset()
|
||||||
|
display.v("%s: reset connection" % (module_name))
|
||||||
except AttributeError:
|
except AttributeError:
|
||||||
pass
|
display.warning("Connection plugin does not allow to reset the connection.")
|
||||||
|
|
||||||
for dummy in range(max_timeout):
|
for x in range(max_timeout):
|
||||||
time.sleep(1)
|
time.sleep(1)
|
||||||
remaining_time -= 1
|
remaining_time -= 1
|
||||||
# - AnsibleConnectionFailure covers rejected requests (i.e.
|
# - AnsibleConnectionFailure covers rejected requests (i.e.
|
||||||
@@ -159,7 +158,7 @@ class ActionModule(ActionBase):
|
|||||||
# - ansible_timeout is able to cover dropped requests (due
|
# - ansible_timeout is able to cover dropped requests (due
|
||||||
# to a rule or policy DROP) if not lower than async_val.
|
# to a rule or policy DROP) if not lower than async_val.
|
||||||
try:
|
try:
|
||||||
dummy = self._low_level_execute_command(confirm_cmd, sudoable=self.DEFAULT_SUDOABLE)
|
garbage = self._low_level_execute_command(confirm_cmd, sudoable=self.DEFAULT_SUDOABLE)
|
||||||
break
|
break
|
||||||
except AnsibleConnectionFailure:
|
except AnsibleConnectionFailure:
|
||||||
continue
|
continue
|
||||||
@@ -172,12 +171,16 @@ class ActionModule(ActionBase):
|
|||||||
del result[key]
|
del result[key]
|
||||||
|
|
||||||
if result.get('invocation', {}).get('module_args'):
|
if result.get('invocation', {}).get('module_args'):
|
||||||
for key in ('_back', '_timeout', '_async_dir', 'jid'):
|
if '_timeout' in result['invocation']['module_args']:
|
||||||
if result['invocation']['module_args'].get(key):
|
del result['invocation']['module_args']['_back']
|
||||||
del result['invocation']['module_args'][key]
|
del result['invocation']['module_args']['_timeout']
|
||||||
|
|
||||||
async_status_args['mode'] = 'cleanup'
|
async_status_args['mode'] = 'cleanup'
|
||||||
dummy = self._async_result(async_status_args, task_vars, 0)
|
garbage = self._execute_module(
|
||||||
|
module_name='ansible.builtin.async_status',
|
||||||
|
module_args=async_status_args,
|
||||||
|
task_vars=task_vars,
|
||||||
|
wrap_async=False)
|
||||||
|
|
||||||
if not wrap_async:
|
if not wrap_async:
|
||||||
# remove a temporary path we created
|
# remove a temporary path we created
|
||||||
|
|||||||
@@ -1,4 +1,3 @@
|
|||||||
# -*- coding: utf-8 -*-
|
|
||||||
# Copyright: (c) 2020, Amin Vakil <info@aminvakil.com>
|
# Copyright: (c) 2020, Amin Vakil <info@aminvakil.com>
|
||||||
# Copyright: (c) 2016-2018, Matt Davis <mdavis@ansible.com>
|
# Copyright: (c) 2016-2018, Matt Davis <mdavis@ansible.com>
|
||||||
# Copyright: (c) 2018, Sam Doran <sdoran@redhat.com>
|
# Copyright: (c) 2018, Sam Doran <sdoran@redhat.com>
|
||||||
@@ -8,7 +7,7 @@ from __future__ import (absolute_import, division, print_function)
|
|||||||
__metaclass__ = type
|
__metaclass__ = type
|
||||||
|
|
||||||
from ansible.errors import AnsibleError, AnsibleConnectionFailure
|
from ansible.errors import AnsibleError, AnsibleConnectionFailure
|
||||||
from ansible.module_utils.common.text.converters import to_native, to_text
|
from ansible.module_utils._text import to_native, to_text
|
||||||
from ansible.module_utils.common.collections import is_string
|
from ansible.module_utils.common.collections import is_string
|
||||||
from ansible.plugins.action import ActionBase
|
from ansible.plugins.action import ActionBase
|
||||||
from ansible.utils.display import Display
|
from ansible.utils.display import Display
|
||||||
|
|||||||
@@ -5,11 +5,11 @@ from __future__ import (absolute_import, division, print_function)
|
|||||||
__metaclass__ = type
|
__metaclass__ = type
|
||||||
|
|
||||||
DOCUMENTATION = '''
|
DOCUMENTATION = '''
|
||||||
name: doas
|
become: doas
|
||||||
short_description: Do As user
|
short_description: Do As user
|
||||||
description:
|
description:
|
||||||
- This become plugins allows your remote/login user to execute commands as another user via the doas utility.
|
- This become plugins allows your remote/login user to execute commands as another user via the doas utility.
|
||||||
author: Ansible Core Team
|
author: ansible (@core)
|
||||||
options:
|
options:
|
||||||
become_user:
|
become_user:
|
||||||
description: User you 'become' to execute the task
|
description: User you 'become' to execute the task
|
||||||
@@ -81,7 +81,7 @@ DOCUMENTATION = '''
|
|||||||
|
|
||||||
import re
|
import re
|
||||||
|
|
||||||
from ansible.module_utils.common.text.converters import to_bytes
|
from ansible.module_utils._text import to_bytes
|
||||||
from ansible.plugins.become import BecomeBase
|
from ansible.plugins.become import BecomeBase
|
||||||
|
|
||||||
|
|
||||||
|
|||||||
@@ -4,11 +4,11 @@ from __future__ import (absolute_import, division, print_function)
|
|||||||
__metaclass__ = type
|
__metaclass__ = type
|
||||||
|
|
||||||
DOCUMENTATION = '''
|
DOCUMENTATION = '''
|
||||||
name: dzdo
|
become: dzdo
|
||||||
short_description: Centrify's Direct Authorize
|
short_description: Centrify's Direct Authorize
|
||||||
description:
|
description:
|
||||||
- This become plugins allows your remote/login user to execute commands as another user via the dzdo utility.
|
- This become plugins allows your remote/login user to execute commands as another user via the dzdo utility.
|
||||||
author: Ansible Core Team
|
author: ansible (@core)
|
||||||
options:
|
options:
|
||||||
become_user:
|
become_user:
|
||||||
description: User you 'become' to execute the task
|
description: User you 'become' to execute the task
|
||||||
|
|||||||
@@ -5,11 +5,11 @@ from __future__ import (absolute_import, division, print_function)
|
|||||||
__metaclass__ = type
|
__metaclass__ = type
|
||||||
|
|
||||||
DOCUMENTATION = '''
|
DOCUMENTATION = '''
|
||||||
name: ksu
|
become: ksu
|
||||||
short_description: Kerberos substitute user
|
short_description: Kerberos substitute user
|
||||||
description:
|
description:
|
||||||
- This become plugins allows your remote/login user to execute commands as another user via the ksu utility.
|
- This become plugins allows your remote/login user to execute commands as another user via the ksu utility.
|
||||||
author: Ansible Core Team
|
author: ansible (@core)
|
||||||
options:
|
options:
|
||||||
become_user:
|
become_user:
|
||||||
description: User you 'become' to execute the task
|
description: User you 'become' to execute the task
|
||||||
@@ -82,7 +82,7 @@ DOCUMENTATION = '''
|
|||||||
|
|
||||||
import re
|
import re
|
||||||
|
|
||||||
from ansible.module_utils.common.text.converters import to_bytes
|
from ansible.module_utils._text import to_bytes
|
||||||
from ansible.plugins.become import BecomeBase
|
from ansible.plugins.become import BecomeBase
|
||||||
|
|
||||||
|
|
||||||
|
|||||||
@@ -5,11 +5,11 @@ from __future__ import (absolute_import, division, print_function)
|
|||||||
__metaclass__ = type
|
__metaclass__ = type
|
||||||
|
|
||||||
DOCUMENTATION = '''
|
DOCUMENTATION = '''
|
||||||
name: machinectl
|
become: machinectl
|
||||||
short_description: Systemd's machinectl privilege escalation
|
short_description: Systemd's machinectl privilege escalation
|
||||||
description:
|
description:
|
||||||
- This become plugins allows your remote/login user to execute commands as another user via the machinectl utility.
|
- This become plugins allows your remote/login user to execute commands as another user via the machinectl utility.
|
||||||
author: Ansible Core Team
|
author: ansible (@core)
|
||||||
options:
|
options:
|
||||||
become_user:
|
become_user:
|
||||||
description: User you 'become' to execute the task
|
description: User you 'become' to execute the task
|
||||||
|
|||||||
@@ -5,11 +5,11 @@ from __future__ import (absolute_import, division, print_function)
|
|||||||
__metaclass__ = type
|
__metaclass__ = type
|
||||||
|
|
||||||
DOCUMENTATION = '''
|
DOCUMENTATION = '''
|
||||||
name: pbrun
|
become: pbrun
|
||||||
short_description: PowerBroker run
|
short_description: PowerBroker run
|
||||||
description:
|
description:
|
||||||
- This become plugins allows your remote/login user to execute commands as another user via the pbrun utility.
|
- This become plugins allows your remote/login user to execute commands as another user via the pbrun utility.
|
||||||
author: Ansible Core Team
|
author: ansible (@core)
|
||||||
options:
|
options:
|
||||||
become_user:
|
become_user:
|
||||||
description: User you 'become' to execute the task
|
description: User you 'become' to execute the task
|
||||||
|
|||||||
@@ -5,11 +5,11 @@ from __future__ import (absolute_import, division, print_function)
|
|||||||
__metaclass__ = type
|
__metaclass__ = type
|
||||||
|
|
||||||
DOCUMENTATION = '''
|
DOCUMENTATION = '''
|
||||||
name: pfexec
|
become: pfexec
|
||||||
short_description: profile based execution
|
short_description: profile based execution
|
||||||
description:
|
description:
|
||||||
- This become plugins allows your remote/login user to execute commands as another user via the pfexec utility.
|
- This become plugins allows your remote/login user to execute commands as another user via the pfexec utility.
|
||||||
author: Ansible Core Team
|
author: ansible (@core)
|
||||||
options:
|
options:
|
||||||
become_user:
|
become_user:
|
||||||
description:
|
description:
|
||||||
|
|||||||
@@ -5,11 +5,11 @@ from __future__ import (absolute_import, division, print_function)
|
|||||||
__metaclass__ = type
|
__metaclass__ = type
|
||||||
|
|
||||||
DOCUMENTATION = '''
|
DOCUMENTATION = '''
|
||||||
name: pmrun
|
become: pmrun
|
||||||
short_description: Privilege Manager run
|
short_description: Privilege Manager run
|
||||||
description:
|
description:
|
||||||
- This become plugins allows your remote/login user to execute commands as another user via the pmrun utility.
|
- This become plugins allows your remote/login user to execute commands as another user via the pmrun utility.
|
||||||
author: Ansible Core Team
|
author: ansible (@core)
|
||||||
options:
|
options:
|
||||||
become_exe:
|
become_exe:
|
||||||
description: Sudo executable
|
description: Sudo executable
|
||||||
|
|||||||
@@ -5,7 +5,7 @@ from __future__ import (absolute_import, division, print_function)
|
|||||||
__metaclass__ = type
|
__metaclass__ = type
|
||||||
|
|
||||||
DOCUMENTATION = '''
|
DOCUMENTATION = '''
|
||||||
name: sesu
|
become: sesu
|
||||||
short_description: CA Privileged Access Manager
|
short_description: CA Privileged Access Manager
|
||||||
description:
|
description:
|
||||||
- This become plugins allows your remote/login user to execute commands as another user via the sesu utility.
|
- This become plugins allows your remote/login user to execute commands as another user via the sesu utility.
|
||||||
|
|||||||
@@ -1,91 +0,0 @@
|
|||||||
# -*- coding: utf-8 -*-
|
|
||||||
# Copyright: (c) 2021, Ansible Project
|
|
||||||
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
|
|
||||||
from __future__ import (absolute_import, division, print_function)
|
|
||||||
__metaclass__ = type
|
|
||||||
|
|
||||||
DOCUMENTATION = """
|
|
||||||
name: sudosu
|
|
||||||
short_description: Run tasks using sudo su -
|
|
||||||
description:
|
|
||||||
- This become plugins allows your remote/login user to execute commands as another user via the C(sudo) and C(su) utilities combined.
|
|
||||||
author:
|
|
||||||
- Dag Wieers (@dagwieers)
|
|
||||||
version_added: 2.4.0
|
|
||||||
options:
|
|
||||||
become_user:
|
|
||||||
description: User you 'become' to execute the task.
|
|
||||||
default: root
|
|
||||||
ini:
|
|
||||||
- section: privilege_escalation
|
|
||||||
key: become_user
|
|
||||||
- section: sudo_become_plugin
|
|
||||||
key: user
|
|
||||||
vars:
|
|
||||||
- name: ansible_become_user
|
|
||||||
- name: ansible_sudo_user
|
|
||||||
env:
|
|
||||||
- name: ANSIBLE_BECOME_USER
|
|
||||||
- name: ANSIBLE_SUDO_USER
|
|
||||||
become_flags:
|
|
||||||
description: Options to pass to C(sudo).
|
|
||||||
default: -H -S -n
|
|
||||||
ini:
|
|
||||||
- section: privilege_escalation
|
|
||||||
key: become_flags
|
|
||||||
- section: sudo_become_plugin
|
|
||||||
key: flags
|
|
||||||
vars:
|
|
||||||
- name: ansible_become_flags
|
|
||||||
- name: ansible_sudo_flags
|
|
||||||
env:
|
|
||||||
- name: ANSIBLE_BECOME_FLAGS
|
|
||||||
- name: ANSIBLE_SUDO_FLAGS
|
|
||||||
become_pass:
|
|
||||||
description: Password to pass to C(sudo).
|
|
||||||
required: false
|
|
||||||
vars:
|
|
||||||
- name: ansible_become_password
|
|
||||||
- name: ansible_become_pass
|
|
||||||
- name: ansible_sudo_pass
|
|
||||||
env:
|
|
||||||
- name: ANSIBLE_BECOME_PASS
|
|
||||||
- name: ANSIBLE_SUDO_PASS
|
|
||||||
ini:
|
|
||||||
- section: sudo_become_plugin
|
|
||||||
key: password
|
|
||||||
"""
|
|
||||||
|
|
||||||
|
|
||||||
from ansible.plugins.become import BecomeBase
|
|
||||||
|
|
||||||
|
|
||||||
class BecomeModule(BecomeBase):
|
|
||||||
|
|
||||||
name = 'community.general.sudosu'
|
|
||||||
|
|
||||||
# messages for detecting prompted password issues
|
|
||||||
fail = ('Sorry, try again.',)
|
|
||||||
missing = ('Sorry, a password is required to run sudo', 'sudo: a password is required')
|
|
||||||
|
|
||||||
def build_become_command(self, cmd, shell):
|
|
||||||
super(BecomeModule, self).build_become_command(cmd, shell)
|
|
||||||
|
|
||||||
if not cmd:
|
|
||||||
return cmd
|
|
||||||
|
|
||||||
becomecmd = 'sudo'
|
|
||||||
|
|
||||||
flags = self.get_option('become_flags') or ''
|
|
||||||
prompt = ''
|
|
||||||
if self.get_option('become_pass'):
|
|
||||||
self.prompt = '[sudo via ansible, key=%s] password:' % self._id
|
|
||||||
if flags: # this could be simplified, but kept as is for now for backwards string matching
|
|
||||||
flags = flags.replace('-n', '')
|
|
||||||
prompt = '-p "%s"' % (self.prompt)
|
|
||||||
|
|
||||||
user = self.get_option('become_user') or ''
|
|
||||||
if user:
|
|
||||||
user = '%s' % (user)
|
|
||||||
|
|
||||||
return ' '.join([becomecmd, flags, prompt, 'su -l', user, self._build_success_command(cmd, shell)])
|
|
||||||
40
plugins/cache/memcached.py
vendored
40
plugins/cache/memcached.py
vendored
@@ -1,4 +1,3 @@
|
|||||||
# -*- coding: utf-8 -*-
|
|
||||||
# (c) 2014, Brian Coca, Josh Drake, et al
|
# (c) 2014, Brian Coca, Josh Drake, et al
|
||||||
# (c) 2017 Ansible Project
|
# (c) 2017 Ansible Project
|
||||||
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
|
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||||
@@ -8,7 +7,7 @@ __metaclass__ = type
|
|||||||
|
|
||||||
DOCUMENTATION = '''
|
DOCUMENTATION = '''
|
||||||
author: Unknown (!UNKNOWN)
|
author: Unknown (!UNKNOWN)
|
||||||
name: memcached
|
cache: memcached
|
||||||
short_description: Use memcached DB for cache
|
short_description: Use memcached DB for cache
|
||||||
description:
|
description:
|
||||||
- This cache uses JSON formatted, per host records saved in memcached.
|
- This cache uses JSON formatted, per host records saved in memcached.
|
||||||
@@ -20,7 +19,6 @@ DOCUMENTATION = '''
|
|||||||
- List of connection information for the memcached DBs
|
- List of connection information for the memcached DBs
|
||||||
default: ['127.0.0.1:11211']
|
default: ['127.0.0.1:11211']
|
||||||
type: list
|
type: list
|
||||||
elements: string
|
|
||||||
env:
|
env:
|
||||||
- name: ANSIBLE_CACHE_PLUGIN_CONNECTION
|
- name: ANSIBLE_CACHE_PLUGIN_CONNECTION
|
||||||
ini:
|
ini:
|
||||||
@@ -55,14 +53,12 @@ from ansible import constants as C
|
|||||||
from ansible.errors import AnsibleError
|
from ansible.errors import AnsibleError
|
||||||
from ansible.module_utils.common._collections_compat import MutableSet
|
from ansible.module_utils.common._collections_compat import MutableSet
|
||||||
from ansible.plugins.cache import BaseCacheModule
|
from ansible.plugins.cache import BaseCacheModule
|
||||||
from ansible.release import __version__ as ansible_base_version
|
|
||||||
from ansible.utils.display import Display
|
from ansible.utils.display import Display
|
||||||
|
|
||||||
try:
|
try:
|
||||||
import memcache
|
import memcache
|
||||||
HAS_MEMCACHE = True
|
|
||||||
except ImportError:
|
except ImportError:
|
||||||
HAS_MEMCACHE = False
|
raise AnsibleError("python-memcached is required for the memcached fact cache")
|
||||||
|
|
||||||
display = Display()
|
display = Display()
|
||||||
|
|
||||||
@@ -155,16 +151,16 @@ class CacheModuleKeys(MutableSet):
|
|||||||
def __len__(self):
|
def __len__(self):
|
||||||
return len(self._keyset)
|
return len(self._keyset)
|
||||||
|
|
||||||
def add(self, value):
|
def add(self, key):
|
||||||
self._keyset[value] = time.time()
|
self._keyset[key] = time.time()
|
||||||
self._cache.set(self.PREFIX, self._keyset)
|
self._cache.set(self.PREFIX, self._keyset)
|
||||||
|
|
||||||
def discard(self, value):
|
def discard(self, key):
|
||||||
del self._keyset[value]
|
del self._keyset[key]
|
||||||
self._cache.set(self.PREFIX, self._keyset)
|
self._cache.set(self.PREFIX, self._keyset)
|
||||||
|
|
||||||
def remove_by_timerange(self, s_min, s_max):
|
def remove_by_timerange(self, s_min, s_max):
|
||||||
for k in list(self._keyset.keys()):
|
for k in self._keyset.keys():
|
||||||
t = self._keyset[k]
|
t = self._keyset[k]
|
||||||
if s_min < t < s_max:
|
if s_min < t < s_max:
|
||||||
del self._keyset[k]
|
del self._keyset[k]
|
||||||
@@ -176,14 +172,20 @@ class CacheModule(BaseCacheModule):
|
|||||||
def __init__(self, *args, **kwargs):
|
def __init__(self, *args, **kwargs):
|
||||||
connection = ['127.0.0.1:11211']
|
connection = ['127.0.0.1:11211']
|
||||||
|
|
||||||
super(CacheModule, self).__init__(*args, **kwargs)
|
try:
|
||||||
if self.get_option('_uri'):
|
super(CacheModule, self).__init__(*args, **kwargs)
|
||||||
connection = self.get_option('_uri')
|
if self.get_option('_uri'):
|
||||||
self._timeout = self.get_option('_timeout')
|
connection = self.get_option('_uri')
|
||||||
self._prefix = self.get_option('_prefix')
|
self._timeout = self.get_option('_timeout')
|
||||||
|
self._prefix = self.get_option('_prefix')
|
||||||
if not HAS_MEMCACHE:
|
except KeyError:
|
||||||
raise AnsibleError("python-memcached is required for the memcached fact cache")
|
display.deprecated('Rather than importing CacheModules directly, '
|
||||||
|
'use ansible.plugins.loader.cache_loader',
|
||||||
|
version='2.0.0', collection_name='community.general') # was Ansible 2.12
|
||||||
|
if C.CACHE_PLUGIN_CONNECTION:
|
||||||
|
connection = C.CACHE_PLUGIN_CONNECTION.split(',')
|
||||||
|
self._timeout = C.CACHE_PLUGIN_TIMEOUT
|
||||||
|
self._prefix = C.CACHE_PLUGIN_PREFIX
|
||||||
|
|
||||||
self._cache = {}
|
self._cache = {}
|
||||||
self._db = ProxyClientPool(connection, debug=0)
|
self._db = ProxyClientPool(connection, debug=0)
|
||||||
|
|||||||
3
plugins/cache/pickle.py
vendored
3
plugins/cache/pickle.py
vendored
@@ -1,4 +1,3 @@
|
|||||||
# -*- coding: utf-8 -*-
|
|
||||||
# (c) 2017, Brian Coca
|
# (c) 2017, Brian Coca
|
||||||
# (c) 2017 Ansible Project
|
# (c) 2017 Ansible Project
|
||||||
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
|
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||||
@@ -8,7 +7,7 @@ from __future__ import (absolute_import, division, print_function)
|
|||||||
__metaclass__ = type
|
__metaclass__ = type
|
||||||
|
|
||||||
DOCUMENTATION = '''
|
DOCUMENTATION = '''
|
||||||
name: pickle
|
cache: pickle
|
||||||
short_description: Pickle formatted files.
|
short_description: Pickle formatted files.
|
||||||
description:
|
description:
|
||||||
- This cache uses Python's pickle serialization format, in per host files, saved to the filesystem.
|
- This cache uses Python's pickle serialization format, in per host files, saved to the filesystem.
|
||||||
|
|||||||
113
plugins/cache/redis.py
vendored
113
plugins/cache/redis.py
vendored
@@ -1,4 +1,3 @@
|
|||||||
# -*- coding: utf-8 -*-
|
|
||||||
# (c) 2014, Brian Coca, Josh Drake, et al
|
# (c) 2014, Brian Coca, Josh Drake, et al
|
||||||
# (c) 2017 Ansible Project
|
# (c) 2017 Ansible Project
|
||||||
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
|
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||||
@@ -7,7 +6,7 @@ __metaclass__ = type
|
|||||||
|
|
||||||
DOCUMENTATION = '''
|
DOCUMENTATION = '''
|
||||||
author: Unknown (!UNKNOWN)
|
author: Unknown (!UNKNOWN)
|
||||||
name: redis
|
cache: redis
|
||||||
short_description: Use Redis DB for cache
|
short_description: Use Redis DB for cache
|
||||||
description:
|
description:
|
||||||
- This cache uses JSON formatted, per host records saved in Redis.
|
- This cache uses JSON formatted, per host records saved in Redis.
|
||||||
@@ -19,7 +18,6 @@ DOCUMENTATION = '''
|
|||||||
- A colon separated string of connection information for Redis.
|
- A colon separated string of connection information for Redis.
|
||||||
- The format is C(host:port:db:password), for example C(localhost:6379:0:changeme).
|
- The format is C(host:port:db:password), for example C(localhost:6379:0:changeme).
|
||||||
- To use encryption in transit, prefix the connection with C(tls://), as in C(tls://localhost:6379:0:changeme).
|
- To use encryption in transit, prefix the connection with C(tls://), as in C(tls://localhost:6379:0:changeme).
|
||||||
- To use redis sentinel, use separator C(;), for example C(localhost:26379;localhost:26379;0:changeme). Requires redis>=2.9.0.
|
|
||||||
required: True
|
required: True
|
||||||
env:
|
env:
|
||||||
- name: ANSIBLE_CACHE_PLUGIN_CONNECTION
|
- name: ANSIBLE_CACHE_PLUGIN_CONNECTION
|
||||||
@@ -34,23 +32,6 @@ DOCUMENTATION = '''
|
|||||||
ini:
|
ini:
|
||||||
- key: fact_caching_prefix
|
- key: fact_caching_prefix
|
||||||
section: defaults
|
section: defaults
|
||||||
_keyset_name:
|
|
||||||
description: User defined name for cache keyset name.
|
|
||||||
default: ansible_cache_keys
|
|
||||||
env:
|
|
||||||
- name: ANSIBLE_CACHE_REDIS_KEYSET_NAME
|
|
||||||
ini:
|
|
||||||
- key: fact_caching_redis_keyset_name
|
|
||||||
section: defaults
|
|
||||||
version_added: 1.3.0
|
|
||||||
_sentinel_service_name:
|
|
||||||
description: The redis sentinel service name (or referenced as cluster name).
|
|
||||||
env:
|
|
||||||
- name: ANSIBLE_CACHE_REDIS_SENTINEL
|
|
||||||
ini:
|
|
||||||
- key: fact_caching_redis_sentinel
|
|
||||||
section: defaults
|
|
||||||
version_added: 1.3.0
|
|
||||||
_timeout:
|
_timeout:
|
||||||
default: 86400
|
default: 86400
|
||||||
description: Expiration timeout in seconds for the cache plugin data. Set to 0 to never expire
|
description: Expiration timeout in seconds for the cache plugin data. Set to 0 to never expire
|
||||||
@@ -62,23 +43,19 @@ DOCUMENTATION = '''
|
|||||||
type: integer
|
type: integer
|
||||||
'''
|
'''
|
||||||
|
|
||||||
import re
|
|
||||||
import time
|
import time
|
||||||
import json
|
import json
|
||||||
|
|
||||||
from ansible import constants as C
|
from ansible import constants as C
|
||||||
from ansible.errors import AnsibleError
|
from ansible.errors import AnsibleError
|
||||||
from ansible.module_utils.common.text.converters import to_native
|
|
||||||
from ansible.parsing.ajson import AnsibleJSONEncoder, AnsibleJSONDecoder
|
from ansible.parsing.ajson import AnsibleJSONEncoder, AnsibleJSONDecoder
|
||||||
from ansible.plugins.cache import BaseCacheModule
|
from ansible.plugins.cache import BaseCacheModule
|
||||||
from ansible.release import __version__ as ansible_base_version
|
|
||||||
from ansible.utils.display import Display
|
from ansible.utils.display import Display
|
||||||
|
|
||||||
try:
|
try:
|
||||||
from redis import StrictRedis, VERSION
|
from redis import StrictRedis, VERSION
|
||||||
HAS_REDIS = True
|
|
||||||
except ImportError:
|
except ImportError:
|
||||||
HAS_REDIS = False
|
raise AnsibleError("The 'redis' python module (version 2.4.5 or newer) is required for the redis fact cache, 'pip install redis'")
|
||||||
|
|
||||||
display = Display()
|
display = Display()
|
||||||
|
|
||||||
@@ -92,80 +69,34 @@ class CacheModule(BaseCacheModule):
|
|||||||
to expire keys. This mechanism is used or a pattern matched 'scan' for
|
to expire keys. This mechanism is used or a pattern matched 'scan' for
|
||||||
performance.
|
performance.
|
||||||
"""
|
"""
|
||||||
_sentinel_service_name = None
|
|
||||||
re_url_conn = re.compile(r'^([^:]+|\[[^]]+\]):(\d+):(\d+)(?::(.*))?$')
|
|
||||||
re_sent_conn = re.compile(r'^(.*):(\d+)$')
|
|
||||||
|
|
||||||
def __init__(self, *args, **kwargs):
|
def __init__(self, *args, **kwargs):
|
||||||
uri = ''
|
uri = ''
|
||||||
|
|
||||||
super(CacheModule, self).__init__(*args, **kwargs)
|
try:
|
||||||
if self.get_option('_uri'):
|
super(CacheModule, self).__init__(*args, **kwargs)
|
||||||
uri = self.get_option('_uri')
|
if self.get_option('_uri'):
|
||||||
self._timeout = float(self.get_option('_timeout'))
|
uri = self.get_option('_uri')
|
||||||
self._prefix = self.get_option('_prefix')
|
self._timeout = float(self.get_option('_timeout'))
|
||||||
self._keys_set = self.get_option('_keyset_name')
|
self._prefix = self.get_option('_prefix')
|
||||||
self._sentinel_service_name = self.get_option('_sentinel_service_name')
|
except KeyError:
|
||||||
|
display.deprecated('Rather than importing CacheModules directly, '
|
||||||
if not HAS_REDIS:
|
'use ansible.plugins.loader.cache_loader',
|
||||||
raise AnsibleError("The 'redis' python module (version 2.4.5 or newer) is required for the redis fact cache, 'pip install redis'")
|
version='2.0.0', collection_name='community.general') # was Ansible 2.12
|
||||||
|
if C.CACHE_PLUGIN_CONNECTION:
|
||||||
|
uri = C.CACHE_PLUGIN_CONNECTION
|
||||||
|
self._timeout = float(C.CACHE_PLUGIN_TIMEOUT)
|
||||||
|
self._prefix = C.CACHE_PLUGIN_PREFIX
|
||||||
|
|
||||||
self._cache = {}
|
self._cache = {}
|
||||||
kw = {}
|
kw = {}
|
||||||
|
|
||||||
# tls connection
|
|
||||||
tlsprefix = 'tls://'
|
tlsprefix = 'tls://'
|
||||||
if uri.startswith(tlsprefix):
|
if uri.startswith(tlsprefix):
|
||||||
kw['ssl'] = True
|
kw['ssl'] = True
|
||||||
uri = uri[len(tlsprefix):]
|
uri = uri[len(tlsprefix):]
|
||||||
|
|
||||||
# redis sentinel connection
|
connection = uri.split(':')
|
||||||
if self._sentinel_service_name:
|
self._db = StrictRedis(*connection, **kw)
|
||||||
self._db = self._get_sentinel_connection(uri, kw)
|
self._keys_set = 'ansible_cache_keys'
|
||||||
# normal connection
|
|
||||||
else:
|
|
||||||
connection = self._parse_connection(self.re_url_conn, uri)
|
|
||||||
self._db = StrictRedis(*connection, **kw)
|
|
||||||
|
|
||||||
display.vv('Redis connection: %s' % self._db)
|
|
||||||
|
|
||||||
@staticmethod
|
|
||||||
def _parse_connection(re_patt, uri):
|
|
||||||
match = re_patt.match(uri)
|
|
||||||
if not match:
|
|
||||||
raise AnsibleError("Unable to parse connection string")
|
|
||||||
return match.groups()
|
|
||||||
|
|
||||||
def _get_sentinel_connection(self, uri, kw):
|
|
||||||
"""
|
|
||||||
get sentinel connection details from _uri
|
|
||||||
"""
|
|
||||||
try:
|
|
||||||
from redis.sentinel import Sentinel
|
|
||||||
except ImportError:
|
|
||||||
raise AnsibleError("The 'redis' python module (version 2.9.0 or newer) is required to use redis sentinel.")
|
|
||||||
|
|
||||||
if ';' not in uri:
|
|
||||||
raise AnsibleError('_uri does not have sentinel syntax.')
|
|
||||||
|
|
||||||
# format: "localhost:26379;localhost2:26379;0:changeme"
|
|
||||||
connections = uri.split(';')
|
|
||||||
connection_args = connections.pop(-1)
|
|
||||||
if len(connection_args) > 0: # hanle if no db nr is given
|
|
||||||
connection_args = connection_args.split(':')
|
|
||||||
kw['db'] = connection_args.pop(0)
|
|
||||||
try:
|
|
||||||
kw['password'] = connection_args.pop(0)
|
|
||||||
except IndexError:
|
|
||||||
pass # password is optional
|
|
||||||
|
|
||||||
sentinels = [self._parse_connection(self.re_sent_conn, shost) for shost in connections]
|
|
||||||
display.vv('\nUsing redis sentinels: %s' % sentinels)
|
|
||||||
scon = Sentinel(sentinels, **kw)
|
|
||||||
try:
|
|
||||||
return scon.master_for(self._sentinel_service_name, socket_timeout=0.2)
|
|
||||||
except Exception as exc:
|
|
||||||
raise AnsibleError('Could not connect to redis sentinel: %s' % to_native(exc))
|
|
||||||
|
|
||||||
def _make_key(self, key):
|
def _make_key(self, key):
|
||||||
return self._prefix + key
|
return self._prefix + key
|
||||||
@@ -218,12 +149,14 @@ class CacheModule(BaseCacheModule):
|
|||||||
self._db.zrem(self._keys_set, key)
|
self._db.zrem(self._keys_set, key)
|
||||||
|
|
||||||
def flush(self):
|
def flush(self):
|
||||||
for key in list(self.keys()):
|
for key in self.keys():
|
||||||
self.delete(key)
|
self.delete(key)
|
||||||
|
|
||||||
def copy(self):
|
def copy(self):
|
||||||
# TODO: there is probably a better way to do this in redis
|
# TODO: there is probably a better way to do this in redis
|
||||||
ret = dict([(k, self.get(k)) for k in self.keys()])
|
ret = dict()
|
||||||
|
for key in self.keys():
|
||||||
|
ret[key] = self.get(key)
|
||||||
return ret
|
return ret
|
||||||
|
|
||||||
def __getstate__(self):
|
def __getstate__(self):
|
||||||
|
|||||||
3
plugins/cache/yaml.py
vendored
3
plugins/cache/yaml.py
vendored
@@ -1,4 +1,3 @@
|
|||||||
# -*- coding: utf-8 -*-
|
|
||||||
# (c) 2017, Brian Coca
|
# (c) 2017, Brian Coca
|
||||||
# (c) 2017 Ansible Project
|
# (c) 2017 Ansible Project
|
||||||
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
|
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||||
@@ -8,7 +7,7 @@ from __future__ import (absolute_import, division, print_function)
|
|||||||
__metaclass__ = type
|
__metaclass__ = type
|
||||||
|
|
||||||
DOCUMENTATION = '''
|
DOCUMENTATION = '''
|
||||||
name: yaml
|
cache: yaml
|
||||||
short_description: YAML formatted files.
|
short_description: YAML formatted files.
|
||||||
description:
|
description:
|
||||||
- This cache uses YAML formatted, per host, files saved to the filesystem.
|
- This cache uses YAML formatted, per host, files saved to the filesystem.
|
||||||
|
|||||||
61
plugins/callback/actionable.py
Normal file
61
plugins/callback/actionable.py
Normal file
@@ -0,0 +1,61 @@
|
|||||||
|
# (c) 2015, Andrew Gaffney <andrew@agaffney.org>
|
||||||
|
# (c) 2017 Ansible Project
|
||||||
|
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||||
|
|
||||||
|
# Make coding more python3-ish
|
||||||
|
from __future__ import (absolute_import, division, print_function)
|
||||||
|
__metaclass__ = type
|
||||||
|
|
||||||
|
DOCUMENTATION = '''
|
||||||
|
author: Unknown (!UNKNOWN)
|
||||||
|
callback: actionable
|
||||||
|
type: stdout
|
||||||
|
short_description: shows only items that need attention
|
||||||
|
description:
|
||||||
|
- Use this callback when you dont care about OK nor Skipped.
|
||||||
|
- This callback suppresses any non Failed or Changed status.
|
||||||
|
deprecated:
|
||||||
|
why: The 'default' callback plugin now supports this functionality
|
||||||
|
removed_in: '2.0.0' # was Ansible 2.11
|
||||||
|
alternative: "'default' callback plugin with 'display_skipped_hosts = no' and 'display_ok_hosts = no' options"
|
||||||
|
extends_documentation_fragment:
|
||||||
|
- default_callback
|
||||||
|
requirements:
|
||||||
|
- set as stdout callback in configuration
|
||||||
|
# Override defaults from 'default' callback plugin
|
||||||
|
options:
|
||||||
|
display_skipped_hosts:
|
||||||
|
name: Show skipped hosts
|
||||||
|
description: "Toggle to control displaying skipped task/host results in a task"
|
||||||
|
type: bool
|
||||||
|
default: no
|
||||||
|
env:
|
||||||
|
- name: DISPLAY_SKIPPED_HOSTS
|
||||||
|
deprecated:
|
||||||
|
why: environment variables without "ANSIBLE_" prefix are deprecated
|
||||||
|
version: "2.0.0" # was Ansible 2.12
|
||||||
|
alternatives: the "ANSIBLE_DISPLAY_SKIPPED_HOSTS" environment variable
|
||||||
|
- name: ANSIBLE_DISPLAY_SKIPPED_HOSTS
|
||||||
|
ini:
|
||||||
|
- key: display_skipped_hosts
|
||||||
|
section: defaults
|
||||||
|
display_ok_hosts:
|
||||||
|
name: Show 'ok' hosts
|
||||||
|
description: "Toggle to control displaying 'ok' task/host results in a task"
|
||||||
|
type: bool
|
||||||
|
default: no
|
||||||
|
env:
|
||||||
|
- name: ANSIBLE_DISPLAY_OK_HOSTS
|
||||||
|
ini:
|
||||||
|
- key: display_ok_hosts
|
||||||
|
section: defaults
|
||||||
|
'''
|
||||||
|
|
||||||
|
from ansible.plugins.callback.default import CallbackModule as CallbackModule_default
|
||||||
|
|
||||||
|
|
||||||
|
class CallbackModule(CallbackModule_default):
|
||||||
|
|
||||||
|
CALLBACK_VERSION = 2.0
|
||||||
|
CALLBACK_TYPE = 'stdout'
|
||||||
|
CALLBACK_NAME = 'community.general.actionable'
|
||||||
@@ -8,7 +8,7 @@ __metaclass__ = type
|
|||||||
|
|
||||||
DOCUMENTATION = '''
|
DOCUMENTATION = '''
|
||||||
author: Unknown (!UNKNOWN)
|
author: Unknown (!UNKNOWN)
|
||||||
name: cgroup_memory_recap
|
callback: cgroup_memory_recap
|
||||||
type: aggregate
|
type: aggregate
|
||||||
requirements:
|
requirements:
|
||||||
- whitelist in configuration
|
- whitelist in configuration
|
||||||
|
|||||||
@@ -1,4 +1,3 @@
|
|||||||
# -*- coding: utf-8 -*-
|
|
||||||
# (C) 2012, Michael DeHaan, <michael.dehaan@gmail.com>
|
# (C) 2012, Michael DeHaan, <michael.dehaan@gmail.com>
|
||||||
# (c) 2017 Ansible Project
|
# (c) 2017 Ansible Project
|
||||||
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
|
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||||
@@ -8,7 +7,7 @@ __metaclass__ = type
|
|||||||
|
|
||||||
DOCUMENTATION = '''
|
DOCUMENTATION = '''
|
||||||
author: Unknown (!UNKNOWN)
|
author: Unknown (!UNKNOWN)
|
||||||
name: context_demo
|
callback: context_demo
|
||||||
type: aggregate
|
type: aggregate
|
||||||
short_description: demo callback that adds play/task context
|
short_description: demo callback that adds play/task context
|
||||||
description:
|
description:
|
||||||
|
|||||||
@@ -1,4 +1,3 @@
|
|||||||
# -*- coding: utf-8 -*-
|
|
||||||
# (c) 2018, Ivan Aragones Muniesa <ivan.aragones.muniesa@gmail.com>
|
# (c) 2018, Ivan Aragones Muniesa <ivan.aragones.muniesa@gmail.com>
|
||||||
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
|
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||||
'''
|
'''
|
||||||
@@ -10,7 +9,7 @@ __metaclass__ = type
|
|||||||
|
|
||||||
DOCUMENTATION = '''
|
DOCUMENTATION = '''
|
||||||
author: Unknown (!UNKNOWN)
|
author: Unknown (!UNKNOWN)
|
||||||
name: counter_enabled
|
callback: counter_enabled
|
||||||
type: stdout
|
type: stdout
|
||||||
short_description: adds counters to the output items (tasks and hosts/task)
|
short_description: adds counters to the output items (tasks and hosts/task)
|
||||||
description:
|
description:
|
||||||
@@ -45,8 +44,6 @@ class CallbackModule(CallbackBase):
|
|||||||
_task_total = 0
|
_task_total = 0
|
||||||
_host_counter = 1
|
_host_counter = 1
|
||||||
_host_total = 0
|
_host_total = 0
|
||||||
_current_batch_total = 0
|
|
||||||
_previous_batch_total = 0
|
|
||||||
|
|
||||||
def __init__(self):
|
def __init__(self):
|
||||||
super(CallbackModule, self).__init__()
|
super(CallbackModule, self).__init__()
|
||||||
@@ -78,11 +75,8 @@ class CallbackModule(CallbackBase):
|
|||||||
self._display.banner(msg)
|
self._display.banner(msg)
|
||||||
self._play = play
|
self._play = play
|
||||||
|
|
||||||
self._previous_batch_total = self._current_batch_total
|
|
||||||
self._current_batch_total = self._previous_batch_total + len(self._all_vars()['vars']['ansible_play_batch'])
|
|
||||||
self._host_total = len(self._all_vars()['vars']['ansible_play_hosts_all'])
|
self._host_total = len(self._all_vars()['vars']['ansible_play_hosts_all'])
|
||||||
self._task_total = len(self._play.get_tasks()[0])
|
self._task_total = len(self._play.get_tasks()[0])
|
||||||
self._task_counter = 1
|
|
||||||
|
|
||||||
def v2_playbook_on_stats(self, stats):
|
def v2_playbook_on_stats(self, stats):
|
||||||
self._display.banner("PLAY RECAP")
|
self._display.banner("PLAY RECAP")
|
||||||
@@ -150,7 +144,7 @@ class CallbackModule(CallbackBase):
|
|||||||
path = task.get_path()
|
path = task.get_path()
|
||||||
if path:
|
if path:
|
||||||
self._display.display("task path: %s" % path, color=C.COLOR_DEBUG)
|
self._display.display("task path: %s" % path, color=C.COLOR_DEBUG)
|
||||||
self._host_counter = self._previous_batch_total
|
self._host_counter = 0
|
||||||
self._task_counter += 1
|
self._task_counter += 1
|
||||||
|
|
||||||
def v2_runner_on_ok(self, result):
|
def v2_runner_on_ok(self, result):
|
||||||
|
|||||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user