mirror of
https://github.com/ansible-collections/community.crypto.git
synced 2026-05-07 05:43:06 +00:00
Compare commits
67 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
94416989a8 | ||
|
|
b08afe4237 | ||
|
|
7294841a28 | ||
|
|
9b8e4e81a9 | ||
|
|
efda8596a5 | ||
|
|
318462fa24 | ||
|
|
a3a5284f97 | ||
|
|
990b40df3e | ||
|
|
5fbf35df86 | ||
|
|
56f004dc63 | ||
|
|
44bcc8cebc | ||
|
|
a5a4e022ba | ||
|
|
f758d94fba | ||
|
|
82f0176773 | ||
|
|
8156468898 | ||
|
|
12f958c955 | ||
|
|
83beb7148c | ||
|
|
645b7bf9ed | ||
|
|
fbcb89f092 | ||
|
|
86db561193 | ||
|
|
0b8f3306c7 | ||
|
|
5231ac8f3f | ||
|
|
e8fec768cc | ||
|
|
ef230011fd | ||
|
|
65872e884f | ||
|
|
641e63b08c | ||
|
|
d8f838c365 | ||
|
|
266082db72 | ||
|
|
718021b714 | ||
|
|
d368d1943d | ||
|
|
f73a1ce590 | ||
|
|
5bcbd4d0f4 | ||
|
|
797bd8a6e2 | ||
|
|
23de865563 | ||
|
|
4e8a0e456b | ||
|
|
278dcc5dda | ||
|
|
805771d2ed | ||
|
|
5ab56c431f | ||
|
|
aec1826c34 | ||
|
|
04a0d38e3b | ||
|
|
aa9e7b6dfb | ||
|
|
ac134ee5f5 | ||
|
|
154f3c6cd7 | ||
|
|
594ece1a70 | ||
|
|
33ef158b09 | ||
|
|
51a4f76f26 | ||
|
|
f04f0c883e | ||
|
|
72d04577df | ||
|
|
194ab4694e | ||
|
|
04967efe26 | ||
|
|
dbff2a69e2 | ||
|
|
d8773697de | ||
|
|
046aeab5e2 | ||
|
|
a9d6e0048c | ||
|
|
a2d821f960 | ||
|
|
b1451b3460 | ||
|
|
8dabbd8f94 | ||
|
|
a1669d490f | ||
|
|
9ac42ffb11 | ||
|
|
e58fe63dde | ||
|
|
0d1f260328 | ||
|
|
ba55ba7381 | ||
|
|
260bdb1572 | ||
|
|
7d5ebad188 | ||
|
|
78d9fe5813 | ||
|
|
a42e541326 | ||
|
|
673b18d9a9 |
31
.ansible-lint
Normal file
31
.ansible-lint
Normal file
@@ -0,0 +1,31 @@
|
|||||||
|
---
|
||||||
|
# Copyright (c) Ansible Project
|
||||||
|
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||||
|
# SPDX-License-Identifier: GPL-3.0-or-later
|
||||||
|
|
||||||
|
skip_list:
|
||||||
|
# Ignore rules that make no sense:
|
||||||
|
- galaxy[tags]
|
||||||
|
- galaxy[version-incorrect]
|
||||||
|
- meta-runtime[unsupported-version]
|
||||||
|
- no-changed-when
|
||||||
|
- sanity[cannot-ignore] # some of the rules you cannot ignore actually MUST be ignored, like yamllint:unparsable-with-libyaml
|
||||||
|
- yaml # we're using yamllint ourselves
|
||||||
|
|
||||||
|
# To be checked and maybe fixed:
|
||||||
|
- fqcn[action]
|
||||||
|
- fqcn[action-core]
|
||||||
|
- ignore-errors
|
||||||
|
- jinja[spacing]
|
||||||
|
- key-order[task]
|
||||||
|
- name[casing]
|
||||||
|
- name[missing]
|
||||||
|
- name[play]
|
||||||
|
- name[template]
|
||||||
|
- no-free-form
|
||||||
|
- no-handler
|
||||||
|
- risky-file-permissions
|
||||||
|
- risky-shell-pipe
|
||||||
|
- var-naming[no-reserved]
|
||||||
|
- var-naming[pattern]
|
||||||
|
- var-naming[read-only]
|
||||||
@@ -36,8 +36,6 @@ variables:
|
|||||||
value: ansible_collections/community/crypto
|
value: ansible_collections/community/crypto
|
||||||
- name: coverageBranches
|
- name: coverageBranches
|
||||||
value: main
|
value: main
|
||||||
- name: pipelinesCoverage
|
|
||||||
value: coverage
|
|
||||||
- name: entryPoint
|
- name: entryPoint
|
||||||
value: tests/utils/shippable/shippable.sh
|
value: tests/utils/shippable/shippable.sh
|
||||||
- name: fetchDepth
|
- name: fetchDepth
|
||||||
@@ -61,8 +59,6 @@ stages:
|
|||||||
targets:
|
targets:
|
||||||
- name: Sanity
|
- name: Sanity
|
||||||
test: 'devel/sanity/1'
|
test: 'devel/sanity/1'
|
||||||
- name: Sanity Extra # Only on devel
|
|
||||||
test: 'devel/sanity/extra'
|
|
||||||
- name: Units
|
- name: Units
|
||||||
test: 'devel/units/1'
|
test: 'devel/units/1'
|
||||||
- stage: Ansible_2_18
|
- stage: Ansible_2_18
|
||||||
@@ -87,17 +83,6 @@ stages:
|
|||||||
test: '2.17/sanity/1'
|
test: '2.17/sanity/1'
|
||||||
- name: Units
|
- name: Units
|
||||||
test: '2.17/units/1'
|
test: '2.17/units/1'
|
||||||
- stage: Ansible_2_16
|
|
||||||
displayName: Sanity & Units 2.16
|
|
||||||
dependsOn: []
|
|
||||||
jobs:
|
|
||||||
- template: templates/matrix.yml
|
|
||||||
parameters:
|
|
||||||
targets:
|
|
||||||
- name: Sanity
|
|
||||||
test: '2.16/sanity/1'
|
|
||||||
- name: Units
|
|
||||||
test: '2.16/units/1'
|
|
||||||
### Docker
|
### Docker
|
||||||
- stage: Docker_devel
|
- stage: Docker_devel
|
||||||
displayName: Docker devel
|
displayName: Docker devel
|
||||||
@@ -150,23 +135,6 @@ stages:
|
|||||||
groups:
|
groups:
|
||||||
- 1
|
- 1
|
||||||
- 2
|
- 2
|
||||||
- stage: Docker_2_16
|
|
||||||
displayName: Docker 2.16
|
|
||||||
dependsOn: []
|
|
||||||
jobs:
|
|
||||||
- template: templates/matrix.yml
|
|
||||||
parameters:
|
|
||||||
testFormat: 2.16/linux/{0}
|
|
||||||
targets:
|
|
||||||
- name: Fedora 38
|
|
||||||
test: fedora38
|
|
||||||
- name: openSUSE 15
|
|
||||||
test: opensuse15
|
|
||||||
- name: Alpine 3
|
|
||||||
test: alpine3
|
|
||||||
groups:
|
|
||||||
- 1
|
|
||||||
- 2
|
|
||||||
|
|
||||||
### Community Docker
|
### Community Docker
|
||||||
- stage: Docker_community_devel
|
- stage: Docker_community_devel
|
||||||
@@ -177,10 +145,10 @@ stages:
|
|||||||
parameters:
|
parameters:
|
||||||
testFormat: devel/linux-community/{0}
|
testFormat: devel/linux-community/{0}
|
||||||
targets:
|
targets:
|
||||||
- name: Debian Bullseye
|
|
||||||
test: debian-bullseye/3.9
|
|
||||||
- name: Debian Bookworm
|
- name: Debian Bookworm
|
||||||
test: debian-bookworm/3.11
|
test: debian-bookworm/3.11
|
||||||
|
- name: Debian Bullseye
|
||||||
|
test: debian-bullseye/3.9
|
||||||
- name: ArchLinux
|
- name: ArchLinux
|
||||||
test: archlinux/3.13
|
test: archlinux/3.13
|
||||||
groups:
|
groups:
|
||||||
@@ -214,14 +182,14 @@ stages:
|
|||||||
parameters:
|
parameters:
|
||||||
testFormat: devel/{0}
|
testFormat: devel/{0}
|
||||||
targets:
|
targets:
|
||||||
- name: macOS 14.3
|
- name: macOS 15.3
|
||||||
test: macos/14.3
|
test: macos/15.3
|
||||||
- name: RHEL 9.5
|
- name: RHEL 9.5
|
||||||
test: rhel/9.5
|
test: rhel/9.5
|
||||||
- name: FreeBSD 14.2
|
- name: FreeBSD 14.2
|
||||||
test: freebsd/14.2
|
test: freebsd/14.2
|
||||||
- name: FreeBSD 13.4
|
- name: FreeBSD 13.5
|
||||||
test: freebsd/13.4
|
test: freebsd/13.5
|
||||||
groups:
|
groups:
|
||||||
- 1
|
- 1
|
||||||
- 2
|
- 2
|
||||||
@@ -233,6 +201,8 @@ stages:
|
|||||||
parameters:
|
parameters:
|
||||||
testFormat: 2.18/{0}
|
testFormat: 2.18/{0}
|
||||||
targets:
|
targets:
|
||||||
|
- name: macOS 14.3
|
||||||
|
test: macos/14.3
|
||||||
- name: RHEL 9.4
|
- name: RHEL 9.4
|
||||||
test: rhel/9.4
|
test: rhel/9.4
|
||||||
- name: FreeBSD 14.1
|
- name: FreeBSD 14.1
|
||||||
@@ -252,29 +222,6 @@ stages:
|
|||||||
test: rhel/9.3
|
test: rhel/9.3
|
||||||
- name: FreeBSD 13.3
|
- name: FreeBSD 13.3
|
||||||
test: freebsd/13.3
|
test: freebsd/13.3
|
||||||
- name: FreeBSD 14.0
|
|
||||||
test: freebsd/14.0
|
|
||||||
groups:
|
|
||||||
- 1
|
|
||||||
- 2
|
|
||||||
- stage: Remote_2_16
|
|
||||||
displayName: Remote 2.16
|
|
||||||
dependsOn: []
|
|
||||||
jobs:
|
|
||||||
- template: templates/matrix.yml
|
|
||||||
parameters:
|
|
||||||
testFormat: 2.16/{0}
|
|
||||||
targets:
|
|
||||||
- name: macOS 13.2
|
|
||||||
test: macos/13.2
|
|
||||||
- name: RHEL 9.2
|
|
||||||
test: rhel/9.2
|
|
||||||
- name: RHEL 8.8
|
|
||||||
test: rhel/8.8
|
|
||||||
- name: RHEL 7.9
|
|
||||||
test: rhel/7.9
|
|
||||||
# - name: FreeBSD 13.2
|
|
||||||
# test: freebsd/13.2
|
|
||||||
groups:
|
groups:
|
||||||
- 1
|
- 1
|
||||||
- 2
|
- 2
|
||||||
@@ -289,8 +236,8 @@ stages:
|
|||||||
testFormat: devel/generic/{0}
|
testFormat: devel/generic/{0}
|
||||||
targets:
|
targets:
|
||||||
- test: "3.8"
|
- test: "3.8"
|
||||||
# - test: "3.9"
|
- test: "3.9"
|
||||||
# - test: "3.10"
|
- test: "3.10"
|
||||||
- test: "3.11"
|
- test: "3.11"
|
||||||
- test: "3.13"
|
- test: "3.13"
|
||||||
groups:
|
groups:
|
||||||
@@ -324,21 +271,6 @@ stages:
|
|||||||
groups:
|
groups:
|
||||||
- 1
|
- 1
|
||||||
- 2
|
- 2
|
||||||
- stage: Generic_2_16
|
|
||||||
displayName: Generic 2.16
|
|
||||||
dependsOn: []
|
|
||||||
jobs:
|
|
||||||
- template: templates/matrix.yml
|
|
||||||
parameters:
|
|
||||||
nameFormat: Python {0}
|
|
||||||
testFormat: 2.16/generic/{0}
|
|
||||||
targets:
|
|
||||||
- test: "2.7"
|
|
||||||
- test: "3.6"
|
|
||||||
- test: "3.11"
|
|
||||||
groups:
|
|
||||||
- 1
|
|
||||||
- 2
|
|
||||||
|
|
||||||
## Finally
|
## Finally
|
||||||
|
|
||||||
@@ -348,20 +280,16 @@ stages:
|
|||||||
- Ansible_devel
|
- Ansible_devel
|
||||||
- Ansible_2_18
|
- Ansible_2_18
|
||||||
- Ansible_2_17
|
- Ansible_2_17
|
||||||
- Ansible_2_16
|
|
||||||
- Remote_devel_extra_vms
|
- Remote_devel_extra_vms
|
||||||
- Remote_devel
|
- Remote_devel
|
||||||
- Remote_2_18
|
- Remote_2_18
|
||||||
- Remote_2_17
|
- Remote_2_17
|
||||||
- Remote_2_16
|
|
||||||
- Docker_devel
|
- Docker_devel
|
||||||
- Docker_2_18
|
- Docker_2_18
|
||||||
- Docker_2_17
|
- Docker_2_17
|
||||||
- Docker_2_16
|
|
||||||
- Docker_community_devel
|
- Docker_community_devel
|
||||||
- Generic_devel
|
- Generic_devel
|
||||||
- Generic_2_18
|
- Generic_2_18
|
||||||
- Generic_2_17
|
- Generic_2_17
|
||||||
- Generic_2_16
|
|
||||||
jobs:
|
jobs:
|
||||||
- template: templates/coverage.yml
|
- template: templates/coverage.yml
|
||||||
|
|||||||
@@ -28,16 +28,6 @@ jobs:
|
|||||||
- bash: .azure-pipelines/scripts/report-coverage.sh
|
- bash: .azure-pipelines/scripts/report-coverage.sh
|
||||||
displayName: Generate Coverage Report
|
displayName: Generate Coverage Report
|
||||||
condition: gt(variables.coverageFileCount, 0)
|
condition: gt(variables.coverageFileCount, 0)
|
||||||
- task: PublishCodeCoverageResults@1
|
|
||||||
inputs:
|
|
||||||
codeCoverageTool: Cobertura
|
|
||||||
# Azure Pipelines only accepts a single coverage data file.
|
|
||||||
# That means only Python or PowerShell coverage can be uploaded, but not both.
|
|
||||||
# Set the "pipelinesCoverage" variable to determine which type is uploaded.
|
|
||||||
# Use "coverage" for Python and "coverage-powershell" for PowerShell.
|
|
||||||
summaryFileLocation: "$(outputPath)/reports/$(pipelinesCoverage).xml"
|
|
||||||
displayName: Publish to Azure Pipelines
|
|
||||||
condition: gt(variables.coverageFileCount, 0)
|
|
||||||
- bash: .azure-pipelines/scripts/publish-codecov.py "$(outputPath)"
|
- bash: .azure-pipelines/scripts/publish-codecov.py "$(outputPath)"
|
||||||
displayName: Publish to codecov.io
|
displayName: Publish to codecov.io
|
||||||
condition: gt(variables.coverageFileCount, 0)
|
condition: gt(variables.coverageFileCount, 0)
|
||||||
|
|||||||
13
.flake8
Normal file
13
.flake8
Normal file
@@ -0,0 +1,13 @@
|
|||||||
|
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||||
|
# SPDX-License-Identifier: GPL-3.0-or-later
|
||||||
|
# SPDX-FileCopyrightText: 2025 Felix Fontein <felix@fontein.de>
|
||||||
|
|
||||||
|
[flake8]
|
||||||
|
extend-ignore = E203, E402, F401
|
||||||
|
count = true
|
||||||
|
# TODO: decrease this to ~10
|
||||||
|
max-complexity = 60
|
||||||
|
# black's max-line-length is 89, but it doesn't touch long string literals.
|
||||||
|
# Since ansible-test's limit is 160, let's use that here.
|
||||||
|
max-line-length = 160
|
||||||
|
statistics = true
|
||||||
10
.git-blame-ignore-revs
Normal file
10
.git-blame-ignore-revs
Normal file
@@ -0,0 +1,10 @@
|
|||||||
|
# Copyright (c) Ansible Project
|
||||||
|
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||||
|
# SPDX-License-Identifier: GPL-3.0-or-later
|
||||||
|
|
||||||
|
# Reformat YAML: https://github.com/ansible-collections/community.crypto/pull/866
|
||||||
|
33ef158b094f16d5e04ea9db3ed8bad010744d02
|
||||||
|
# Reformat with black, keeping Python 2 compatibility: https://github.com/ansible-collections/community.crypto/pull/871
|
||||||
|
aec1826c34051b9e7f8af7950489915b661e320b
|
||||||
|
# Reformat with black another time, this time without Python 2 compatibility
|
||||||
|
797bd8a6e2a6f4a37a89ecb15ca34ec88b33258d
|
||||||
4
.github/dependabot.yml
vendored
4
.github/dependabot.yml
vendored
@@ -9,3 +9,7 @@ updates:
|
|||||||
directory: "/"
|
directory: "/"
|
||||||
schedule:
|
schedule:
|
||||||
interval: "weekly"
|
interval: "weekly"
|
||||||
|
groups:
|
||||||
|
ci:
|
||||||
|
patterns:
|
||||||
|
- "*"
|
||||||
|
|||||||
311
.github/workflows/ansible-test.yml
vendored
311
.github/workflows/ansible-test.yml
vendored
@@ -1,311 +0,0 @@
|
|||||||
---
|
|
||||||
# Copyright (c) Ansible Project
|
|
||||||
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
|
|
||||||
# SPDX-License-Identifier: GPL-3.0-or-later
|
|
||||||
|
|
||||||
# For the comprehensive list of the inputs supported by the ansible-community/ansible-test-gh-action GitHub Action, see
|
|
||||||
# https://github.com/marketplace/actions/ansible-test
|
|
||||||
|
|
||||||
name: EOL CI
|
|
||||||
on:
|
|
||||||
# Run EOL CI against all pushes (direct commits, also merged PRs), Pull Requests
|
|
||||||
push:
|
|
||||||
branches:
|
|
||||||
- main
|
|
||||||
- stable-*
|
|
||||||
pull_request:
|
|
||||||
# Run EOL CI once per day (at 09:00 UTC)
|
|
||||||
schedule:
|
|
||||||
- cron: '0 9 * * *'
|
|
||||||
|
|
||||||
concurrency:
|
|
||||||
# Make sure there is at most one active run per PR, but do not cancel any non-PR runs
|
|
||||||
group: ${{ github.workflow }}-${{ (github.head_ref && github.event.number) || github.run_id }}
|
|
||||||
cancel-in-progress: true
|
|
||||||
|
|
||||||
jobs:
|
|
||||||
sanity:
|
|
||||||
name: EOL Sanity (Ⓐ${{ matrix.ansible }})
|
|
||||||
strategy:
|
|
||||||
matrix:
|
|
||||||
ansible:
|
|
||||||
- '2.9'
|
|
||||||
- '2.10'
|
|
||||||
- '2.11'
|
|
||||||
- '2.12'
|
|
||||||
- '2.13'
|
|
||||||
- '2.14'
|
|
||||||
- '2.15'
|
|
||||||
# Ansible-test on various stable branches does not yet work well with cgroups v2.
|
|
||||||
# Since ubuntu-latest now uses Ubuntu 22.04, we need to fall back to the ubuntu-20.04
|
|
||||||
# image for these stable branches. The list of branches where this is necessary will
|
|
||||||
# shrink over time, check out https://github.com/ansible-collections/news-for-maintainers/issues/28
|
|
||||||
# for the latest list.
|
|
||||||
runs-on: >-
|
|
||||||
${{ contains(fromJson(
|
|
||||||
'["2.9", "2.10", "2.11"]'
|
|
||||||
), matrix.ansible) && 'ubuntu-20.04' || 'ubuntu-latest' }}
|
|
||||||
steps:
|
|
||||||
- name: Perform sanity testing
|
|
||||||
uses: felixfontein/ansible-test-gh-action@main
|
|
||||||
with:
|
|
||||||
ansible-core-github-repository-slug: ${{ contains(fromJson('["2.9", "2.10", "2.11"]'), matrix.ansible) && 'ansible-community/eol-ansible' || 'ansible/ansible' }}
|
|
||||||
ansible-core-version: stable-${{ matrix.ansible }}
|
|
||||||
codecov-token: ${{ secrets.CODECOV_TOKEN }}
|
|
||||||
coverage: ${{ github.event_name == 'schedule' && 'always' || 'never' }}
|
|
||||||
pull-request-change-detection: 'true'
|
|
||||||
testing-type: sanity
|
|
||||||
|
|
||||||
units:
|
|
||||||
# Ansible-test on various stable branches does not yet work well with cgroups v2.
|
|
||||||
# Since ubuntu-latest now uses Ubuntu 22.04, we need to fall back to the ubuntu-20.04
|
|
||||||
# image for these stable branches. The list of branches where this is necessary will
|
|
||||||
# shrink over time, check out https://github.com/ansible-collections/news-for-maintainers/issues/28
|
|
||||||
# for the latest list.
|
|
||||||
runs-on: >-
|
|
||||||
${{ contains(fromJson(
|
|
||||||
'["2.9", "2.10", "2.11"]'
|
|
||||||
), matrix.ansible) && 'ubuntu-20.04' || 'ubuntu-latest' }}
|
|
||||||
name: EOL Units (Ⓐ${{ matrix.ansible }})
|
|
||||||
strategy:
|
|
||||||
# As soon as the first unit test fails, cancel the others to free up the CI queue
|
|
||||||
fail-fast: true
|
|
||||||
matrix:
|
|
||||||
ansible:
|
|
||||||
- '2.9'
|
|
||||||
- '2.10'
|
|
||||||
- '2.11'
|
|
||||||
- '2.12'
|
|
||||||
- '2.13'
|
|
||||||
- '2.14'
|
|
||||||
- '2.15'
|
|
||||||
|
|
||||||
steps:
|
|
||||||
- name: >-
|
|
||||||
Perform unit testing against
|
|
||||||
Ansible version ${{ matrix.ansible }}
|
|
||||||
uses: felixfontein/ansible-test-gh-action@main
|
|
||||||
with:
|
|
||||||
ansible-core-github-repository-slug: ${{ contains(fromJson('["2.9", "2.10", "2.11"]'), matrix.ansible) && 'ansible-community/eol-ansible' || 'ansible/ansible' }}
|
|
||||||
ansible-core-version: stable-${{ matrix.ansible }}
|
|
||||||
codecov-token: ${{ secrets.CODECOV_TOKEN }}
|
|
||||||
coverage: ${{ github.event_name == 'schedule' && 'always' || 'never' }}
|
|
||||||
pull-request-change-detection: 'true'
|
|
||||||
testing-type: units
|
|
||||||
|
|
||||||
integration:
|
|
||||||
# Ansible-test on various stable branches does not yet work well with cgroups v2.
|
|
||||||
# Since ubuntu-latest now uses Ubuntu 22.04, we need to fall back to the ubuntu-20.04
|
|
||||||
# image for these stable branches. The list of branches where this is necessary will
|
|
||||||
# shrink over time, check out https://github.com/ansible-collections/news-for-maintainers/issues/28
|
|
||||||
# for the latest list.
|
|
||||||
runs-on: >-
|
|
||||||
${{ contains(fromJson(
|
|
||||||
'["2.9", "2.10", "2.11"]'
|
|
||||||
), matrix.ansible) && 'ubuntu-20.04' || 'ubuntu-latest' }}
|
|
||||||
name: EOL I (Ⓐ${{ matrix.ansible }}+${{ matrix.docker }}+py${{ matrix.python }}:${{ matrix.target }})
|
|
||||||
strategy:
|
|
||||||
fail-fast: false
|
|
||||||
matrix:
|
|
||||||
ansible:
|
|
||||||
- ''
|
|
||||||
docker:
|
|
||||||
- ''
|
|
||||||
python:
|
|
||||||
- ''
|
|
||||||
target:
|
|
||||||
- ''
|
|
||||||
exclude:
|
|
||||||
- ansible: ''
|
|
||||||
include:
|
|
||||||
# 2.9
|
|
||||||
- ansible: '2.9'
|
|
||||||
docker: ubuntu1804
|
|
||||||
python: ''
|
|
||||||
target: azp/posix/1/
|
|
||||||
- ansible: '2.9'
|
|
||||||
docker: ubuntu1804
|
|
||||||
python: ''
|
|
||||||
target: azp/posix/2/
|
|
||||||
- ansible: '2.9'
|
|
||||||
docker: default
|
|
||||||
python: '2.7'
|
|
||||||
target: azp/generic/1/
|
|
||||||
- ansible: '2.9'
|
|
||||||
docker: default
|
|
||||||
python: '2.7'
|
|
||||||
target: azp/generic/2/
|
|
||||||
# 2.10
|
|
||||||
- ansible: '2.10'
|
|
||||||
docker: centos6
|
|
||||||
python: ''
|
|
||||||
target: azp/posix/1/
|
|
||||||
- ansible: '2.10'
|
|
||||||
docker: centos6
|
|
||||||
python: ''
|
|
||||||
target: azp/posix/2/
|
|
||||||
- ansible: '2.10'
|
|
||||||
docker: default
|
|
||||||
python: '3.6'
|
|
||||||
target: azp/generic/1/
|
|
||||||
- ansible: '2.10'
|
|
||||||
docker: default
|
|
||||||
python: '3.6'
|
|
||||||
target: azp/generic/2/
|
|
||||||
# 2.11
|
|
||||||
- ansible: '2.11'
|
|
||||||
docker: alpine3
|
|
||||||
python: ''
|
|
||||||
target: azp/posix/1/
|
|
||||||
- ansible: '2.11'
|
|
||||||
docker: alpine3
|
|
||||||
python: ''
|
|
||||||
target: azp/posix/2/
|
|
||||||
- ansible: '2.11'
|
|
||||||
docker: default
|
|
||||||
python: '3.8'
|
|
||||||
target: azp/generic/1/
|
|
||||||
- ansible: '2.11'
|
|
||||||
docker: default
|
|
||||||
python: '3.8'
|
|
||||||
target: azp/generic/2/
|
|
||||||
# 2.12
|
|
||||||
- ansible: '2.12'
|
|
||||||
docker: centos6
|
|
||||||
python: ''
|
|
||||||
target: azp/posix/1/
|
|
||||||
- ansible: '2.12'
|
|
||||||
docker: centos6
|
|
||||||
python: ''
|
|
||||||
target: azp/posix/2/
|
|
||||||
- ansible: '2.12'
|
|
||||||
docker: fedora33
|
|
||||||
python: ''
|
|
||||||
target: azp/posix/1/
|
|
||||||
- ansible: '2.12'
|
|
||||||
docker: fedora33
|
|
||||||
python: ''
|
|
||||||
target: azp/posix/2/
|
|
||||||
- ansible: '2.12'
|
|
||||||
docker: default
|
|
||||||
python: '2.6'
|
|
||||||
target: azp/generic/1/
|
|
||||||
- ansible: '2.12'
|
|
||||||
docker: default
|
|
||||||
python: '3.9'
|
|
||||||
target: azp/generic/2/
|
|
||||||
# 2.13
|
|
||||||
- ansible: '2.13'
|
|
||||||
docker: opensuse15py2
|
|
||||||
python: ''
|
|
||||||
target: azp/posix/1/
|
|
||||||
- ansible: '2.13'
|
|
||||||
docker: opensuse15py2
|
|
||||||
python: ''
|
|
||||||
target: azp/posix/2/
|
|
||||||
- ansible: '2.13'
|
|
||||||
docker: fedora35
|
|
||||||
python: ''
|
|
||||||
target: azp/posix/1/
|
|
||||||
- ansible: '2.13'
|
|
||||||
docker: fedora35
|
|
||||||
python: ''
|
|
||||||
target: azp/posix/2/
|
|
||||||
- ansible: '2.13'
|
|
||||||
docker: fedora34
|
|
||||||
python: ''
|
|
||||||
target: azp/posix/1/
|
|
||||||
- ansible: '2.13'
|
|
||||||
docker: fedora34
|
|
||||||
python: ''
|
|
||||||
target: azp/posix/2/
|
|
||||||
- ansible: '2.13'
|
|
||||||
docker: ubuntu1804
|
|
||||||
python: ''
|
|
||||||
target: azp/posix/1/
|
|
||||||
- ansible: '2.13'
|
|
||||||
docker: ubuntu1804
|
|
||||||
python: ''
|
|
||||||
target: azp/posix/2/
|
|
||||||
- ansible: '2.13'
|
|
||||||
docker: alpine3
|
|
||||||
python: ''
|
|
||||||
target: azp/posix/1/
|
|
||||||
- ansible: '2.13'
|
|
||||||
docker: alpine3
|
|
||||||
python: ''
|
|
||||||
target: azp/posix/2/
|
|
||||||
- ansible: '2.13'
|
|
||||||
docker: default
|
|
||||||
python: '3.8'
|
|
||||||
target: azp/generic/1/
|
|
||||||
- ansible: '2.13'
|
|
||||||
docker: default
|
|
||||||
python: '3.8'
|
|
||||||
target: azp/generic/2/
|
|
||||||
# 2.14
|
|
||||||
- ansible: '2.14'
|
|
||||||
docker: ubuntu2004
|
|
||||||
python: ''
|
|
||||||
target: azp/posix/1/
|
|
||||||
- ansible: '2.14'
|
|
||||||
docker: ubuntu2004
|
|
||||||
python: ''
|
|
||||||
target: azp/posix/2/
|
|
||||||
- ansible: '2.14'
|
|
||||||
docker: default
|
|
||||||
python: '3.9'
|
|
||||||
target: azp/generic/1/
|
|
||||||
- ansible: '2.14'
|
|
||||||
docker: default
|
|
||||||
python: '3.9'
|
|
||||||
target: azp/generic/2/
|
|
||||||
# 2.15
|
|
||||||
- ansible: '2.15'
|
|
||||||
docker: fedora37
|
|
||||||
python: ''
|
|
||||||
target: azp/posix/1/
|
|
||||||
- ansible: '2.15'
|
|
||||||
docker: fedora37
|
|
||||||
python: ''
|
|
||||||
target: azp/posix/2/
|
|
||||||
- ansible: '2.15'
|
|
||||||
docker: default
|
|
||||||
python: '3.5'
|
|
||||||
target: azp/generic/1/
|
|
||||||
- ansible: '2.15'
|
|
||||||
docker: default
|
|
||||||
python: '3.5'
|
|
||||||
target: azp/generic/2/
|
|
||||||
- ansible: '2.15'
|
|
||||||
docker: default
|
|
||||||
python: '3.10'
|
|
||||||
target: azp/generic/1/
|
|
||||||
- ansible: '2.15'
|
|
||||||
docker: default
|
|
||||||
python: '3.10'
|
|
||||||
target: azp/generic/2/
|
|
||||||
|
|
||||||
steps:
|
|
||||||
- name: >-
|
|
||||||
Perform integration testing against
|
|
||||||
Ansible version ${{ matrix.ansible }}
|
|
||||||
under Python ${{ matrix.python }}
|
|
||||||
uses: felixfontein/ansible-test-gh-action@main
|
|
||||||
with:
|
|
||||||
ansible-core-github-repository-slug: ${{ contains(fromJson('["2.9", "2.10", "2.11"]'), matrix.ansible) && 'ansible-community/eol-ansible' || 'ansible/ansible' }}
|
|
||||||
ansible-core-version: stable-${{ matrix.ansible }}
|
|
||||||
codecov-token: ${{ secrets.CODECOV_TOKEN }}
|
|
||||||
coverage: ${{ github.event_name == 'schedule' && 'always' || 'never' }}
|
|
||||||
docker-image: ${{ matrix.docker }}
|
|
||||||
integration-continue-on-error: 'false'
|
|
||||||
integration-diff: 'false'
|
|
||||||
integration-retry-on-error: 'true'
|
|
||||||
pre-test-cmd: >-
|
|
||||||
git clone --depth=1 --single-branch https://github.com/ansible-collections/community.internal_test_tools.git ../../community/internal_test_tools
|
|
||||||
;
|
|
||||||
git clone --depth=1 --single-branch https://github.com/ansible-collections/community.general.git ../../community/general
|
|
||||||
pull-request-change-detection: 'true'
|
|
||||||
target: ${{ matrix.target }}
|
|
||||||
target-python-version: ${{ matrix.python }}
|
|
||||||
testing-type: integration
|
|
||||||
2
.github/workflows/docs-pr.yml
vendored
2
.github/workflows/docs-pr.yml
vendored
@@ -7,7 +7,7 @@ name: Collection Docs
|
|||||||
concurrency:
|
concurrency:
|
||||||
group: docs-pr-${{ github.head_ref }}
|
group: docs-pr-${{ github.head_ref }}
|
||||||
cancel-in-progress: true
|
cancel-in-progress: true
|
||||||
on:
|
'on':
|
||||||
pull_request_target:
|
pull_request_target:
|
||||||
types: [opened, synchronize, reopened, closed]
|
types: [opened, synchronize, reopened, closed]
|
||||||
|
|
||||||
|
|||||||
2
.github/workflows/docs-push.yml
vendored
2
.github/workflows/docs-push.yml
vendored
@@ -7,7 +7,7 @@ name: Collection Docs
|
|||||||
concurrency:
|
concurrency:
|
||||||
group: docs-push-${{ github.sha }}
|
group: docs-push-${{ github.sha }}
|
||||||
cancel-in-progress: true
|
cancel-in-progress: true
|
||||||
on:
|
'on':
|
||||||
push:
|
push:
|
||||||
branches:
|
branches:
|
||||||
- main
|
- main
|
||||||
|
|||||||
46
.github/workflows/ee.yml
vendored
46
.github/workflows/ee.yml
vendored
@@ -4,17 +4,17 @@
|
|||||||
# SPDX-License-Identifier: GPL-3.0-or-later
|
# SPDX-License-Identifier: GPL-3.0-or-later
|
||||||
|
|
||||||
name: execution environment
|
name: execution environment
|
||||||
on:
|
'on':
|
||||||
# Run CI against all pushes (direct commits, also merged PRs), Pull Requests
|
# Run CI against all pushes (direct commits, also merged PRs), Pull Requests
|
||||||
push:
|
push:
|
||||||
branches:
|
branches:
|
||||||
- main
|
- main
|
||||||
- stable-*
|
- stable-*
|
||||||
pull_request:
|
pull_request:
|
||||||
# Run CI once per day (at 04:45 UTC)
|
# Run CI once per day (at 09:00 UTC)
|
||||||
# This ensures that even if there haven't been commits that we are still testing against latest version of ansible-builder
|
# This ensures that even if there haven't been commits that we are still testing against latest version of ansible-builder
|
||||||
schedule:
|
schedule:
|
||||||
- cron: '45 4 * * *'
|
- cron: '0 9 * * *'
|
||||||
|
|
||||||
env:
|
env:
|
||||||
NAMESPACE: community
|
NAMESPACE: community
|
||||||
@@ -48,36 +48,28 @@ jobs:
|
|||||||
ansible_runner: ansible-runner
|
ansible_runner: ansible-runner
|
||||||
other_deps: |2
|
other_deps: |2
|
||||||
python_interpreter:
|
python_interpreter:
|
||||||
package_system: python3.11 python3.11-pip python3.11-wheel python3.11-cryptography
|
package_system: python3.12 python3.12-pip python3.12-wheel python3.12-cryptography
|
||||||
python_path: "/usr/bin/python3.11"
|
python_path: "/usr/bin/python3.12"
|
||||||
base_image: docker.io/redhat/ubi9:latest
|
base_image: docker.io/redhat/ubi9:latest
|
||||||
pre_base: '"#"'
|
pre_base: '"#"'
|
||||||
# For some reason ansible-builder will not install EPEL dependencies on RHEL
|
- name: ansible-core 2.17 @ Rocky Linux 9
|
||||||
extra_vars: -e has_no_pyopenssl=true
|
ansible_core: https://github.com/ansible/ansible/archive/stable-2.17.tar.gz
|
||||||
- name: ansible-core 2.15 @ Rocky Linux 9
|
|
||||||
ansible_core: https://github.com/ansible/ansible/archive/stable-2.15.tar.gz
|
|
||||||
ansible_runner: ansible-runner
|
|
||||||
base_image: quay.io/rockylinux/rockylinux:9
|
|
||||||
pre_base: RUN dnf install -y epel-release
|
|
||||||
# For some reason ansible-builder will not install EPEL dependencies on Rocky Linux
|
|
||||||
extra_vars: -e has_no_pyopenssl=true
|
|
||||||
- name: ansible-core 2.14 @ CentOS Stream 9
|
|
||||||
ansible_core: https://github.com/ansible/ansible/archive/stable-2.14.tar.gz
|
|
||||||
ansible_runner: ansible-runner
|
|
||||||
base_image: quay.io/centos/centos:stream9
|
|
||||||
pre_base: RUN dnf install -y epel-release epel-next-release
|
|
||||||
# For some reason, PyOpenSSL is **broken** on CentOS Stream 9 / EPEL
|
|
||||||
extra_vars: -e has_no_pyopenssl=true
|
|
||||||
- name: ansible-core 2.13 @ RHEL UBI 8
|
|
||||||
ansible_core: https://github.com/ansible/ansible/archive/stable-2.13.tar.gz
|
|
||||||
ansible_runner: ansible-runner
|
ansible_runner: ansible-runner
|
||||||
other_deps: |2
|
other_deps: |2
|
||||||
python_interpreter:
|
python_interpreter:
|
||||||
package_system: python39 python39-pip python39-wheel python39-cryptography
|
package_system: python3.11 python3.11-pip python3.11-wheel python3.11-cryptography
|
||||||
base_image: docker.io/redhat/ubi8:latest
|
python_path: "/usr/bin/python3.11"
|
||||||
|
base_image: quay.io/rockylinux/rockylinux:9
|
||||||
|
pre_base: RUN dnf install -y epel-release
|
||||||
|
- name: ansible-core 2.18 @ CentOS Stream 9
|
||||||
|
ansible_core: https://github.com/ansible/ansible/archive/stable-2.18.tar.gz
|
||||||
|
ansible_runner: ansible-runner
|
||||||
|
other_deps: |2
|
||||||
|
python_interpreter:
|
||||||
|
package_system: python3.11 python3.11-pip python3.11-wheel python3.11-cryptography
|
||||||
|
python_path: "/usr/bin/python3.11"
|
||||||
|
base_image: quay.io/centos/centos:stream9
|
||||||
pre_base: '"#"'
|
pre_base: '"#"'
|
||||||
# We don't have PyOpenSSL for Python 3.9
|
|
||||||
extra_vars: -e has_no_pyopenssl=true
|
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
steps:
|
steps:
|
||||||
- name: Check out code
|
- name: Check out code
|
||||||
|
|||||||
20
.github/workflows/import-galaxy.yml
vendored
20
.github/workflows/import-galaxy.yml
vendored
@@ -1,20 +0,0 @@
|
|||||||
---
|
|
||||||
# Copyright (c) Ansible Project
|
|
||||||
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
|
|
||||||
# SPDX-License-Identifier: GPL-3.0-or-later
|
|
||||||
|
|
||||||
name: import-galaxy
|
|
||||||
'on':
|
|
||||||
# Run CI against all pushes (direct commits, also merged PRs) to main, and all Pull Requests
|
|
||||||
push:
|
|
||||||
branches:
|
|
||||||
- main
|
|
||||||
- stable-*
|
|
||||||
pull_request:
|
|
||||||
|
|
||||||
jobs:
|
|
||||||
import-galaxy:
|
|
||||||
permissions:
|
|
||||||
contents: read
|
|
||||||
name: Test to import built collection artifact with Galaxy importer
|
|
||||||
uses: ansible-community/github-action-test-galaxy-import/.github/workflows/test-galaxy-import.yml@main
|
|
||||||
28
.github/workflows/nox.yml
vendored
Normal file
28
.github/workflows/nox.yml
vendored
Normal file
@@ -0,0 +1,28 @@
|
|||||||
|
---
|
||||||
|
# Copyright (c) Ansible Project
|
||||||
|
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||||
|
# SPDX-License-Identifier: GPL-3.0-or-later
|
||||||
|
|
||||||
|
name: nox
|
||||||
|
'on':
|
||||||
|
push:
|
||||||
|
branches:
|
||||||
|
- main
|
||||||
|
- stable-*
|
||||||
|
pull_request:
|
||||||
|
# Run CI once per day (at 09:00 UTC)
|
||||||
|
schedule:
|
||||||
|
- cron: '0 9 * * *'
|
||||||
|
workflow_dispatch:
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
nox:
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
name: "Run extra sanity tests"
|
||||||
|
steps:
|
||||||
|
- name: Check out collection
|
||||||
|
uses: actions/checkout@v4
|
||||||
|
with:
|
||||||
|
persist-credentials: false
|
||||||
|
- name: Run nox
|
||||||
|
uses: ansible-community/antsibull-nox@main
|
||||||
38
.github/workflows/reuse.yml
vendored
38
.github/workflows/reuse.yml
vendored
@@ -1,38 +0,0 @@
|
|||||||
---
|
|
||||||
# Copyright (c) Ansible Project
|
|
||||||
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
|
|
||||||
# SPDX-License-Identifier: GPL-3.0-or-later
|
|
||||||
|
|
||||||
name: Verify REUSE
|
|
||||||
|
|
||||||
on:
|
|
||||||
push:
|
|
||||||
branches:
|
|
||||||
- main
|
|
||||||
- stable-*
|
|
||||||
pull_request:
|
|
||||||
branches:
|
|
||||||
- main
|
|
||||||
- stable-*
|
|
||||||
# Run CI once per day (at 04:45 UTC)
|
|
||||||
schedule:
|
|
||||||
- cron: '45 4 * * *'
|
|
||||||
|
|
||||||
jobs:
|
|
||||||
check:
|
|
||||||
permissions:
|
|
||||||
contents: read
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
|
|
||||||
steps:
|
|
||||||
- uses: actions/checkout@v4
|
|
||||||
with:
|
|
||||||
persist-credentials: false
|
|
||||||
|
|
||||||
- name: Remove some files before checking REUSE compliance
|
|
||||||
run: |
|
|
||||||
rm -f tests/integration/targets/*/files/*.pem
|
|
||||||
rm -f tests/integration/targets/*/files/roots/*.pem
|
|
||||||
|
|
||||||
- name: REUSE Compliance Check
|
|
||||||
uses: fsfe/reuse-action@v5
|
|
||||||
1
.gitignore
vendored
1
.gitignore
vendored
@@ -4,6 +4,7 @@
|
|||||||
|
|
||||||
# Community.crypt specific things
|
# Community.crypt specific things
|
||||||
/changelogs/.plugin-cache.yaml
|
/changelogs/.plugin-cache.yaml
|
||||||
|
/tests/integration/inventory
|
||||||
|
|
||||||
|
|
||||||
# Created by https://www.gitignore.io/api/git,linux,pydev,python,windows,pycharm+all,jupyternotebook,vim,webstorm,emacs,dotenv
|
# Created by https://www.gitignore.io/api/git,linux,pydev,python,windows,pycharm+all,jupyternotebook,vim,webstorm,emacs,dotenv
|
||||||
|
|||||||
@@ -1,6 +1,7 @@
|
|||||||
---
|
|
||||||
# Copyright (c) Ansible Project
|
# Copyright (c) Ansible Project
|
||||||
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
|
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||||
# SPDX-License-Identifier: GPL-3.0-or-later
|
# SPDX-License-Identifier: GPL-3.0-or-later
|
||||||
|
|
||||||
has_pyopenssl: true
|
[isort]
|
||||||
|
profile=black
|
||||||
|
lines_after_imports = 2
|
||||||
19
.mypy.ini
Normal file
19
.mypy.ini
Normal file
@@ -0,0 +1,19 @@
|
|||||||
|
# Copyright (c) Ansible Project
|
||||||
|
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||||
|
# SPDX-License-Identifier: GPL-3.0-or-later
|
||||||
|
|
||||||
|
[mypy]
|
||||||
|
# check_untyped_defs = True
|
||||||
|
# disallow_untyped_defs = True -- not yet feasible
|
||||||
|
|
||||||
|
# strict = True -- only try to enable once everything is typed
|
||||||
|
strict_equality = True
|
||||||
|
|
||||||
|
[mypy-ansible.*]
|
||||||
|
# ansible-core has no typing information
|
||||||
|
# ignore_missing_imports = True
|
||||||
|
follow_untyped_imports = True
|
||||||
|
|
||||||
|
[mypy-ansible_collections.community.internal_test_tools.*]
|
||||||
|
# community.internal_test_tools has no typing information
|
||||||
|
ignore_missing_imports = True
|
||||||
592
.pylintrc
Normal file
592
.pylintrc
Normal file
@@ -0,0 +1,592 @@
|
|||||||
|
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||||
|
# SPDX-License-Identifier: GPL-3.0-or-later
|
||||||
|
# SPDX-FileCopyrightText: 2025 Felix Fontein <felix@fontein.de>
|
||||||
|
|
||||||
|
[MAIN]
|
||||||
|
|
||||||
|
# Clear in-memory caches upon conclusion of linting. Useful if running pylint
|
||||||
|
# in a server-like mode.
|
||||||
|
clear-cache-post-run=no
|
||||||
|
|
||||||
|
# Load and enable all available extensions. Use --list-extensions to see a list
|
||||||
|
# all available extensions.
|
||||||
|
#enable-all-extensions=
|
||||||
|
|
||||||
|
# Specify a score threshold under which the program will exit with error.
|
||||||
|
fail-under=10
|
||||||
|
|
||||||
|
# Use multiple processes to speed up Pylint. Specifying 0 will auto-detect the
|
||||||
|
# number of processors available to use, and will cap the count on Windows to
|
||||||
|
# avoid hangs.
|
||||||
|
jobs=0
|
||||||
|
|
||||||
|
# Minimum Python version to use for version dependent checks. Will default to
|
||||||
|
# the version used to run pylint.
|
||||||
|
py-version=3.7
|
||||||
|
|
||||||
|
# Allow loading of arbitrary C extensions. Extensions are imported into the
|
||||||
|
# active Python interpreter and may run arbitrary code.
|
||||||
|
unsafe-load-any-extension=no
|
||||||
|
|
||||||
|
# In verbose mode, extra non-checker-related info will be displayed.
|
||||||
|
#verbose=
|
||||||
|
|
||||||
|
|
||||||
|
[BASIC]
|
||||||
|
|
||||||
|
# Naming style matching correct argument names.
|
||||||
|
argument-naming-style=snake_case
|
||||||
|
|
||||||
|
# Regular expression matching correct argument names. Overrides argument-
|
||||||
|
# naming-style. If left empty, argument names will be checked with the set
|
||||||
|
# naming style.
|
||||||
|
#argument-rgx=
|
||||||
|
|
||||||
|
# Naming style matching correct attribute names.
|
||||||
|
attr-naming-style=snake_case
|
||||||
|
|
||||||
|
# Regular expression matching correct attribute names. Overrides attr-naming-
|
||||||
|
# style. If left empty, attribute names will be checked with the set naming
|
||||||
|
# style.
|
||||||
|
#attr-rgx=
|
||||||
|
|
||||||
|
# Bad variable names which should always be refused, separated by a comma.
|
||||||
|
bad-names=foo,
|
||||||
|
bar,
|
||||||
|
baz,
|
||||||
|
toto,
|
||||||
|
tutu,
|
||||||
|
tata
|
||||||
|
|
||||||
|
# Bad variable names regexes, separated by a comma. If names match any regex,
|
||||||
|
# they will always be refused
|
||||||
|
bad-names-rgxs=
|
||||||
|
|
||||||
|
# Naming style matching correct class attribute names.
|
||||||
|
class-attribute-naming-style=any
|
||||||
|
|
||||||
|
# Regular expression matching correct class attribute names. Overrides class-
|
||||||
|
# attribute-naming-style. If left empty, class attribute names will be checked
|
||||||
|
# with the set naming style.
|
||||||
|
#class-attribute-rgx=
|
||||||
|
|
||||||
|
# Naming style matching correct class constant names.
|
||||||
|
class-const-naming-style=UPPER_CASE
|
||||||
|
|
||||||
|
# Regular expression matching correct class constant names. Overrides class-
|
||||||
|
# const-naming-style. If left empty, class constant names will be checked with
|
||||||
|
# the set naming style.
|
||||||
|
#class-const-rgx=
|
||||||
|
|
||||||
|
# Naming style matching correct class names.
|
||||||
|
class-naming-style=PascalCase
|
||||||
|
|
||||||
|
# Regular expression matching correct class names. Overrides class-naming-
|
||||||
|
# style. If left empty, class names will be checked with the set naming style.
|
||||||
|
#class-rgx=
|
||||||
|
|
||||||
|
# Naming style matching correct constant names.
|
||||||
|
const-naming-style=UPPER_CASE
|
||||||
|
|
||||||
|
# Regular expression matching correct constant names. Overrides const-naming-
|
||||||
|
# style. If left empty, constant names will be checked with the set naming
|
||||||
|
# style.
|
||||||
|
#const-rgx=
|
||||||
|
|
||||||
|
# Minimum line length for functions/classes that require docstrings, shorter
|
||||||
|
# ones are exempt.
|
||||||
|
docstring-min-length=-1
|
||||||
|
|
||||||
|
# Naming style matching correct function names.
|
||||||
|
function-naming-style=snake_case
|
||||||
|
|
||||||
|
# Regular expression matching correct function names. Overrides function-
|
||||||
|
# naming-style. If left empty, function names will be checked with the set
|
||||||
|
# naming style.
|
||||||
|
#function-rgx=
|
||||||
|
|
||||||
|
# Good variable names which should always be accepted, separated by a comma.
|
||||||
|
good-names=i,
|
||||||
|
j,
|
||||||
|
k,
|
||||||
|
ex,
|
||||||
|
Run,
|
||||||
|
_
|
||||||
|
|
||||||
|
# Good variable names regexes, separated by a comma. If names match any regex,
|
||||||
|
# they will always be accepted
|
||||||
|
good-names-rgxs=
|
||||||
|
|
||||||
|
# Include a hint for the correct naming format with invalid-name.
|
||||||
|
include-naming-hint=no
|
||||||
|
|
||||||
|
# Naming style matching correct inline iteration names.
|
||||||
|
inlinevar-naming-style=any
|
||||||
|
|
||||||
|
# Regular expression matching correct inline iteration names. Overrides
|
||||||
|
# inlinevar-naming-style. If left empty, inline iteration names will be checked
|
||||||
|
# with the set naming style.
|
||||||
|
#inlinevar-rgx=
|
||||||
|
|
||||||
|
# Naming style matching correct method names.
|
||||||
|
method-naming-style=snake_case
|
||||||
|
|
||||||
|
# Regular expression matching correct method names. Overrides method-naming-
|
||||||
|
# style. If left empty, method names will be checked with the set naming style.
|
||||||
|
#method-rgx=
|
||||||
|
|
||||||
|
# Naming style matching correct module names.
|
||||||
|
module-naming-style=snake_case
|
||||||
|
|
||||||
|
# Regular expression matching correct module names. Overrides module-naming-
|
||||||
|
# style. If left empty, module names will be checked with the set naming style.
|
||||||
|
#module-rgx=
|
||||||
|
|
||||||
|
# Colon-delimited sets of names that determine each other's naming style when
|
||||||
|
# the name regexes allow several styles.
|
||||||
|
name-group=
|
||||||
|
|
||||||
|
# Regular expression which should only match function or class names that do
|
||||||
|
# not require a docstring.
|
||||||
|
no-docstring-rgx=^_
|
||||||
|
|
||||||
|
# List of decorators that produce properties, such as abc.abstractproperty. Add
|
||||||
|
# to this list to register other decorators that produce valid properties.
|
||||||
|
# These decorators are taken in consideration only for invalid-name.
|
||||||
|
property-classes=abc.abstractproperty
|
||||||
|
|
||||||
|
# Regular expression matching correct type alias names. If left empty, type
|
||||||
|
# alias names will be checked with the set naming style.
|
||||||
|
#typealias-rgx=
|
||||||
|
|
||||||
|
# Regular expression matching correct type variable names. If left empty, type
|
||||||
|
# variable names will be checked with the set naming style.
|
||||||
|
#typevar-rgx=
|
||||||
|
|
||||||
|
# Naming style matching correct variable names.
|
||||||
|
variable-naming-style=snake_case
|
||||||
|
|
||||||
|
# Regular expression matching correct variable names. Overrides variable-
|
||||||
|
# naming-style. If left empty, variable names will be checked with the set
|
||||||
|
# naming style.
|
||||||
|
#variable-rgx=
|
||||||
|
|
||||||
|
|
||||||
|
[CLASSES]
|
||||||
|
|
||||||
|
# Warn about protected attribute access inside special methods
|
||||||
|
check-protected-access-in-special-methods=no
|
||||||
|
|
||||||
|
# List of method names used to declare (i.e. assign) instance attributes.
|
||||||
|
defining-attr-methods=__init__,
|
||||||
|
__new__,
|
||||||
|
setUp,
|
||||||
|
asyncSetUp,
|
||||||
|
__post_init__
|
||||||
|
|
||||||
|
# List of member names, which should be excluded from the protected access
|
||||||
|
# warning.
|
||||||
|
exclude-protected=_asdict,_fields,_replace,_source,_make,os._exit
|
||||||
|
|
||||||
|
# List of valid names for the first argument in a class method.
|
||||||
|
valid-classmethod-first-arg=cls
|
||||||
|
|
||||||
|
# List of valid names for the first argument in a metaclass class method.
|
||||||
|
valid-metaclass-classmethod-first-arg=mcs
|
||||||
|
|
||||||
|
|
||||||
|
[DESIGN]
|
||||||
|
|
||||||
|
# List of regular expressions of class ancestor names to ignore when counting
|
||||||
|
# public methods (see R0903)
|
||||||
|
exclude-too-few-public-methods=
|
||||||
|
|
||||||
|
# List of qualified class names to ignore when counting class parents (see
|
||||||
|
# R0901)
|
||||||
|
ignored-parents=
|
||||||
|
|
||||||
|
# Maximum number of arguments for function / method.
|
||||||
|
max-args=5
|
||||||
|
|
||||||
|
# Maximum number of attributes for a class (see R0902).
|
||||||
|
max-attributes=7
|
||||||
|
|
||||||
|
# Maximum number of boolean expressions in an if statement (see R0916).
|
||||||
|
max-bool-expr=5
|
||||||
|
|
||||||
|
# Maximum number of branch for function / method body.
|
||||||
|
max-branches=12
|
||||||
|
|
||||||
|
# Maximum number of locals for function / method body.
|
||||||
|
max-locals=15
|
||||||
|
|
||||||
|
# Maximum number of parents for a class (see R0901).
|
||||||
|
max-parents=7
|
||||||
|
|
||||||
|
# Maximum number of positional arguments for function / method.
|
||||||
|
max-positional-arguments=5
|
||||||
|
|
||||||
|
# Maximum number of public methods for a class (see R0904).
|
||||||
|
max-public-methods=20
|
||||||
|
|
||||||
|
# Maximum number of return / yield for function / method body.
|
||||||
|
max-returns=6
|
||||||
|
|
||||||
|
# Maximum number of statements in function / method body.
|
||||||
|
max-statements=50
|
||||||
|
|
||||||
|
# Minimum number of public methods for a class (see R0903).
|
||||||
|
min-public-methods=2
|
||||||
|
|
||||||
|
|
||||||
|
[EXCEPTIONS]
|
||||||
|
|
||||||
|
# Exceptions that will emit a warning when caught.
|
||||||
|
overgeneral-exceptions=builtins.BaseException,builtins.Exception
|
||||||
|
|
||||||
|
|
||||||
|
[FORMAT]
|
||||||
|
|
||||||
|
# Expected format of line ending, e.g. empty (any line ending), LF or CRLF.
|
||||||
|
expected-line-ending-format=
|
||||||
|
|
||||||
|
# Regexp for a line that is allowed to be longer than the limit.
|
||||||
|
ignore-long-lines=^\s*(# )?<?https?://\S+>?$
|
||||||
|
|
||||||
|
# Number of spaces of indent required inside a hanging or continued line.
|
||||||
|
indent-after-paren=4
|
||||||
|
|
||||||
|
# String used as indentation unit. This is usually " " (4 spaces) or "\t" (1
|
||||||
|
# tab).
|
||||||
|
indent-string=' '
|
||||||
|
|
||||||
|
# Maximum number of characters on a single line.
|
||||||
|
max-line-length=160
|
||||||
|
|
||||||
|
# Maximum number of lines in a module.
|
||||||
|
max-module-lines=1000
|
||||||
|
|
||||||
|
# Allow the body of a class to be on the same line as the declaration if body
|
||||||
|
# contains single statement.
|
||||||
|
single-line-class-stmt=no
|
||||||
|
|
||||||
|
# Allow the body of an if to be on the same line as the test if there is no
|
||||||
|
# else.
|
||||||
|
single-line-if-stmt=no
|
||||||
|
|
||||||
|
|
||||||
|
[IMPORTS]
|
||||||
|
|
||||||
|
# List of modules that can be imported at any level, not just the top level
|
||||||
|
# one.
|
||||||
|
allow-any-import-level=
|
||||||
|
|
||||||
|
# Allow explicit reexports by alias from a package __init__.
|
||||||
|
allow-reexport-from-package=no
|
||||||
|
|
||||||
|
# Allow wildcard imports from modules that define __all__.
|
||||||
|
allow-wildcard-with-all=no
|
||||||
|
|
||||||
|
# Deprecated modules which should not be used, separated by a comma.
|
||||||
|
deprecated-modules=
|
||||||
|
|
||||||
|
# Output a graph (.gv or any supported image format) of external dependencies
|
||||||
|
# to the given file (report RP0402 must not be disabled).
|
||||||
|
ext-import-graph=
|
||||||
|
|
||||||
|
# Output a graph (.gv or any supported image format) of all (i.e. internal and
|
||||||
|
# external) dependencies to the given file (report RP0402 must not be
|
||||||
|
# disabled).
|
||||||
|
import-graph=
|
||||||
|
|
||||||
|
# Output a graph (.gv or any supported image format) of internal dependencies
|
||||||
|
# to the given file (report RP0402 must not be disabled).
|
||||||
|
int-import-graph=
|
||||||
|
|
||||||
|
# Force import order to recognize a module as part of the standard
|
||||||
|
# compatibility libraries.
|
||||||
|
known-standard-library=
|
||||||
|
|
||||||
|
# Force import order to recognize a module as part of a third party library.
|
||||||
|
known-third-party=enchant
|
||||||
|
|
||||||
|
# Couples of modules and preferred modules, separated by a comma.
|
||||||
|
preferred-modules=
|
||||||
|
|
||||||
|
|
||||||
|
[LOGGING]
|
||||||
|
|
||||||
|
# The type of string formatting that logging methods do. `old` means using %
|
||||||
|
# formatting, `new` is for `{}` formatting.
|
||||||
|
logging-format-style=old
|
||||||
|
|
||||||
|
# Logging modules to check that the string format arguments are in logging
|
||||||
|
# function parameter format.
|
||||||
|
logging-modules=logging
|
||||||
|
|
||||||
|
|
||||||
|
[MESSAGES CONTROL]
|
||||||
|
|
||||||
|
# Only show warnings with the listed confidence levels. Leave empty to show
|
||||||
|
# all. Valid levels: HIGH, CONTROL_FLOW, INFERENCE, INFERENCE_FAILURE,
|
||||||
|
# UNDEFINED.
|
||||||
|
confidence=HIGH,
|
||||||
|
CONTROL_FLOW,
|
||||||
|
INFERENCE,
|
||||||
|
INFERENCE_FAILURE,
|
||||||
|
UNDEFINED
|
||||||
|
|
||||||
|
# Disable the message, report, category or checker with the given id(s). You
|
||||||
|
# can either give multiple identifiers separated by comma (,) or put this
|
||||||
|
# option multiple times (only on the command line, not in the configuration
|
||||||
|
# file where it should appear only once). You can also use "--disable=all" to
|
||||||
|
# disable everything first and then re-enable specific checks. For example, if
|
||||||
|
# you want to run only the similarities checker, you can use "--disable=all
|
||||||
|
# --enable=similarities". If you want to run only the classes checker, but have
|
||||||
|
# no Warning level messages displayed, use "--disable=all --enable=classes
|
||||||
|
# --disable=W".
|
||||||
|
disable=raw-checker-failed,
|
||||||
|
bad-inline-option,
|
||||||
|
deprecated-pragma,
|
||||||
|
duplicate-code,
|
||||||
|
file-ignored,
|
||||||
|
import-outside-toplevel,
|
||||||
|
missing-class-docstring,
|
||||||
|
missing-function-docstring,
|
||||||
|
missing-module-docstring,
|
||||||
|
locally-disabled,
|
||||||
|
suppressed-message,
|
||||||
|
use-implicit-booleaness-not-comparison,
|
||||||
|
use-implicit-booleaness-not-comparison-to-string,
|
||||||
|
use-implicit-booleaness-not-comparison-to-zero,
|
||||||
|
superfluous-parens,
|
||||||
|
too-few-public-methods,
|
||||||
|
too-many-arguments,
|
||||||
|
too-many-boolean-expressions,
|
||||||
|
too-many-branches,
|
||||||
|
too-many-function-args,
|
||||||
|
too-many-instance-attributes,
|
||||||
|
too-many-lines,
|
||||||
|
too-many-locals,
|
||||||
|
too-many-nested-blocks,
|
||||||
|
too-many-positional-arguments,
|
||||||
|
too-many-return-statements,
|
||||||
|
too-many-statements,
|
||||||
|
ungrouped-imports,
|
||||||
|
useless-parent-delegation,
|
||||||
|
wrong-import-order,
|
||||||
|
wrong-import-position,
|
||||||
|
# To clean up:
|
||||||
|
broad-exception-caught,
|
||||||
|
broad-exception-raised,
|
||||||
|
fixme,
|
||||||
|
invalid-name,
|
||||||
|
unused-argument,
|
||||||
|
# Cannot remove yet due to inadequacy of rules
|
||||||
|
inconsistent-return-statements, # doesn't notice that fail_json() does not return
|
||||||
|
|
||||||
|
# Enable the message, report, category or checker with the given id(s). You can
|
||||||
|
# either give multiple identifier separated by comma (,) or put this option
|
||||||
|
# multiple time (only on the command line, not in the configuration file where
|
||||||
|
# it should appear only once). See also the "--disable" option for examples.
|
||||||
|
enable=
|
||||||
|
|
||||||
|
|
||||||
|
[METHOD_ARGS]
|
||||||
|
|
||||||
|
# List of qualified names (i.e., library.method) which require a timeout
|
||||||
|
# parameter e.g. 'requests.api.get,requests.api.post'
|
||||||
|
timeout-methods=requests.api.delete,requests.api.get,requests.api.head,requests.api.options,requests.api.patch,requests.api.post,requests.api.put,requests.api.request
|
||||||
|
|
||||||
|
|
||||||
|
[MISCELLANEOUS]
|
||||||
|
|
||||||
|
# List of note tags to take in consideration, separated by a comma.
|
||||||
|
notes=FIXME,
|
||||||
|
XXX,
|
||||||
|
TODO
|
||||||
|
|
||||||
|
# Regular expression of note tags to take in consideration.
|
||||||
|
notes-rgx=
|
||||||
|
|
||||||
|
|
||||||
|
[REFACTORING]
|
||||||
|
|
||||||
|
# Maximum number of nested blocks for function / method body
|
||||||
|
max-nested-blocks=5
|
||||||
|
|
||||||
|
# Complete name of functions that never returns. When checking for
|
||||||
|
# inconsistent-return-statements if a never returning function is called then
|
||||||
|
# it will be considered as an explicit return statement and no message will be
|
||||||
|
# printed.
|
||||||
|
never-returning-functions=sys.exit,argparse.parse_error
|
||||||
|
|
||||||
|
# Let 'consider-using-join' be raised when the separator to join on would be
|
||||||
|
# non-empty (resulting in expected fixes of the type: ``"- " + " -
|
||||||
|
# ".join(items)``)
|
||||||
|
suggest-join-with-non-empty-separator=yes
|
||||||
|
|
||||||
|
|
||||||
|
[REPORTS]
|
||||||
|
|
||||||
|
# Python expression which should return a score less than or equal to 10. You
|
||||||
|
# have access to the variables 'fatal', 'error', 'warning', 'refactor',
|
||||||
|
# 'convention', and 'info' which contain the number of messages in each
|
||||||
|
# category, as well as 'statement' which is the total number of statements
|
||||||
|
# analyzed. This score is used by the global evaluation report (RP0004).
|
||||||
|
evaluation=max(0, 0 if fatal else 10.0 - ((float(5 * error + warning + refactor + convention) / statement) * 10))
|
||||||
|
|
||||||
|
# Template used to display messages. This is a python new-style format string
|
||||||
|
# used to format the message information. See doc for all details.
|
||||||
|
msg-template=
|
||||||
|
|
||||||
|
# Set the output format. Available formats are: text, parseable, colorized,
|
||||||
|
# json2 (improved json format), json (old json format) and msvs (visual
|
||||||
|
# studio). You can also give a reporter class, e.g.
|
||||||
|
# mypackage.mymodule.MyReporterClass.
|
||||||
|
#output-format=
|
||||||
|
|
||||||
|
# Tells whether to display a full report or only the messages.
|
||||||
|
reports=no
|
||||||
|
|
||||||
|
# Activate the evaluation score.
|
||||||
|
score=yes
|
||||||
|
|
||||||
|
|
||||||
|
[SIMILARITIES]
|
||||||
|
|
||||||
|
# Comments are removed from the similarity computation
|
||||||
|
ignore-comments=yes
|
||||||
|
|
||||||
|
# Docstrings are removed from the similarity computation
|
||||||
|
ignore-docstrings=yes
|
||||||
|
|
||||||
|
# Imports are removed from the similarity computation
|
||||||
|
ignore-imports=yes
|
||||||
|
|
||||||
|
# Signatures are removed from the similarity computation
|
||||||
|
ignore-signatures=yes
|
||||||
|
|
||||||
|
# Minimum lines number of a similarity.
|
||||||
|
min-similarity-lines=4
|
||||||
|
|
||||||
|
|
||||||
|
[SPELLING]
|
||||||
|
|
||||||
|
# Limits count of emitted suggestions for spelling mistakes.
|
||||||
|
max-spelling-suggestions=4
|
||||||
|
|
||||||
|
# Spelling dictionary name. No available dictionaries : You need to install
|
||||||
|
# both the python package and the system dependency for enchant to work.
|
||||||
|
spelling-dict=
|
||||||
|
|
||||||
|
# List of comma separated words that should be considered directives if they
|
||||||
|
# appear at the beginning of a comment and should not be checked.
|
||||||
|
spelling-ignore-comment-directives=fmt: on,fmt: off,noqa:,noqa,nosec,isort:skip,mypy:
|
||||||
|
|
||||||
|
# List of comma separated words that should not be checked.
|
||||||
|
spelling-ignore-words=
|
||||||
|
|
||||||
|
# A path to a file that contains the private dictionary; one word per line.
|
||||||
|
spelling-private-dict-file=
|
||||||
|
|
||||||
|
# Tells whether to store unknown words to the private dictionary (see the
|
||||||
|
# --spelling-private-dict-file option) instead of raising a message.
|
||||||
|
spelling-store-unknown-words=no
|
||||||
|
|
||||||
|
|
||||||
|
[STRING]
|
||||||
|
|
||||||
|
# This flag controls whether inconsistent-quotes generates a warning when the
|
||||||
|
# character used as a quote delimiter is used inconsistently within a module.
|
||||||
|
check-quote-consistency=no
|
||||||
|
|
||||||
|
# This flag controls whether the implicit-str-concat should generate a warning
|
||||||
|
# on implicit string concatenation in sequences defined over several lines.
|
||||||
|
check-str-concat-over-line-jumps=no
|
||||||
|
|
||||||
|
|
||||||
|
[TYPECHECK]
|
||||||
|
|
||||||
|
# List of decorators that produce context managers, such as
|
||||||
|
# contextlib.contextmanager. Add to this list to register other decorators that
|
||||||
|
# produce valid context managers.
|
||||||
|
contextmanager-decorators=contextlib.contextmanager
|
||||||
|
|
||||||
|
# List of members which are set dynamically and missed by pylint inference
|
||||||
|
# system, and so shouldn't trigger E1101 when accessed. Python regular
|
||||||
|
# expressions are accepted.
|
||||||
|
generated-members=
|
||||||
|
|
||||||
|
# Tells whether to warn about missing members when the owner of the attribute
|
||||||
|
# is inferred to be None.
|
||||||
|
ignore-none=yes
|
||||||
|
|
||||||
|
# This flag controls whether pylint should warn about no-member and similar
|
||||||
|
# checks whenever an opaque object is returned when inferring. The inference
|
||||||
|
# can return multiple potential results while evaluating a Python object, but
|
||||||
|
# some branches might not be evaluated, which results in partial inference. In
|
||||||
|
# that case, it might be useful to still emit no-member and other checks for
|
||||||
|
# the rest of the inferred objects.
|
||||||
|
ignore-on-opaque-inference=yes
|
||||||
|
|
||||||
|
# List of symbolic message names to ignore for Mixin members.
|
||||||
|
ignored-checks-for-mixins=no-member,
|
||||||
|
not-async-context-manager,
|
||||||
|
not-context-manager,
|
||||||
|
attribute-defined-outside-init
|
||||||
|
|
||||||
|
# List of class names for which member attributes should not be checked (useful
|
||||||
|
# for classes with dynamically set attributes). This supports the use of
|
||||||
|
# qualified names.
|
||||||
|
ignored-classes=optparse.Values,thread._local,_thread._local,argparse.Namespace
|
||||||
|
|
||||||
|
# Show a hint with possible names when a member name was not found. The aspect
|
||||||
|
# of finding the hint is based on edit distance.
|
||||||
|
missing-member-hint=yes
|
||||||
|
|
||||||
|
# The minimum edit distance a name should have in order to be considered a
|
||||||
|
# similar match for a missing member name.
|
||||||
|
missing-member-hint-distance=1
|
||||||
|
|
||||||
|
# The total number of similar names that should be taken in consideration when
|
||||||
|
# showing a hint for a missing member.
|
||||||
|
missing-member-max-choices=1
|
||||||
|
|
||||||
|
# Regex pattern to define which classes are considered mixins.
|
||||||
|
mixin-class-rgx=.*[Mm]ixin
|
||||||
|
|
||||||
|
# List of decorators that change the signature of a decorated function.
|
||||||
|
signature-mutators=
|
||||||
|
|
||||||
|
|
||||||
|
[VARIABLES]
|
||||||
|
|
||||||
|
# List of additional names supposed to be defined in builtins. Remember that
|
||||||
|
# you should avoid defining new builtins when possible.
|
||||||
|
additional-builtins=
|
||||||
|
|
||||||
|
# Tells whether unused global variables should be treated as a violation.
|
||||||
|
allow-global-unused-variables=yes
|
||||||
|
|
||||||
|
# List of names allowed to shadow builtins
|
||||||
|
allowed-redefined-builtins=
|
||||||
|
|
||||||
|
# List of strings which can identify a callback function by name. A callback
|
||||||
|
# name must start or end with one of those strings.
|
||||||
|
callbacks=cb_,
|
||||||
|
_cb
|
||||||
|
|
||||||
|
# A regular expression matching the name of dummy variables (i.e. expected to
|
||||||
|
# not be used).
|
||||||
|
dummy-variables-rgx=_+$|(_[a-zA-Z0-9_]*[a-zA-Z0-9]+?$)|dummy|^ignored_|^unused_
|
||||||
|
|
||||||
|
# Argument names that match this expression will be ignored.
|
||||||
|
ignored-argument-names=_.*|^ignored_|^unused_
|
||||||
|
|
||||||
|
# Tells whether we should check for unused import in __init__ files.
|
||||||
|
init-import=no
|
||||||
|
|
||||||
|
# List of qualified module names which can have objects that can redefine
|
||||||
|
# builtins.
|
||||||
|
redefining-builtins-modules=six.moves,past.builtins,future.builtins,builtins,io
|
||||||
@@ -1,5 +0,0 @@
|
|||||||
Format: https://www.debian.org/doc/packaging-manuals/copyright-format/1.0/
|
|
||||||
|
|
||||||
Files: changelogs/fragments/*
|
|
||||||
Copyright: Ansible Project
|
|
||||||
License: GPL-3.0-or-later
|
|
||||||
53
.yamllint
Normal file
53
.yamllint
Normal file
@@ -0,0 +1,53 @@
|
|||||||
|
---
|
||||||
|
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||||
|
# SPDX-License-Identifier: GPL-3.0-or-later
|
||||||
|
# SPDX-FileCopyrightText: 2025 Felix Fontein <felix@fontein.de>
|
||||||
|
|
||||||
|
extends: default
|
||||||
|
|
||||||
|
ignore: |
|
||||||
|
/changelogs/
|
||||||
|
|
||||||
|
rules:
|
||||||
|
line-length:
|
||||||
|
max: 300
|
||||||
|
level: error
|
||||||
|
document-start:
|
||||||
|
present: true
|
||||||
|
document-end: false
|
||||||
|
truthy:
|
||||||
|
level: error
|
||||||
|
allowed-values:
|
||||||
|
- 'true'
|
||||||
|
- 'false'
|
||||||
|
indentation:
|
||||||
|
spaces: 2
|
||||||
|
indent-sequences: true
|
||||||
|
key-duplicates: enable
|
||||||
|
trailing-spaces: enable
|
||||||
|
new-line-at-end-of-file: disable
|
||||||
|
hyphens:
|
||||||
|
max-spaces-after: 1
|
||||||
|
empty-lines:
|
||||||
|
max: 2
|
||||||
|
max-start: 0
|
||||||
|
max-end: 0
|
||||||
|
commas:
|
||||||
|
max-spaces-before: 0
|
||||||
|
min-spaces-after: 1
|
||||||
|
max-spaces-after: 1
|
||||||
|
colons:
|
||||||
|
max-spaces-before: 0
|
||||||
|
max-spaces-after: 1
|
||||||
|
brackets:
|
||||||
|
min-spaces-inside: 0
|
||||||
|
max-spaces-inside: 0
|
||||||
|
braces:
|
||||||
|
min-spaces-inside: 0
|
||||||
|
max-spaces-inside: 1
|
||||||
|
octal-values:
|
||||||
|
forbid-implicit-octal: true
|
||||||
|
forbid-explicit-octal: true
|
||||||
|
comments:
|
||||||
|
min-spaces-from-content: 1
|
||||||
|
comments-indentation: false
|
||||||
54
.yamllint-docs
Normal file
54
.yamllint-docs
Normal file
@@ -0,0 +1,54 @@
|
|||||||
|
---
|
||||||
|
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||||
|
# SPDX-License-Identifier: GPL-3.0-or-later
|
||||||
|
# SPDX-FileCopyrightText: 2025 Felix Fontein <felix@fontein.de>
|
||||||
|
|
||||||
|
extends: default
|
||||||
|
|
||||||
|
ignore: |
|
||||||
|
/changelogs/
|
||||||
|
|
||||||
|
rules:
|
||||||
|
line-length:
|
||||||
|
max: 160
|
||||||
|
level: error
|
||||||
|
document-start:
|
||||||
|
present: false
|
||||||
|
document-end:
|
||||||
|
present: false
|
||||||
|
truthy:
|
||||||
|
level: error
|
||||||
|
allowed-values:
|
||||||
|
- 'true'
|
||||||
|
- 'false'
|
||||||
|
indentation:
|
||||||
|
spaces: 2
|
||||||
|
indent-sequences: true
|
||||||
|
key-duplicates: enable
|
||||||
|
trailing-spaces: enable
|
||||||
|
new-line-at-end-of-file: disable
|
||||||
|
hyphens:
|
||||||
|
max-spaces-after: 1
|
||||||
|
empty-lines:
|
||||||
|
max: 2
|
||||||
|
max-start: 0
|
||||||
|
max-end: 0
|
||||||
|
commas:
|
||||||
|
max-spaces-before: 0
|
||||||
|
min-spaces-after: 1
|
||||||
|
max-spaces-after: 1
|
||||||
|
colons:
|
||||||
|
max-spaces-before: 0
|
||||||
|
max-spaces-after: 1
|
||||||
|
brackets:
|
||||||
|
min-spaces-inside: 0
|
||||||
|
max-spaces-inside: 0
|
||||||
|
braces:
|
||||||
|
min-spaces-inside: 0
|
||||||
|
max-spaces-inside: 1
|
||||||
|
octal-values:
|
||||||
|
forbid-implicit-octal: true
|
||||||
|
forbid-explicit-octal: true
|
||||||
|
comments:
|
||||||
|
min-spaces-from-content: 1
|
||||||
|
comments-indentation: false
|
||||||
54
.yamllint-examples
Normal file
54
.yamllint-examples
Normal file
@@ -0,0 +1,54 @@
|
|||||||
|
---
|
||||||
|
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||||
|
# SPDX-License-Identifier: GPL-3.0-or-later
|
||||||
|
# SPDX-FileCopyrightText: 2025 Felix Fontein <felix@fontein.de>
|
||||||
|
|
||||||
|
extends: default
|
||||||
|
|
||||||
|
ignore: |
|
||||||
|
/changelogs/
|
||||||
|
|
||||||
|
rules:
|
||||||
|
line-length:
|
||||||
|
max: 160
|
||||||
|
level: error
|
||||||
|
document-start:
|
||||||
|
present: true
|
||||||
|
document-end:
|
||||||
|
present: false
|
||||||
|
truthy:
|
||||||
|
level: error
|
||||||
|
allowed-values:
|
||||||
|
- 'true'
|
||||||
|
- 'false'
|
||||||
|
indentation:
|
||||||
|
spaces: 2
|
||||||
|
indent-sequences: true
|
||||||
|
key-duplicates: enable
|
||||||
|
trailing-spaces: enable
|
||||||
|
new-line-at-end-of-file: disable
|
||||||
|
hyphens:
|
||||||
|
max-spaces-after: 1
|
||||||
|
empty-lines:
|
||||||
|
max: 2
|
||||||
|
max-start: 0
|
||||||
|
max-end: 0
|
||||||
|
commas:
|
||||||
|
max-spaces-before: 0
|
||||||
|
min-spaces-after: 1
|
||||||
|
max-spaces-after: 1
|
||||||
|
colons:
|
||||||
|
max-spaces-before: 0
|
||||||
|
max-spaces-after: 1
|
||||||
|
brackets:
|
||||||
|
min-spaces-inside: 0
|
||||||
|
max-spaces-inside: 0
|
||||||
|
braces:
|
||||||
|
min-spaces-inside: 0
|
||||||
|
max-spaces-inside: 1
|
||||||
|
octal-values:
|
||||||
|
forbid-implicit-octal: true
|
||||||
|
forbid-explicit-octal: true
|
||||||
|
comments:
|
||||||
|
min-spaces-from-content: 1
|
||||||
|
comments-indentation: false
|
||||||
870
CHANGELOG.md
870
CHANGELOG.md
File diff suppressed because it is too large
Load Diff
@@ -4,6 +4,98 @@ Community Crypto Release Notes
|
|||||||
|
|
||||||
.. contents:: Topics
|
.. contents:: Topics
|
||||||
|
|
||||||
|
v3.0.0-a1
|
||||||
|
=========
|
||||||
|
|
||||||
|
Release Summary
|
||||||
|
---------------
|
||||||
|
|
||||||
|
First pre-release for community.crypto 3.0.0.
|
||||||
|
This release drops compatibility for ansible-core before 2.17, for Python before 3.7, and for cryptography before 3.3.
|
||||||
|
|
||||||
|
Minor Changes
|
||||||
|
-------------
|
||||||
|
|
||||||
|
- No longer provide cryptography's ``backend`` parameter. This will break with cryptography < 3.1 (https://github.com/ansible-collections/community.crypto/pull/878).
|
||||||
|
- On cryptography 36.0.0+, always use ``public_bytes()`` for X.509 extension objects instead of using cryptography internals to obtain DER value of extension (https://github.com/ansible-collections/community.crypto/pull/878).
|
||||||
|
- Python code modernization: add type hints and type checking (https://github.com/ansible-collections/community.crypto/pull/885).
|
||||||
|
- Python code modernization: avoid unnecessary string conversion (https://github.com/ansible-collections/community.crypto/pull/880).
|
||||||
|
- Python code modernization: avoid using ``six`` (https://github.com/ansible-collections/community.crypto/pull/884).
|
||||||
|
- Python code modernization: remove Python 3 specific code (https://github.com/ansible-collections/community.crypto/pull/877).
|
||||||
|
- Python code modernization: update ``__future__`` imports, remove Python 2 specific boilerplates (https://github.com/ansible-collections/community.crypto/pull/876).
|
||||||
|
- Python code modernization: use ``unittest.mock`` instead of ``ansible_collections.community.internal_test_tools.tests.unit.compat.mock`` (https://github.com/ansible-collections/community.crypto/pull/881).
|
||||||
|
- Python code modernization: use f-strings instead of ``%`` and ``str.format()`` (https://github.com/ansible-collections/community.crypto/pull/875).
|
||||||
|
- Remove ``backend`` parameter from internal code whenever possible (https://github.com/ansible-collections/community.crypto/pull/883).
|
||||||
|
- Remove various compatibility code for cryptography < 3.3 (https://github.com/ansible-collections/community.crypto/pull/878).
|
||||||
|
- Remove vendored copy of ``distutils.version`` in favor of vendored copy included with ansible-core 2.12+ (https://github.com/ansible-collections/community.crypto/pull/371).
|
||||||
|
- acme_* modules - improve parsing of ``Retry-After`` reply headers in regular ACME requests (https://github.com/ansible-collections/community.crypto/pull/890).
|
||||||
|
- action_module plugin utils - remove compatibility with older ansible-core/ansible-base/Ansible versions (https://github.com/ansible-collections/community.crypto/pull/872).
|
||||||
|
- x509_certificate, x509_certificate_pipe - the ``ownca_version`` and ``selfsigned_version`` parameters explicitly only allow the value ``3``. The module already failed for other values in the past, now this is validated as part of the module argument spec (https://github.com/ansible-collections/community.crypto/pull/890).
|
||||||
|
|
||||||
|
Breaking Changes / Porting Guide
|
||||||
|
--------------------------------
|
||||||
|
|
||||||
|
- All doc_fragments are now private to the collection and must not be used from other collections or unrelated plugins/modules. Breaking changes in these can happen at any time, even in bugfix releases (https://github.com/ansible-collections/community.crypto/pull/898).
|
||||||
|
- All module_utils and plugin_utils are now private to the collection and must not be used from other collections or unrelated plugins/modules. Breaking changes in these can happen at any time, even in bugfix releases (https://github.com/ansible-collections/community.crypto/pull/887).
|
||||||
|
- Ignore value of ``select_crypto_backend`` for all modules except acme_* and ..., and always assume the value ``auto``. This ensures that the ``cryptography`` version is always checked (https://github.com/ansible-collections/community.crypto/pull/883).
|
||||||
|
- The validation for relative timestamps is now more strict. A string starting with ``+`` or ``-`` must be valid, otherwise validation will fail. In the past such strings were often silently ignored, and in many cases the code which triggered the validation was not able to handle no result (https://github.com/ansible-collections/community.crypto/pull/885).
|
||||||
|
- acme.certificates module utils - the ``retrieve_acme_v1_certificate()`` helper function has been removed (https://github.com/ansible-collections/community.crypto/pull/873).
|
||||||
|
- get_certificate - the default for ``asn1_base64`` changed from ``false`` to ``true`` (https://github.com/ansible-collections/community.crypto/pull/873).
|
||||||
|
- x509_crl - the ``mode`` parameter no longer denotes the update mode, but the CRL file mode. Use ``crl_mode`` instead for the update mode (https://github.com/ansible-collections/community.crypto/pull/873).
|
||||||
|
|
||||||
|
Deprecated Features
|
||||||
|
-------------------
|
||||||
|
|
||||||
|
- acme_certificate - deprecate the ``agreement`` option which has no more effect. It will be removed from community.crypto 4.0.0 (https://github.com/ansible-collections/community.crypto/pull/891).
|
||||||
|
- openssl_pkcs12 - deprecate the ``maciter_size`` option which has no more effect. It will be removed from community.crypto 4.0.0 (https://github.com/ansible-collections/community.crypto/pull/891).
|
||||||
|
|
||||||
|
Removed Features (previously deprecated)
|
||||||
|
----------------------------------------
|
||||||
|
|
||||||
|
- The collection no longer supports cryptography < 3.3 (https://github.com/ansible-collections/community.crypto/pull/878, https://github.com/ansible-collections/community.crypto/pull/882).
|
||||||
|
- acme.acme module utils - the ``get_default_argspec()`` function has been removed. Use ``create_default_argspec()`` instead (https://github.com/ansible-collections/community.crypto/pull/873).
|
||||||
|
- acme.backends module utils - the methods ``get_ordered_csr_identifiers()`` and ``get_cert_information()`` of ``CryptoBackend`` now must be implemented (https://github.com/ansible-collections/community.crypto/pull/873).
|
||||||
|
- acme.documentation docs fragment - the ``documentation`` docs fragment has been removed. Use both the ``basic`` and ``account`` docs fragments in ``acme`` instead (https://github.com/ansible-collections/community.crypto/pull/873).
|
||||||
|
- acme_* modules - support for ACME v1 has been removed (https://github.com/ansible-collections/community.crypto/pull/873).
|
||||||
|
- community.crypto no longer supports Ansible 2.9, ansible-base 2.10, and ansible-core versions 2.11, 2.12, 2.13, 2.14, 2.15, and 2.16. While content from this collection might still work with some older versions of ansible-core, it will not work with any Python version before 3.7 (https://github.com/ansible-collections/community.crypto/pull/870).
|
||||||
|
- crypto.basic module utils - remove ``CRYPTOGRAPHY_HAS_*`` flags. All tested features are supported since cryptography 3.0 (https://github.com/ansible-collections/community.crypto/pull/878).
|
||||||
|
- crypto.cryptography_support module utils - remove ``cryptography_serial_number_of_cert()`` helper function (https://github.com/ansible-collections/community.crypto/pull/878).
|
||||||
|
- crypto.module_backends.common module utils - this module utils has been removed. Use the ``argspec`` module utils instead (https://github.com/ansible-collections/community.crypto/pull/873).
|
||||||
|
- crypto.support module utils - remove ``pyopenssl`` backend (https://github.com/ansible-collections/community.crypto/pull/874).
|
||||||
|
- execution environment dependencies - remove PyOpenSSL dependency (https://github.com/ansible-collections/community.crypto/pull/874).
|
||||||
|
- openssl_csr_pipe - the module now ignores check mode and will always behave as if check mode is not active (https://github.com/ansible-collections/community.crypto/pull/873).
|
||||||
|
- openssl_pkcs12 - support for the ``pyopenssl`` backend has been removed (https://github.com/ansible-collections/community.crypto/pull/873).
|
||||||
|
- openssl_privatekey_pipe - the module now ignores check mode and will always behave as if check mode is not active (https://github.com/ansible-collections/community.crypto/pull/873).
|
||||||
|
- time module utils - remove ``pyopenssl`` backend (https://github.com/ansible-collections/community.crypto/pull/874).
|
||||||
|
- x509_certificate_pipe - the module now ignores check mode and will always behave as if check mode is not active (https://github.com/ansible-collections/community.crypto/pull/873).
|
||||||
|
|
||||||
|
v2.26.1
|
||||||
|
=======
|
||||||
|
|
||||||
|
Release Summary
|
||||||
|
---------------
|
||||||
|
|
||||||
|
Bugfix and maintenance release with improved CI.
|
||||||
|
|
||||||
|
Bugfixes
|
||||||
|
--------
|
||||||
|
|
||||||
|
- luks_device - mark parameter ``passphrase_encoding`` as ``no_log=False`` to avoid confusing warning (https://github.com/ansible-collections/community.crypto/pull/867).
|
||||||
|
- luks_device - removing a specific keyslot with ``remove_keyslot`` caused the module to hang while cryptsetup was waiting for a passphrase from stdin, while the module did not supply one. Since a keyslot is not necessary, do not provide one (https://github.com/ansible-collections/community.crypto/issues/864, https://github.com/ansible-collections/community.crypto/pull/868).
|
||||||
|
|
||||||
|
v2.26.0
|
||||||
|
=======
|
||||||
|
|
||||||
|
Release Summary
|
||||||
|
---------------
|
||||||
|
|
||||||
|
Feature release.
|
||||||
|
|
||||||
|
Minor Changes
|
||||||
|
-------------
|
||||||
|
|
||||||
|
- openssl_pkcs12 - the module now supports ``certificate_content``/``other_certificates_content`` for cases where the data already exists in memory and not yet in a file (https://github.com/ansible-collections/community.crypto/issues/847, https://github.com/ansible-collections/community.crypto/pull/848).
|
||||||
|
|
||||||
v2.25.0
|
v2.25.0
|
||||||
=======
|
=======
|
||||||
|
|
||||||
|
|||||||
@@ -1,48 +0,0 @@
|
|||||||
PYTHON SOFTWARE FOUNDATION LICENSE VERSION 2
|
|
||||||
--------------------------------------------
|
|
||||||
|
|
||||||
1. This LICENSE AGREEMENT is between the Python Software Foundation
|
|
||||||
("PSF"), and the Individual or Organization ("Licensee") accessing and
|
|
||||||
otherwise using this software ("Python") in source or binary form and
|
|
||||||
its associated documentation.
|
|
||||||
|
|
||||||
2. Subject to the terms and conditions of this License Agreement, PSF hereby
|
|
||||||
grants Licensee a nonexclusive, royalty-free, world-wide license to reproduce,
|
|
||||||
analyze, test, perform and/or display publicly, prepare derivative works,
|
|
||||||
distribute, and otherwise use Python alone or in any derivative version,
|
|
||||||
provided, however, that PSF's License Agreement and PSF's notice of copyright,
|
|
||||||
i.e., "Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010,
|
|
||||||
2011, 2012, 2013, 2014, 2015, 2016, 2017, 2018, 2019, 2020, 2021 Python Software Foundation;
|
|
||||||
All Rights Reserved" are retained in Python alone or in any derivative version
|
|
||||||
prepared by Licensee.
|
|
||||||
|
|
||||||
3. In the event Licensee prepares a derivative work that is based on
|
|
||||||
or incorporates Python or any part thereof, and wants to make
|
|
||||||
the derivative work available to others as provided herein, then
|
|
||||||
Licensee hereby agrees to include in any such work a brief summary of
|
|
||||||
the changes made to Python.
|
|
||||||
|
|
||||||
4. PSF is making Python available to Licensee on an "AS IS"
|
|
||||||
basis. PSF MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR
|
|
||||||
IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, PSF MAKES NO AND
|
|
||||||
DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS
|
|
||||||
FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF PYTHON WILL NOT
|
|
||||||
INFRINGE ANY THIRD PARTY RIGHTS.
|
|
||||||
|
|
||||||
5. PSF SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF PYTHON
|
|
||||||
FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS AS
|
|
||||||
A RESULT OF MODIFYING, DISTRIBUTING, OR OTHERWISE USING PYTHON,
|
|
||||||
OR ANY DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF.
|
|
||||||
|
|
||||||
6. This License Agreement will automatically terminate upon a material
|
|
||||||
breach of its terms and conditions.
|
|
||||||
|
|
||||||
7. Nothing in this License Agreement shall be deemed to create any
|
|
||||||
relationship of agency, partnership, or joint venture between PSF and
|
|
||||||
Licensee. This License Agreement does not grant permission to use PSF
|
|
||||||
trademarks or trade name in a trademark sense to endorse or promote
|
|
||||||
products or services of Licensee, or any third party.
|
|
||||||
|
|
||||||
8. By copying, installing or otherwise using Python, Licensee
|
|
||||||
agrees to be bound by the terms and conditions of this License
|
|
||||||
Agreement.
|
|
||||||
72
README.md
72
README.md
@@ -8,8 +8,9 @@ SPDX-License-Identifier: GPL-3.0-or-later
|
|||||||
|
|
||||||
[](https://docs.ansible.com/ansible/devel/collections/community/crypto/)
|
[](https://docs.ansible.com/ansible/devel/collections/community/crypto/)
|
||||||
[](https://dev.azure.com/ansible/community.crypto/_build?definitionId=21)
|
[](https://dev.azure.com/ansible/community.crypto/_build?definitionId=21)
|
||||||
[](https://github.com/ansible-collections/community.crypto/actions)
|
[](https://github.com/ansible-collections/community.crypto/actions)
|
||||||
[](https://codecov.io/gh/ansible-collections/community.crypto)
|
[](https://codecov.io/gh/ansible-collections/community.crypto)
|
||||||
|
[](https://api.reuse.software/info/github.com/ansible-collections/community.crypto)
|
||||||
|
|
||||||
Provides modules for [Ansible](https://www.ansible.com/community) for various cryptographic operations.
|
Provides modules for [Ansible](https://www.ansible.com/community) for various cryptographic operations.
|
||||||
|
|
||||||
@@ -38,13 +39,13 @@ For more information about communication, see the [Ansible communication guide](
|
|||||||
|
|
||||||
## Tested with Ansible
|
## Tested with Ansible
|
||||||
|
|
||||||
Tested with the current Ansible 2.9, ansible-base 2.10, ansible-core 2.11, ansible-core 2.12, ansible-core 2.13, ansible-core 2.14, ansible-core 2.15, ansible-core 2.16, ansible-core-2.17, and ansible-core 2.18 releases and the current development version of ansible-core. Ansible versions before 2.9.10 are not supported.
|
Tested with the current ansible-core-2.17, ansible-core 2.18, and ansible-core 2.19 releases and the current development version of ansible-core. Ansible-core versions before 2.17 are not supported; please use community.crypto 2.x.y with these.
|
||||||
|
|
||||||
## External requirements
|
## External requirements
|
||||||
|
|
||||||
The exact requirements for every module are listed in the module documentation.
|
The exact requirements for every module are listed in the module documentation.
|
||||||
|
|
||||||
Most modules require a recent enough version of [the Python cryptography library](https://pypi.org/project/cryptography/). See the module documentations for the minimal version supported for each module.
|
Most modules require a recent enough version of [the Python cryptography library](https://pypi.org/project/cryptography/); the minimum supported version by this collection is 3.3. See the module documentations for the minimal version supported for each module.
|
||||||
|
|
||||||
## Collection Documentation
|
## Collection Documentation
|
||||||
|
|
||||||
@@ -56,59 +57,6 @@ We also separately publish [**latest commit** collection documentation](https://
|
|||||||
|
|
||||||
If you use the Ansible package and do not update collections independently, use **latest**. If you install or update this collection directly from Galaxy, use **devel**. If you are looking to contribute, use **latest commit**.
|
If you use the Ansible package and do not update collections independently, use **latest**. If you install or update this collection directly from Galaxy, use **devel**. If you are looking to contribute, use **latest commit**.
|
||||||
|
|
||||||
## Included content
|
|
||||||
|
|
||||||
- OpenSSL / PKI modules and plugins:
|
|
||||||
- certificate_complete_chain module
|
|
||||||
- openssl_csr_info module and filter
|
|
||||||
- openssl_csr_pipe module
|
|
||||||
- openssl_csr module
|
|
||||||
- openssl_dhparam module
|
|
||||||
- openssl_pkcs12 module
|
|
||||||
- openssl_privatekey_convert module
|
|
||||||
- openssl_privatekey_info module and filter
|
|
||||||
- openssl_privatekey_pipe module
|
|
||||||
- openssl_privatekey module
|
|
||||||
- openssl_publickey_info module and filter
|
|
||||||
- openssl_publickey module
|
|
||||||
- openssl_signature_info module
|
|
||||||
- openssl_signature module
|
|
||||||
- split_pem filter
|
|
||||||
- x509_certificate_convert module
|
|
||||||
- x509_certificate_info module and filter
|
|
||||||
- x509_certificate_pipe module
|
|
||||||
- x509_certificate module
|
|
||||||
- x509_crl_info module and filter
|
|
||||||
- x509_crl module
|
|
||||||
- OpenSSH modules and plugins:
|
|
||||||
- openssh_cert module
|
|
||||||
- openssh_keypair module
|
|
||||||
- ACME modules and plugins:
|
|
||||||
- acme_account_info module
|
|
||||||
- acme_account module
|
|
||||||
- acme_ari_info module
|
|
||||||
- acme_certificate module
|
|
||||||
- acme_certificate_deactivate_authz module
|
|
||||||
- acme_certificate_order_create module
|
|
||||||
- acme_certificate_order_finalize module
|
|
||||||
- acme_certificate_order_info module
|
|
||||||
- acme_certificate_order_validate module
|
|
||||||
- acme_certificate_revoke module
|
|
||||||
- acme_challenge_cert_helper module
|
|
||||||
- acme_inspect module
|
|
||||||
- ECS modules and plugins:
|
|
||||||
- ecs_certificate module
|
|
||||||
- ecs_domain module
|
|
||||||
- GnuPG modules and plugins:
|
|
||||||
- gpg_fingerprint lookup and filter
|
|
||||||
- Miscellaneous modules and plugins:
|
|
||||||
- crypto_info module
|
|
||||||
- get_certificate module
|
|
||||||
- luks_device module
|
|
||||||
- parse_serial and to_serial filters
|
|
||||||
|
|
||||||
You can also find a list of all modules and plugins with documentation on the [Ansible docs site](https://docs.ansible.com/ansible/latest/collections/community/crypto/), or the [latest commit collection documentation](https://ansible-collections.github.io/community.crypto/branch/main/).
|
|
||||||
|
|
||||||
## Using this collection
|
## Using this collection
|
||||||
|
|
||||||
Before using the crypto community collection, you need to install the collection with the `ansible-galaxy` CLI:
|
Before using the crypto community collection, you need to install the collection with the `ansible-galaxy` CLI:
|
||||||
@@ -145,14 +93,6 @@ See the [changelog](https://github.com/ansible-collections/community.crypto/blob
|
|||||||
|
|
||||||
We plan to regularly release minor and patch versions, whenever new features are added or bugs fixed. Our collection follows [semantic versioning](https://semver.org/), so breaking changes will only happen in major releases.
|
We plan to regularly release minor and patch versions, whenever new features are added or bugs fixed. Our collection follows [semantic versioning](https://semver.org/), so breaking changes will only happen in major releases.
|
||||||
|
|
||||||
Most modules will drop PyOpenSSL support in version 2.0.0 of the collection, i.e. in the next major version. We currently plan to release 2.0.0 somewhen during 2021. Around then, the supported versions of the most common distributions will contain a new enough version of ``cryptography``.
|
|
||||||
|
|
||||||
Once 2.0.0 has been released, bugfixes will still be backported to 1.0.0 for some time, and some features might also be backported. If we do not want to backport something ourselves because we think it is not worth the effort, backport PRs by non-maintainers are usually accepted.
|
|
||||||
|
|
||||||
In 2.0.0, the following notable features will be removed:
|
|
||||||
* PyOpenSSL backends of all modules, except ``openssl_pkcs12`` which does not have a ``cryptography`` backend due to lack of support of PKCS#12 functionality in ``cryptography``.
|
|
||||||
* The ``assertonly`` provider of ``x509_certificate`` will be removed.
|
|
||||||
|
|
||||||
## More information
|
## More information
|
||||||
|
|
||||||
- [Ansible Collection overview](https://github.com/ansible-collections/overview)
|
- [Ansible Collection overview](https://github.com/ansible-collections/overview)
|
||||||
@@ -166,6 +106,6 @@ This collection is primarily licensed and distributed as a whole under the GNU G
|
|||||||
|
|
||||||
See [LICENSES/GPL-3.0-or-later.txt](https://github.com/ansible-collections/community.crypto/blob/main/COPYING) for the full text.
|
See [LICENSES/GPL-3.0-or-later.txt](https://github.com/ansible-collections/community.crypto/blob/main/COPYING) for the full text.
|
||||||
|
|
||||||
Parts of the collection are licensed under the [Apache 2.0 license](https://github.com/ansible-collections/community.crypto/blob/main/LICENSES/Apache-2.0.txt) (`plugins/module_utils/crypto/_obj2txt.py` and `plugins/module_utils/crypto/_objects_data.py`), the [BSD 2-Clause license](https://github.com/ansible-collections/community.crypto/blob/main/LICENSES/BSD-2-Clause.txt) (`plugins/module_utils/ecs/api.py`), the [BSD 3-Clause license](https://github.com/ansible-collections/community.crypto/blob/main/LICENSES/BSD-3-Clause.txt) (`plugins/module_utils/crypto/_obj2txt.py`, `tests/integration/targets/prepare_jinja2_compat/filter_plugins/jinja_compatibility.py`), and the [PSF 2.0 license](https://github.com/ansible-collections/community.crypto/blob/main/LICENSES/PSF-2.0.txt) (`plugins/module_utils/_version.py`). This only applies to vendored files in ``plugins/module_utils/`` and to the ECS module utils.
|
Parts of the collection are licensed under the [Apache 2.0 license](https://github.com/ansible-collections/community.crypto/blob/main/LICENSES/Apache-2.0.txt) (`plugins/module_utils/_crypto/_obj2txt.py` and `plugins/module_utils/_crypto/_objects_data.py`), the [BSD 2-Clause license](https://github.com/ansible-collections/community.crypto/blob/main/LICENSES/BSD-2-Clause.txt) (`plugins/module_utils/_ecs/api.py`), the [BSD 3-Clause license](https://github.com/ansible-collections/community.crypto/blob/main/LICENSES/BSD-3-Clause.txt) (`plugins/module_utils/_crypto/_obj2txt.py`). This only applies to vendored files in ``plugins/module_utils/`` and to the ECS module utils.
|
||||||
|
|
||||||
Almost all files have a machine readable `SDPX-License-Identifier:` comment denoting its respective license(s) or an equivalent entry in an accompanying `.license` file. Only changelog fragments (which will not be part of a release) are covered by a blanket statement in `.reuse/dep5`. Right now a few vendored PEM files do not have licensing information as well. This conforms to the [REUSE specification](https://reuse.software/spec/) up to the aforementioned PEM files.
|
All files have a machine readable `SDPX-License-Identifier:` comment denoting its respective license(s) or an equivalent entry in an accompanying `.license` file. Only changelog fragments (which will not be part of a release) are covered by a blanket statement in `REUSE.toml`. This conforms to the [REUSE specification](https://reuse.software/spec/).
|
||||||
|
|||||||
@@ -1,9 +1,11 @@
|
|||||||
---
|
|
||||||
# Copyright (c) Ansible Project
|
# Copyright (c) Ansible Project
|
||||||
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
|
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||||
# SPDX-License-Identifier: GPL-3.0-or-later
|
# SPDX-License-Identifier: GPL-3.0-or-later
|
||||||
|
|
||||||
dependencies:
|
version = 1
|
||||||
- setup_python_info
|
|
||||||
- setup_remote_constraints
|
[[annotations]]
|
||||||
- setup_pkg_mgr
|
path = "changelogs/fragments/**"
|
||||||
|
precedence = "aggregate"
|
||||||
|
SPDX-FileCopyrightText = "Ansible Project"
|
||||||
|
SPDX-License-Identifier = "GPL-3.0-or-later"
|
||||||
57
antsibull-nox.toml
Normal file
57
antsibull-nox.toml
Normal file
@@ -0,0 +1,57 @@
|
|||||||
|
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||||
|
# SPDX-License-Identifier: GPL-3.0-or-later
|
||||||
|
# SPDX-FileCopyrightText: 2025 Felix Fontein <felix@fontein.de>
|
||||||
|
|
||||||
|
[collection_sources]
|
||||||
|
"community.internal_test_tools" = "git+https://github.com/ansible-collections/community.internal_test_tools.git,main"
|
||||||
|
|
||||||
|
[sessions]
|
||||||
|
|
||||||
|
[sessions.lint]
|
||||||
|
run_isort = true
|
||||||
|
isort_config = ".isort.cfg"
|
||||||
|
run_black = true
|
||||||
|
run_flake8 = true
|
||||||
|
flake8_config = ".flake8"
|
||||||
|
run_pylint = true
|
||||||
|
pylint_rcfile = ".pylintrc"
|
||||||
|
pylint_ansible_core_package = "ansible-core>=2.19.0b4"
|
||||||
|
run_yamllint = true
|
||||||
|
yamllint_config = ".yamllint"
|
||||||
|
yamllint_config_plugins = ".yamllint-docs"
|
||||||
|
yamllint_config_plugins_examples = ".yamllint-examples"
|
||||||
|
run_mypy = true
|
||||||
|
mypy_ansible_core_package = "ansible-core>=2.19.0b4"
|
||||||
|
mypy_config = ".mypy.ini"
|
||||||
|
mypy_extra_deps = [
|
||||||
|
"cryptography",
|
||||||
|
"types-mock",
|
||||||
|
"types-PyYAML",
|
||||||
|
]
|
||||||
|
|
||||||
|
[sessions.docs_check]
|
||||||
|
validate_collection_refs="all"
|
||||||
|
|
||||||
|
[sessions.license_check]
|
||||||
|
run_reuse = true
|
||||||
|
|
||||||
|
[sessions.extra_checks]
|
||||||
|
run_no_unwanted_files = true
|
||||||
|
no_unwanted_files_module_extensions = [".py"]
|
||||||
|
no_unwanted_files_yaml_extensions = [".yml"]
|
||||||
|
run_action_groups = true
|
||||||
|
|
||||||
|
[[sessions.extra_checks.action_groups_config]]
|
||||||
|
name = "acme"
|
||||||
|
pattern = "^acme_.*$"
|
||||||
|
exclusions = [
|
||||||
|
"acme_ari_info", # does not support ACME account
|
||||||
|
"acme_certificate_renewal_info", # does not support ACME account
|
||||||
|
"acme_challenge_cert_helper", # does not support (and need) any common parameters
|
||||||
|
]
|
||||||
|
doc_fragment = "community.crypto.attributes.actiongroup_acme"
|
||||||
|
|
||||||
|
[sessions.build_import_check]
|
||||||
|
run_galaxy_importer = true
|
||||||
|
|
||||||
|
[sessions.ansible_lint]
|
||||||
@@ -1616,3 +1616,140 @@ releases:
|
|||||||
- 2.25.0.yml
|
- 2.25.0.yml
|
||||||
- luks_device_passphrase_newlines.yml
|
- luks_device_passphrase_newlines.yml
|
||||||
release_date: '2025-02-09'
|
release_date: '2025-02-09'
|
||||||
|
2.26.0:
|
||||||
|
changes:
|
||||||
|
minor_changes:
|
||||||
|
- openssl_pkcs12 - the module now supports ``certificate_content``/``other_certificates_content``
|
||||||
|
for cases where the data already exists in memory and not yet in a file
|
||||||
|
(https://github.com/ansible-collections/community.crypto/issues/847, https://github.com/ansible-collections/community.crypto/pull/848).
|
||||||
|
release_summary: Feature release.
|
||||||
|
fragments:
|
||||||
|
- 2.26.0.yml
|
||||||
|
- openssl_pkcs12_content.yml
|
||||||
|
release_date: '2025-03-11'
|
||||||
|
2.26.1:
|
||||||
|
changes:
|
||||||
|
bugfixes:
|
||||||
|
- luks_device - mark parameter ``passphrase_encoding`` as ``no_log=False``
|
||||||
|
to avoid confusing warning (https://github.com/ansible-collections/community.crypto/pull/867).
|
||||||
|
- luks_device - removing a specific keyslot with ``remove_keyslot`` caused
|
||||||
|
the module to hang while cryptsetup was waiting for a passphrase from stdin,
|
||||||
|
while the module did not supply one. Since a keyslot is not necessary, do
|
||||||
|
not provide one (https://github.com/ansible-collections/community.crypto/issues/864,
|
||||||
|
https://github.com/ansible-collections/community.crypto/pull/868).
|
||||||
|
release_summary: Bugfix and maintenance release with improved CI.
|
||||||
|
fragments:
|
||||||
|
- 2.26.1.yml
|
||||||
|
- 867-passphrase-encoding-nolog.yml
|
||||||
|
- 868-luks-remove-keyslot.yml
|
||||||
|
release_date: '2025-04-28'
|
||||||
|
3.0.0-a1:
|
||||||
|
changes:
|
||||||
|
breaking_changes:
|
||||||
|
- All doc_fragments are now private to the collection and must not be used
|
||||||
|
from other collections or unrelated plugins/modules. Breaking changes in
|
||||||
|
these can happen at any time, even in bugfix releases (https://github.com/ansible-collections/community.crypto/pull/898).
|
||||||
|
- All module_utils and plugin_utils are now private to the collection and
|
||||||
|
must not be used from other collections or unrelated plugins/modules. Breaking
|
||||||
|
changes in these can happen at any time, even in bugfix releases (https://github.com/ansible-collections/community.crypto/pull/887).
|
||||||
|
- Ignore value of ``select_crypto_backend`` for all modules except acme_*
|
||||||
|
and ..., and always assume the value ``auto``. This ensures that the ``cryptography``
|
||||||
|
version is always checked (https://github.com/ansible-collections/community.crypto/pull/883).
|
||||||
|
- The validation for relative timestamps is now more strict. A string starting
|
||||||
|
with ``+`` or ``-`` must be valid, otherwise validation will fail. In the
|
||||||
|
past such strings were often silently ignored, and in many cases the code
|
||||||
|
which triggered the validation was not able to handle no result (https://github.com/ansible-collections/community.crypto/pull/885).
|
||||||
|
- acme.certificates module utils - the ``retrieve_acme_v1_certificate()``
|
||||||
|
helper function has been removed (https://github.com/ansible-collections/community.crypto/pull/873).
|
||||||
|
- get_certificate - the default for ``asn1_base64`` changed from ``false``
|
||||||
|
to ``true`` (https://github.com/ansible-collections/community.crypto/pull/873).
|
||||||
|
- x509_crl - the ``mode`` parameter no longer denotes the update mode, but
|
||||||
|
the CRL file mode. Use ``crl_mode`` instead for the update mode (https://github.com/ansible-collections/community.crypto/pull/873).
|
||||||
|
deprecated_features:
|
||||||
|
- acme_certificate - deprecate the ``agreement`` option which has no more
|
||||||
|
effect. It will be removed from community.crypto 4.0.0 (https://github.com/ansible-collections/community.crypto/pull/891).
|
||||||
|
- openssl_pkcs12 - deprecate the ``maciter_size`` option which has no more
|
||||||
|
effect. It will be removed from community.crypto 4.0.0 (https://github.com/ansible-collections/community.crypto/pull/891).
|
||||||
|
minor_changes:
|
||||||
|
- No longer provide cryptography's ``backend`` parameter. This will break
|
||||||
|
with cryptography < 3.1 (https://github.com/ansible-collections/community.crypto/pull/878).
|
||||||
|
- On cryptography 36.0.0+, always use ``public_bytes()`` for X.509 extension
|
||||||
|
objects instead of using cryptography internals to obtain DER value of extension
|
||||||
|
(https://github.com/ansible-collections/community.crypto/pull/878).
|
||||||
|
- 'Python code modernization: add type hints and type checking (https://github.com/ansible-collections/community.crypto/pull/885).'
|
||||||
|
- 'Python code modernization: avoid unnecessary string conversion (https://github.com/ansible-collections/community.crypto/pull/880).'
|
||||||
|
- 'Python code modernization: avoid using ``six`` (https://github.com/ansible-collections/community.crypto/pull/884).'
|
||||||
|
- 'Python code modernization: remove Python 3 specific code (https://github.com/ansible-collections/community.crypto/pull/877).'
|
||||||
|
- 'Python code modernization: update ``__future__`` imports, remove Python
|
||||||
|
2 specific boilerplates (https://github.com/ansible-collections/community.crypto/pull/876).'
|
||||||
|
- 'Python code modernization: use ``unittest.mock`` instead of ``ansible_collections.community.internal_test_tools.tests.unit.compat.mock``
|
||||||
|
(https://github.com/ansible-collections/community.crypto/pull/881).'
|
||||||
|
- 'Python code modernization: use f-strings instead of ``%`` and ``str.format()``
|
||||||
|
(https://github.com/ansible-collections/community.crypto/pull/875).'
|
||||||
|
- Remove ``backend`` parameter from internal code whenever possible (https://github.com/ansible-collections/community.crypto/pull/883).
|
||||||
|
- Remove various compatibility code for cryptography < 3.3 (https://github.com/ansible-collections/community.crypto/pull/878).
|
||||||
|
- Remove vendored copy of ``distutils.version`` in favor of vendored copy
|
||||||
|
included with ansible-core 2.12+ (https://github.com/ansible-collections/community.crypto/pull/371).
|
||||||
|
- acme_* modules - improve parsing of ``Retry-After`` reply headers in regular
|
||||||
|
ACME requests (https://github.com/ansible-collections/community.crypto/pull/890).
|
||||||
|
- action_module plugin utils - remove compatibility with older ansible-core/ansible-base/Ansible
|
||||||
|
versions (https://github.com/ansible-collections/community.crypto/pull/872).
|
||||||
|
- x509_certificate, x509_certificate_pipe - the ``ownca_version`` and ``selfsigned_version``
|
||||||
|
parameters explicitly only allow the value ``3``. The module already failed
|
||||||
|
for other values in the past, now this is validated as part of the module
|
||||||
|
argument spec (https://github.com/ansible-collections/community.crypto/pull/890).
|
||||||
|
release_summary: 'First pre-release for community.crypto 3.0.0.
|
||||||
|
|
||||||
|
This release drops compatibility for ansible-core before 2.17, for Python
|
||||||
|
before 3.7, and for cryptography before 3.3.
|
||||||
|
|
||||||
|
'
|
||||||
|
removed_features:
|
||||||
|
- The collection no longer supports cryptography < 3.3 (https://github.com/ansible-collections/community.crypto/pull/878,
|
||||||
|
https://github.com/ansible-collections/community.crypto/pull/882).
|
||||||
|
- acme.acme module utils - the ``get_default_argspec()`` function has been
|
||||||
|
removed. Use ``create_default_argspec()`` instead (https://github.com/ansible-collections/community.crypto/pull/873).
|
||||||
|
- acme.backends module utils - the methods ``get_ordered_csr_identifiers()``
|
||||||
|
and ``get_cert_information()`` of ``CryptoBackend`` now must be implemented
|
||||||
|
(https://github.com/ansible-collections/community.crypto/pull/873).
|
||||||
|
- acme.documentation docs fragment - the ``documentation`` docs fragment has
|
||||||
|
been removed. Use both the ``basic`` and ``account`` docs fragments in ``acme``
|
||||||
|
instead (https://github.com/ansible-collections/community.crypto/pull/873).
|
||||||
|
- acme_* modules - support for ACME v1 has been removed (https://github.com/ansible-collections/community.crypto/pull/873).
|
||||||
|
- community.crypto no longer supports Ansible 2.9, ansible-base 2.10, and
|
||||||
|
ansible-core versions 2.11, 2.12, 2.13, 2.14, 2.15, and 2.16. While content
|
||||||
|
from this collection might still work with some older versions of ansible-core,
|
||||||
|
it will not work with any Python version before 3.7 (https://github.com/ansible-collections/community.crypto/pull/870).
|
||||||
|
- crypto.basic module utils - remove ``CRYPTOGRAPHY_HAS_*`` flags. All tested
|
||||||
|
features are supported since cryptography 3.0 (https://github.com/ansible-collections/community.crypto/pull/878).
|
||||||
|
- crypto.cryptography_support module utils - remove ``cryptography_serial_number_of_cert()``
|
||||||
|
helper function (https://github.com/ansible-collections/community.crypto/pull/878).
|
||||||
|
- crypto.module_backends.common module utils - this module utils has been
|
||||||
|
removed. Use the ``argspec`` module utils instead (https://github.com/ansible-collections/community.crypto/pull/873).
|
||||||
|
- crypto.support module utils - remove ``pyopenssl`` backend (https://github.com/ansible-collections/community.crypto/pull/874).
|
||||||
|
- execution environment dependencies - remove PyOpenSSL dependency (https://github.com/ansible-collections/community.crypto/pull/874).
|
||||||
|
- openssl_csr_pipe - the module now ignores check mode and will always behave
|
||||||
|
as if check mode is not active (https://github.com/ansible-collections/community.crypto/pull/873).
|
||||||
|
- openssl_pkcs12 - support for the ``pyopenssl`` backend has been removed
|
||||||
|
(https://github.com/ansible-collections/community.crypto/pull/873).
|
||||||
|
- openssl_privatekey_pipe - the module now ignores check mode and will always
|
||||||
|
behave as if check mode is not active (https://github.com/ansible-collections/community.crypto/pull/873).
|
||||||
|
- time module utils - remove ``pyopenssl`` backend (https://github.com/ansible-collections/community.crypto/pull/874).
|
||||||
|
- x509_certificate_pipe - the module now ignores check mode and will always
|
||||||
|
behave as if check mode is not active (https://github.com/ansible-collections/community.crypto/pull/873).
|
||||||
|
fragments:
|
||||||
|
- 3.0.0-a1.yml
|
||||||
|
- 371-distutils-vendor-removed.yml
|
||||||
|
- 870-ansible-core.yml
|
||||||
|
- 872-action-module.yml
|
||||||
|
- 873-deprecation-removals.yml
|
||||||
|
- 874-pyopenssl.yml
|
||||||
|
- 878-backend.yml
|
||||||
|
- 883-backend.yml
|
||||||
|
- 887-module_utils-plugin_utils.yml
|
||||||
|
- 890-refactoring.yml
|
||||||
|
- 891-deprecation.yml
|
||||||
|
- 898-doc_fragments.yml
|
||||||
|
- refactoring.yml
|
||||||
|
- relative-timestamps.yml
|
||||||
|
release_date: '2025-05-18'
|
||||||
|
|||||||
@@ -5,7 +5,7 @@
|
|||||||
|
|
||||||
namespace: community
|
namespace: community
|
||||||
name: crypto
|
name: crypto
|
||||||
version: 2.25.0
|
version: 3.0.0-a1
|
||||||
readme: README.md
|
readme: README.md
|
||||||
authors:
|
authors:
|
||||||
- Ansible (github.com/ansible)
|
- Ansible (github.com/ansible)
|
||||||
|
|||||||
@@ -11,11 +11,3 @@ openssl [platform:rpm]
|
|||||||
python3-cryptography [platform:dpkg]
|
python3-cryptography [platform:dpkg]
|
||||||
python3-cryptography [platform:rpm]
|
python3-cryptography [platform:rpm]
|
||||||
python3-openssl [platform:dpkg]
|
python3-openssl [platform:dpkg]
|
||||||
# On RHEL 9+, CentOS Stream 9+, and Rocky Linux 9+, python3-pyOpenSSL is part of EPEL
|
|
||||||
python3-pyOpenSSL [platform:rpm !platform:rhel !platform:centos !platform:rocky]
|
|
||||||
python3-pyOpenSSL [platform:rhel-8]
|
|
||||||
python3-pyOpenSSL [platform:rhel !platform:rhel-6 !platform:rhel-7 !platform:rhel-8 epel]
|
|
||||||
python3-pyOpenSSL [platform:centos-8]
|
|
||||||
python3-pyOpenSSL [platform:centos !platform:centos-6 !platform:centos-7 !platform:centos-8 epel]
|
|
||||||
python3-pyOpenSSL [platform:rocky-8]
|
|
||||||
python3-pyOpenSSL [platform:rocky !platform:rocky-8 epel]
|
|
||||||
|
|||||||
@@ -3,7 +3,7 @@
|
|||||||
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
|
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||||
# SPDX-License-Identifier: GPL-3.0-or-later
|
# SPDX-License-Identifier: GPL-3.0-or-later
|
||||||
|
|
||||||
requires_ansible: '>=2.9.10'
|
requires_ansible: '>=2.17.0'
|
||||||
|
|
||||||
action_groups:
|
action_groups:
|
||||||
acme:
|
acme:
|
||||||
|
|||||||
40
noxfile.py
Normal file
40
noxfile.py
Normal file
@@ -0,0 +1,40 @@
|
|||||||
|
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||||
|
# SPDX-License-Identifier: GPL-3.0-or-later
|
||||||
|
# SPDX-FileCopyrightText: 2025 Felix Fontein <felix@fontein.de>
|
||||||
|
|
||||||
|
# /// script
|
||||||
|
# dependencies = ["nox>=2025.02.09", "antsibull-nox"]
|
||||||
|
# ///
|
||||||
|
|
||||||
|
import sys
|
||||||
|
|
||||||
|
import nox
|
||||||
|
|
||||||
|
|
||||||
|
try:
|
||||||
|
import antsibull_nox
|
||||||
|
except ImportError:
|
||||||
|
print("You need to install antsibull-nox in the same Python environment as nox.")
|
||||||
|
sys.exit(1)
|
||||||
|
|
||||||
|
|
||||||
|
antsibull_nox.load_antsibull_nox_toml()
|
||||||
|
|
||||||
|
|
||||||
|
@nox.session(name="create-certificates", default=False)
|
||||||
|
def create_certificates(session: nox.Session) -> None:
|
||||||
|
"""
|
||||||
|
Regenerate some vendored certificates.
|
||||||
|
"""
|
||||||
|
session.install("cryptography<39.0.0") # we want support for SHA1 signatures
|
||||||
|
session.run("python", "tests/create-certificates.py")
|
||||||
|
session.warn(
|
||||||
|
"Note that you need to modify some values in tests/integration/targets/x509_certificate_info/tasks/impl.yml"
|
||||||
|
" and tests/integration/targets/filter_x509_certificate_info/tasks/impl.yml!"
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
# Allow to run the noxfile with `python noxfile.py`, `pipx run noxfile.py`, or similar.
|
||||||
|
# Requires nox >= 2025.02.09
|
||||||
|
if __name__ == "__main__":
|
||||||
|
nox.main()
|
||||||
@@ -1,111 +1,100 @@
|
|||||||
# -*- coding: utf-8 -*-
|
|
||||||
|
|
||||||
# Copyright (c) 2020, Felix Fontein <felix@fontein.de>
|
# Copyright (c) 2020, Felix Fontein <felix@fontein.de>
|
||||||
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
|
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||||
# SPDX-License-Identifier: GPL-3.0-or-later
|
# SPDX-License-Identifier: GPL-3.0-or-later
|
||||||
|
|
||||||
from __future__ import absolute_import, division, print_function
|
from __future__ import annotations
|
||||||
__metaclass__ = type
|
|
||||||
|
|
||||||
|
|
||||||
import base64
|
import base64
|
||||||
|
import typing as t
|
||||||
|
|
||||||
from ansible.module_utils.common.text.converters import to_native, to_bytes
|
from ansible.module_utils.common.text.converters import to_bytes
|
||||||
|
from ansible_collections.community.crypto.plugins.module_utils._crypto.basic import (
|
||||||
from ansible_collections.community.crypto.plugins.plugin_utils.action_module import ActionModuleBase
|
|
||||||
|
|
||||||
from ansible_collections.community.crypto.plugins.module_utils.crypto.basic import (
|
|
||||||
OpenSSLObjectError,
|
OpenSSLObjectError,
|
||||||
)
|
)
|
||||||
|
from ansible_collections.community.crypto.plugins.module_utils._crypto.module_backends.privatekey import (
|
||||||
from ansible_collections.community.crypto.plugins.module_utils.crypto.module_backends.privatekey import (
|
|
||||||
select_backend,
|
|
||||||
get_privatekey_argument_spec,
|
get_privatekey_argument_spec,
|
||||||
|
select_backend,
|
||||||
|
)
|
||||||
|
from ansible_collections.community.crypto.plugins.plugin_utils._action_module import (
|
||||||
|
ActionModuleBase,
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
class PrivateKeyModule(object):
|
if t.TYPE_CHECKING:
|
||||||
def __init__(self, module, module_backend):
|
from ansible_collections.community.crypto.plugins.module_utils._argspec import (
|
||||||
|
ArgumentSpec,
|
||||||
|
)
|
||||||
|
from ansible_collections.community.crypto.plugins.module_utils._crypto.module_backends.privatekey import (
|
||||||
|
PrivateKeyBackend,
|
||||||
|
)
|
||||||
|
from ansible_collections.community.crypto.plugins.plugin_utils._action_module import (
|
||||||
|
AnsibleActionModule,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
class PrivateKeyModule:
|
||||||
|
def __init__(
|
||||||
|
self, module: AnsibleActionModule, module_backend: PrivateKeyBackend
|
||||||
|
) -> None:
|
||||||
self.module = module
|
self.module = module
|
||||||
self.module_backend = module_backend
|
self.module_backend = module_backend
|
||||||
self.check_mode = module.check_mode
|
self.check_mode = module.check_mode
|
||||||
self.changed = False
|
self.changed = False
|
||||||
self.return_current_key = module.params['return_current_key']
|
self.return_current_key: bool = module.params["return_current_key"]
|
||||||
|
|
||||||
if module.params['content'] is not None:
|
content: str | None = module.params["content"]
|
||||||
if module.params['content_base64']:
|
content_base64: bool = module.params["content_base64"]
|
||||||
|
if content is not None:
|
||||||
|
if content_base64:
|
||||||
try:
|
try:
|
||||||
data = base64.b64decode(module.params['content'])
|
data = base64.b64decode(content)
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
module.fail_json(msg='Cannot decode Base64 encoded data: {0}'.format(e))
|
module.fail_json(msg=f"Cannot decode Base64 encoded data: {e}")
|
||||||
else:
|
else:
|
||||||
data = to_bytes(module.params['content'])
|
data = to_bytes(content)
|
||||||
module_backend.set_existing(data)
|
module_backend.set_existing(privatekey_bytes=data)
|
||||||
|
|
||||||
def generate(self, module):
|
def generate(self, module: AnsibleActionModule) -> None:
|
||||||
"""Generate a keypair."""
|
"""Generate a keypair."""
|
||||||
|
|
||||||
if self.module_backend.needs_regeneration():
|
if self.module_backend.needs_regeneration():
|
||||||
# Regenerate
|
# Regenerate
|
||||||
if not self.check_mode:
|
|
||||||
self.module_backend.generate_private_key()
|
self.module_backend.generate_private_key()
|
||||||
privatekey_data = self.module_backend.get_private_key_data()
|
# Call get_private_key_data() to make sure that exceptions are raised now:
|
||||||
self.privatekey_bytes = privatekey_data
|
self.module_backend.get_private_key_data()
|
||||||
else:
|
|
||||||
self.module.deprecate(
|
|
||||||
'Check mode support for openssl_privatekey_pipe will change in community.crypto 3.0.0'
|
|
||||||
' to behave the same as without check mode. You can get that behavior right now'
|
|
||||||
' by adding `check_mode: false` to the openssl_privatekey_pipe task. If you think this'
|
|
||||||
' breaks your use-case of this module, please create an issue in the'
|
|
||||||
' community.crypto repository',
|
|
||||||
version='3.0.0',
|
|
||||||
collection_name='community.crypto',
|
|
||||||
)
|
|
||||||
self.changed = True
|
self.changed = True
|
||||||
elif self.module_backend.needs_conversion():
|
elif self.module_backend.needs_conversion():
|
||||||
# Convert
|
# Convert
|
||||||
if not self.check_mode:
|
|
||||||
self.module_backend.convert_private_key()
|
self.module_backend.convert_private_key()
|
||||||
privatekey_data = self.module_backend.get_private_key_data()
|
# Call get_private_key_data() to make sure that exceptions are raised now:
|
||||||
self.privatekey_bytes = privatekey_data
|
self.module_backend.get_private_key_data()
|
||||||
else:
|
|
||||||
self.module.deprecate(
|
|
||||||
'Check mode support for openssl_privatekey_pipe will change in community.crypto 3.0.0'
|
|
||||||
' to behave the same as without check mode. You can get that behavior right now'
|
|
||||||
' by adding `check_mode: false` to the openssl_privatekey_pipe task. If you think this'
|
|
||||||
' breaks your use-case of this module, please create an issue in the'
|
|
||||||
' community.crypto repository',
|
|
||||||
version='3.0.0',
|
|
||||||
collection_name='community.crypto',
|
|
||||||
)
|
|
||||||
self.changed = True
|
self.changed = True
|
||||||
|
|
||||||
def dump(self):
|
def dump(self) -> dict[str, t.Any]:
|
||||||
"""Serialize the object into a dictionary."""
|
"""Serialize the object into a dictionary."""
|
||||||
result = self.module_backend.dump(include_key=self.changed or self.return_current_key)
|
result = self.module_backend.dump(
|
||||||
result['changed'] = self.changed
|
include_key=self.changed or self.return_current_key
|
||||||
|
)
|
||||||
|
result["changed"] = self.changed
|
||||||
return result
|
return result
|
||||||
|
|
||||||
|
|
||||||
class ActionModule(ActionModuleBase):
|
class ActionModule(ActionModuleBase):
|
||||||
@staticmethod
|
def setup_module(self) -> tuple[ArgumentSpec, dict[str, t.Any]]:
|
||||||
def setup_module():
|
|
||||||
argument_spec = get_privatekey_argument_spec()
|
argument_spec = get_privatekey_argument_spec()
|
||||||
argument_spec.argument_spec.update(dict(
|
argument_spec.argument_spec.update(
|
||||||
content=dict(type='str', no_log=True),
|
{
|
||||||
content_base64=dict(type='bool', default=False),
|
"content": {"type": "str", "no_log": True},
|
||||||
return_current_key=dict(type='bool', default=False),
|
"content_base64": {"type": "bool", "default": False},
|
||||||
))
|
"return_current_key": {"type": "bool", "default": False},
|
||||||
return argument_spec, dict(
|
}
|
||||||
supports_check_mode=True,
|
|
||||||
)
|
)
|
||||||
|
return argument_spec, {
|
||||||
|
"supports_check_mode": True,
|
||||||
|
}
|
||||||
|
|
||||||
@staticmethod
|
def run_module(self, module: AnsibleActionModule) -> None:
|
||||||
def run_module(module):
|
module_backend = select_backend(module=module)
|
||||||
backend, module_backend = select_backend(
|
|
||||||
module=module,
|
|
||||||
backend=module.params['select_crypto_backend'],
|
|
||||||
)
|
|
||||||
|
|
||||||
try:
|
try:
|
||||||
private_key = PrivateKeyModule(module, module_backend)
|
private_key = PrivateKeyModule(module, module_backend)
|
||||||
@@ -119,10 +108,10 @@ class ActionModule(ActionModuleBase):
|
|||||||
# `module.no_log = True`, this should be safe.
|
# `module.no_log = True`, this should be safe.
|
||||||
module.no_log = True
|
module.no_log = True
|
||||||
try:
|
try:
|
||||||
module.no_log_values.remove(module.params['content'])
|
module.no_log_values.remove(module.params["content"])
|
||||||
except KeyError:
|
except KeyError:
|
||||||
pass
|
pass
|
||||||
module.params['content'] = 'ANSIBLE_NO_LOG_VALUE'
|
module.params["content"] = "ANSIBLE_NO_LOG_VALUE"
|
||||||
module.exit_json(**result)
|
module.exit_json(**result)
|
||||||
except OpenSSLObjectError as exc:
|
except OpenSSLObjectError as exc:
|
||||||
module.fail_json(msg=to_native(exc))
|
module.fail_json(msg=str(exc))
|
||||||
|
|||||||
@@ -1,115 +1,14 @@
|
|||||||
# -*- coding: utf-8 -*-
|
|
||||||
|
|
||||||
# Copyright (c) 2016 Michael Gruener <michael.gruener@chaosmoon.net>
|
# Copyright (c) 2016 Michael Gruener <michael.gruener@chaosmoon.net>
|
||||||
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
|
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||||
# SPDX-License-Identifier: GPL-3.0-or-later
|
# SPDX-License-Identifier: GPL-3.0-or-later
|
||||||
|
|
||||||
from __future__ import absolute_import, division, print_function
|
# Note that this doc fragment is **PRIVATE** to the collection. It can have breaking changes at any time.
|
||||||
__metaclass__ = type
|
# Do not use this from other collections or standalone plugins/modules!
|
||||||
|
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
|
||||||
class ModuleDocFragment(object):
|
class ModuleDocFragment:
|
||||||
|
|
||||||
# Standard files documentation fragment
|
|
||||||
#
|
|
||||||
# NOTE: This document fragment is DEPRECATED and will be removed from community.crypto 3.0.0.
|
|
||||||
# Use both the BASIC and ACCOUNT fragments as a replacement.
|
|
||||||
DOCUMENTATION = r"""
|
|
||||||
notes:
|
|
||||||
- If a new enough version of the C(cryptography) library is available (see Requirements for details), it will be used instead
|
|
||||||
of the C(openssl) binary. This can be explicitly disabled or enabled with the O(select_crypto_backend) option. Note that
|
|
||||||
using the C(openssl) binary will be slower and less secure, as private key contents always have to be stored on disk (see
|
|
||||||
O(account_key_content)).
|
|
||||||
- Although the defaults are chosen so that the module can be used with the L(Let's Encrypt,https://letsencrypt.org/) CA,
|
|
||||||
the module can in principle be used with any CA providing an ACME endpoint, such as L(Buypass Go SSL,https://www.buypass.com/ssl/products/acme).
|
|
||||||
- So far, the ACME modules have only been tested by the developers against Let's Encrypt (staging and production), Buypass
|
|
||||||
(staging and production), ZeroSSL (production), and L(Pebble testing server,https://github.com/letsencrypt/Pebble). We
|
|
||||||
have got community feedback that they also work with Sectigo ACME Service for InCommon. If you experience problems with
|
|
||||||
another ACME server, please L(create an issue,https://github.com/ansible-collections/community.crypto/issues/new/choose)
|
|
||||||
to help us supporting it. Feedback that an ACME server not mentioned does work is also appreciated.
|
|
||||||
requirements:
|
|
||||||
- either openssl or L(cryptography,https://cryptography.io/) >= 1.5
|
|
||||||
- ipaddress
|
|
||||||
options:
|
|
||||||
account_key_src:
|
|
||||||
description:
|
|
||||||
- Path to a file containing the ACME account RSA or Elliptic Curve key.
|
|
||||||
- 'Private keys can be created with the M(community.crypto.openssl_privatekey) or M(community.crypto.openssl_privatekey_pipe)
|
|
||||||
modules. If the requisite (cryptography) is not available, keys can also be created directly with the C(openssl) command
|
|
||||||
line tool: RSA keys can be created with C(openssl genrsa ...). Elliptic curve keys can be created with C(openssl ecparam
|
|
||||||
-genkey ...). Any other tool creating private keys in PEM format can be used as well.'
|
|
||||||
- Mutually exclusive with O(account_key_content).
|
|
||||||
- Required if O(account_key_content) is not used.
|
|
||||||
type: path
|
|
||||||
aliases: [account_key]
|
|
||||||
account_key_content:
|
|
||||||
description:
|
|
||||||
- Content of the ACME account RSA or Elliptic Curve key.
|
|
||||||
- Mutually exclusive with O(account_key_src).
|
|
||||||
- Required if O(account_key_src) is not used.
|
|
||||||
- B(Warning:) the content will be written into a temporary file, which will be deleted by Ansible when the module completes.
|
|
||||||
Since this is an important private key — it can be used to change the account key, or to revoke your certificates
|
|
||||||
without knowing their private keys —, this might not be acceptable.
|
|
||||||
- In case C(cryptography) is used, the content is not written into a temporary file. It can still happen that it is
|
|
||||||
written to disk by Ansible in the process of moving the module with its argument to the node where it is executed.
|
|
||||||
type: str
|
|
||||||
account_key_passphrase:
|
|
||||||
description:
|
|
||||||
- Phassphrase to use to decode the account key.
|
|
||||||
- B(Note:) this is not supported by the C(openssl) backend, only by the C(cryptography) backend.
|
|
||||||
type: str
|
|
||||||
version_added: 1.6.0
|
|
||||||
account_uri:
|
|
||||||
description:
|
|
||||||
- If specified, assumes that the account URI is as given. If the account key does not match this account, or an account
|
|
||||||
with this URI does not exist, the module fails.
|
|
||||||
type: str
|
|
||||||
acme_version:
|
|
||||||
description:
|
|
||||||
- The ACME version of the endpoint.
|
|
||||||
- Must be V(1) for the classic Let's Encrypt and Buypass ACME endpoints, or V(2) for standardized ACME v2 endpoints.
|
|
||||||
- The value V(1) is deprecated since community.crypto 2.0.0 and will be removed from community.crypto 3.0.0.
|
|
||||||
required: true
|
|
||||||
type: int
|
|
||||||
choices: [1, 2]
|
|
||||||
acme_directory:
|
|
||||||
description:
|
|
||||||
- The ACME directory to use. This is the entry point URL to access the ACME CA server API.
|
|
||||||
- For safety reasons the default is set to the Let's Encrypt staging server (for the ACME v1 protocol). This will create
|
|
||||||
technically correct, but untrusted certificates.
|
|
||||||
- "For Let's Encrypt, all staging endpoints can be found here: U(https://letsencrypt.org/docs/staging-environment/).
|
|
||||||
For Buypass, all endpoints can be found here: U(https://community.buypass.com/t/63d4ay/buypass-go-ssl-endpoints)."
|
|
||||||
- For B(Let's Encrypt), the production directory URL for ACME v2 is U(https://acme-v02.api.letsencrypt.org/directory).
|
|
||||||
- For B(Buypass), the production directory URL for ACME v2 and v1 is U(https://api.buypass.com/acme/directory).
|
|
||||||
- For B(ZeroSSL), the production directory URL for ACME v2 is U(https://acme.zerossl.com/v2/DV90).
|
|
||||||
- For B(Sectigo), the production directory URL for ACME v2 is U(https://acme-qa.secure.trust-provider.com/v2/DV).
|
|
||||||
- The notes for this module contain a list of ACME services this module has been tested against.
|
|
||||||
required: true
|
|
||||||
type: str
|
|
||||||
validate_certs:
|
|
||||||
description:
|
|
||||||
- Whether calls to the ACME directory will validate TLS certificates.
|
|
||||||
- B(Warning:) Should B(only ever) be set to V(false) for testing purposes, for example when testing against a local
|
|
||||||
Pebble server.
|
|
||||||
type: bool
|
|
||||||
default: true
|
|
||||||
select_crypto_backend:
|
|
||||||
description:
|
|
||||||
- Determines which crypto backend to use.
|
|
||||||
- The default choice is V(auto), which tries to use C(cryptography) if available, and falls back to C(openssl).
|
|
||||||
- If set to V(openssl), will try to use the C(openssl) binary.
|
|
||||||
- If set to V(cryptography), will try to use the L(cryptography,https://cryptography.io/) library.
|
|
||||||
type: str
|
|
||||||
default: auto
|
|
||||||
choices: [auto, cryptography, openssl]
|
|
||||||
request_timeout:
|
|
||||||
description:
|
|
||||||
- The time Ansible should wait for a response from the ACME API.
|
|
||||||
- This timeout is applied to all HTTP(S) requests (HEAD, GET, POST).
|
|
||||||
type: int
|
|
||||||
default: 10
|
|
||||||
version_added: 2.3.0
|
|
||||||
"""
|
|
||||||
|
|
||||||
# Basic documentation fragment without account data
|
# Basic documentation fragment without account data
|
||||||
BASIC = r"""
|
BASIC = r"""
|
||||||
@@ -122,17 +21,18 @@ notes:
|
|||||||
another ACME server, please L(create an issue,https://github.com/ansible-collections/community.crypto/issues/new/choose)
|
another ACME server, please L(create an issue,https://github.com/ansible-collections/community.crypto/issues/new/choose)
|
||||||
to help us supporting it. Feedback that an ACME server not mentioned does work is also appreciated.
|
to help us supporting it. Feedback that an ACME server not mentioned does work is also appreciated.
|
||||||
requirements:
|
requirements:
|
||||||
- either openssl or L(cryptography,https://cryptography.io/) >= 1.5
|
- either C(openssl)
|
||||||
- ipaddress
|
- or L(cryptography,https://cryptography.io/) >= 3.3
|
||||||
options:
|
options:
|
||||||
acme_version:
|
acme_version:
|
||||||
description:
|
description:
|
||||||
- The ACME version of the endpoint.
|
- The ACME version of the endpoint.
|
||||||
- Must be V(1) for the classic Let's Encrypt and Buypass ACME endpoints, or V(2) for standardized ACME v2 endpoints.
|
- Must be V(2) for standardized ACME v2 endpoints.
|
||||||
- The value V(1) is deprecated since community.crypto 2.0.0 and will be removed from community.crypto 3.0.0.
|
- The value V(1) is no longer supported since community.crypto 3.0.0.
|
||||||
required: true
|
|
||||||
type: int
|
type: int
|
||||||
choices: [1, 2]
|
default: 2
|
||||||
|
choices:
|
||||||
|
- 2
|
||||||
acme_directory:
|
acme_directory:
|
||||||
description:
|
description:
|
||||||
- The ACME directory to use. This is the entry point URL to access the ACME CA server API.
|
- The ACME directory to use. This is the entry point URL to access the ACME CA server API.
|
||||||
@@ -190,7 +90,8 @@ options:
|
|||||||
- Mutually exclusive with O(account_key_content).
|
- Mutually exclusive with O(account_key_content).
|
||||||
- Required if O(account_key_content) is not used.
|
- Required if O(account_key_content) is not used.
|
||||||
type: path
|
type: path
|
||||||
aliases: [account_key]
|
aliases:
|
||||||
|
- account_key
|
||||||
account_key_content:
|
account_key_content:
|
||||||
description:
|
description:
|
||||||
- Content of the ACME account RSA or Elliptic Curve key.
|
- Content of the ACME account RSA or Elliptic Curve key.
|
||||||
@@ -216,7 +117,7 @@ options:
|
|||||||
"""
|
"""
|
||||||
|
|
||||||
# No account data documentation fragment
|
# No account data documentation fragment
|
||||||
NO_ACCOUNT = r'''
|
NO_ACCOUNT = r"""
|
||||||
notes:
|
notes:
|
||||||
- "If a new enough version of the C(cryptography) library
|
- "If a new enough version of the C(cryptography) library
|
||||||
is available (see Requirements for details), it will be used
|
is available (see Requirements for details), it will be used
|
||||||
@@ -224,7 +125,7 @@ notes:
|
|||||||
or enabled with the O(select_crypto_backend) option. Note that using
|
or enabled with the O(select_crypto_backend) option. Note that using
|
||||||
the C(openssl) binary will be slower."
|
the C(openssl) binary will be slower."
|
||||||
options: {}
|
options: {}
|
||||||
'''
|
"""
|
||||||
|
|
||||||
CERTIFICATE = r"""
|
CERTIFICATE = r"""
|
||||||
options:
|
options:
|
||||||
@@ -1,14 +1,14 @@
|
|||||||
# -*- coding: utf-8 -*-
|
|
||||||
|
|
||||||
# Copyright (c) Ansible Project
|
# Copyright (c) Ansible Project
|
||||||
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
|
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||||
# SPDX-License-Identifier: GPL-3.0-or-later
|
# SPDX-License-Identifier: GPL-3.0-or-later
|
||||||
|
|
||||||
from __future__ import (absolute_import, division, print_function)
|
# Note that this doc fragment is **PRIVATE** to the collection. It can have breaking changes at any time.
|
||||||
__metaclass__ = type
|
# Do not use this from other collections or standalone plugins/modules!
|
||||||
|
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
|
||||||
class ModuleDocFragment(object):
|
class ModuleDocFragment:
|
||||||
|
|
||||||
# Standard documentation fragment
|
# Standard documentation fragment
|
||||||
DOCUMENTATION = r"""
|
DOCUMENTATION = r"""
|
||||||
@@ -35,7 +35,7 @@ attributes:
|
|||||||
"""
|
"""
|
||||||
|
|
||||||
# Should be used together with the standard fragment
|
# Should be used together with the standard fragment
|
||||||
INFO_MODULE = r'''
|
INFO_MODULE = r"""
|
||||||
options: {}
|
options: {}
|
||||||
attributes:
|
attributes:
|
||||||
check_mode:
|
check_mode:
|
||||||
@@ -46,9 +46,9 @@ attributes:
|
|||||||
support: N/A
|
support: N/A
|
||||||
details:
|
details:
|
||||||
- This action does not modify state.
|
- This action does not modify state.
|
||||||
'''
|
"""
|
||||||
|
|
||||||
ACTIONGROUP_ACME = r'''
|
ACTIONGROUP_ACME = r"""
|
||||||
options: {}
|
options: {}
|
||||||
attributes:
|
attributes:
|
||||||
action_group:
|
action_group:
|
||||||
@@ -57,7 +57,7 @@ attributes:
|
|||||||
membership:
|
membership:
|
||||||
- community.crypto.acme
|
- community.crypto.acme
|
||||||
- acme
|
- acme
|
||||||
'''
|
"""
|
||||||
|
|
||||||
FACTS = r"""
|
FACTS = r"""
|
||||||
options: {}
|
options: {}
|
||||||
@@ -67,7 +67,7 @@ attributes:
|
|||||||
"""
|
"""
|
||||||
|
|
||||||
# Should be used together with the standard fragment and the FACTS fragment
|
# Should be used together with the standard fragment and the FACTS fragment
|
||||||
FACTS_MODULE = r'''
|
FACTS_MODULE = r"""
|
||||||
options: {}
|
options: {}
|
||||||
attributes:
|
attributes:
|
||||||
check_mode:
|
check_mode:
|
||||||
@@ -80,7 +80,7 @@ attributes:
|
|||||||
- This action does not modify state.
|
- This action does not modify state.
|
||||||
facts:
|
facts:
|
||||||
support: full
|
support: full
|
||||||
'''
|
"""
|
||||||
|
|
||||||
FILES = r"""
|
FILES = r"""
|
||||||
options: {}
|
options: {}
|
||||||
23
plugins/doc_fragments/_cryptography_dep.py
Normal file
23
plugins/doc_fragments/_cryptography_dep.py
Normal file
@@ -0,0 +1,23 @@
|
|||||||
|
# Copyright (c) 2025 Ansible project
|
||||||
|
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||||
|
# SPDX-License-Identifier: GPL-3.0-or-later
|
||||||
|
|
||||||
|
# Note that this doc fragment is **PRIVATE** to the collection. It can have breaking changes at any time.
|
||||||
|
# Do not use this from other collections or standalone plugins/modules!
|
||||||
|
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
|
||||||
|
class ModuleDocFragment:
|
||||||
|
"""
|
||||||
|
Doc fragments for cryptography requirements.
|
||||||
|
|
||||||
|
Must be kept in sync with plugins/module_utils/_cryptography_dep.py.
|
||||||
|
"""
|
||||||
|
|
||||||
|
# Corresponds to the plugins.module_utils._cryptography_dep.COLLECTION_MINIMUM_CRYPTOGRAPHY_VERSION constant
|
||||||
|
MINIMUM = r"""
|
||||||
|
requirements:
|
||||||
|
- cryptography >= 3.3
|
||||||
|
options: {}
|
||||||
|
"""
|
||||||
@@ -1,14 +1,14 @@
|
|||||||
# -*- coding: utf-8 -*-
|
|
||||||
|
|
||||||
# Copyright (c), Entrust Datacard Corporation, 2019
|
# Copyright (c), Entrust Datacard Corporation, 2019
|
||||||
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
|
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||||
# SPDX-License-Identifier: GPL-3.0-or-later
|
# SPDX-License-Identifier: GPL-3.0-or-later
|
||||||
|
|
||||||
from __future__ import (absolute_import, division, print_function)
|
# Note that this doc fragment is **PRIVATE** to the collection. It can have breaking changes at any time.
|
||||||
__metaclass__ = type
|
# Do not use this from other collections or standalone plugins/modules!
|
||||||
|
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
|
||||||
class ModuleDocFragment(object):
|
class ModuleDocFragment:
|
||||||
|
|
||||||
# Plugin options for Entrust Certificate Services (ECS) credentials
|
# Plugin options for Entrust Certificate Services (ECS) credentials
|
||||||
DOCUMENTATION = r"""
|
DOCUMENTATION = r"""
|
||||||
@@ -1,15 +1,15 @@
|
|||||||
# -*- coding: utf-8 -*-
|
|
||||||
|
|
||||||
# Copyright (c) 2016-2017, Yanis Guenane <yanis+ansible@guenane.org>
|
# Copyright (c) 2016-2017, Yanis Guenane <yanis+ansible@guenane.org>
|
||||||
# Copyright (c) 2017, Markus Teufelberger <mteufelberger+ansible@mgit.at>
|
# Copyright (c) 2017, Markus Teufelberger <mteufelberger+ansible@mgit.at>
|
||||||
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
|
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||||
# SPDX-License-Identifier: GPL-3.0-or-later
|
# SPDX-License-Identifier: GPL-3.0-or-later
|
||||||
|
|
||||||
from __future__ import absolute_import, division, print_function
|
# Note that this doc fragment is **PRIVATE** to the collection. It can have breaking changes at any time.
|
||||||
__metaclass__ = type
|
# Do not use this from other collections or standalone plugins/modules!
|
||||||
|
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
|
||||||
class ModuleDocFragment(object):
|
class ModuleDocFragment:
|
||||||
|
|
||||||
# Standard files documentation fragment
|
# Standard files documentation fragment
|
||||||
DOCUMENTATION = r"""
|
DOCUMENTATION = r"""
|
||||||
@@ -25,7 +25,7 @@ attributes:
|
|||||||
- If relative timestamps are used and O(ignore_timestamps=false), the module is not idempotent.
|
- If relative timestamps are used and O(ignore_timestamps=false), the module is not idempotent.
|
||||||
- The option O(force=true) generally disables idempotency.
|
- The option O(force=true) generally disables idempotency.
|
||||||
requirements:
|
requirements:
|
||||||
- cryptography >= 1.6 (if using V(selfsigned) or V(ownca) provider)
|
- cryptography >= 3.3 (if using V(selfsigned) or V(ownca) provider)
|
||||||
options:
|
options:
|
||||||
force:
|
force:
|
||||||
description:
|
description:
|
||||||
@@ -74,6 +74,9 @@ options:
|
|||||||
- Determines which crypto backend to use.
|
- Determines which crypto backend to use.
|
||||||
- The default choice is V(auto), which tries to use C(cryptography) if available.
|
- The default choice is V(auto), which tries to use C(cryptography) if available.
|
||||||
- If set to V(cryptography), will try to use the L(cryptography,https://cryptography.io/) library.
|
- If set to V(cryptography), will try to use the L(cryptography,https://cryptography.io/) library.
|
||||||
|
- Note that with community.crypto 3.0.0, all values behave the same.
|
||||||
|
This option will be deprecated in a later version.
|
||||||
|
We recommend to not set it explicitly.
|
||||||
type: str
|
type: str
|
||||||
default: auto
|
default: auto
|
||||||
choices: [auto, cryptography]
|
choices: [auto, cryptography]
|
||||||
@@ -94,7 +97,7 @@ seealso:
|
|||||||
- module: community.crypto.openssl_publickey
|
- module: community.crypto.openssl_publickey
|
||||||
"""
|
"""
|
||||||
|
|
||||||
BACKEND_ACME_DOCUMENTATION = r'''
|
BACKEND_ACME_DOCUMENTATION = r"""
|
||||||
description:
|
description:
|
||||||
- This module allows one to (re)generate OpenSSL certificates.
|
- This module allows one to (re)generate OpenSSL certificates.
|
||||||
requirements:
|
requirements:
|
||||||
@@ -127,9 +130,9 @@ options:
|
|||||||
- "Let's Encrypt recommends using their staging server while developing jobs. U(https://letsencrypt.org/docs/staging-environment/)."
|
- "Let's Encrypt recommends using their staging server while developing jobs. U(https://letsencrypt.org/docs/staging-environment/)."
|
||||||
type: str
|
type: str
|
||||||
default: https://acme-v02.api.letsencrypt.org/directory
|
default: https://acme-v02.api.letsencrypt.org/directory
|
||||||
'''
|
"""
|
||||||
|
|
||||||
BACKEND_ENTRUST_DOCUMENTATION = r'''
|
BACKEND_ENTRUST_DOCUMENTATION = r"""
|
||||||
options:
|
options:
|
||||||
entrust_cert_type:
|
entrust_cert_type:
|
||||||
description:
|
description:
|
||||||
@@ -212,9 +215,9 @@ options:
|
|||||||
- This is only used by the V(entrust) provider.
|
- This is only used by the V(entrust) provider.
|
||||||
type: path
|
type: path
|
||||||
default: https://cloud.entrust.net/EntrustCloud/documentation/cms-api-2.1.0.yaml
|
default: https://cloud.entrust.net/EntrustCloud/documentation/cms-api-2.1.0.yaml
|
||||||
'''
|
"""
|
||||||
|
|
||||||
BACKEND_OWNCA_DOCUMENTATION = r'''
|
BACKEND_OWNCA_DOCUMENTATION = r"""
|
||||||
description:
|
description:
|
||||||
- The V(ownca) provider is intended for generating an OpenSSL certificate signed with your own
|
- The V(ownca) provider is intended for generating an OpenSSL certificate signed with your own
|
||||||
CA (Certificate Authority) certificate (self-signed certificate).
|
CA (Certificate Authority) certificate (self-signed certificate).
|
||||||
@@ -265,6 +268,8 @@ options:
|
|||||||
- This is only used by the V(ownca) provider.
|
- This is only used by the V(ownca) provider.
|
||||||
type: int
|
type: int
|
||||||
default: 3
|
default: 3
|
||||||
|
choices:
|
||||||
|
- 3
|
||||||
|
|
||||||
ownca_not_before:
|
ownca_not_before:
|
||||||
description:
|
description:
|
||||||
@@ -307,7 +312,6 @@ options:
|
|||||||
ignored.
|
ignored.
|
||||||
- A value of V(never_create) never creates a SKI. If the CSR provides one, that one is used.
|
- A value of V(never_create) never creates a SKI. If the CSR provides one, that one is used.
|
||||||
- This is only used by the V(ownca) provider.
|
- This is only used by the V(ownca) provider.
|
||||||
- Note that this is only supported if the C(cryptography) backend is used!
|
|
||||||
type: str
|
type: str
|
||||||
choices: [create_if_not_provided, always_create, never_create]
|
choices: [create_if_not_provided, always_create, never_create]
|
||||||
default: create_if_not_provided
|
default: create_if_not_provided
|
||||||
@@ -319,12 +323,11 @@ options:
|
|||||||
- The Authority Key Identifier is generated from the CA certificate's Subject Key Identifier,
|
- The Authority Key Identifier is generated from the CA certificate's Subject Key Identifier,
|
||||||
if available. If it is not available, the CA certificate's public key will be used.
|
if available. If it is not available, the CA certificate's public key will be used.
|
||||||
- This is only used by the V(ownca) provider.
|
- This is only used by the V(ownca) provider.
|
||||||
- Note that this is only supported if the C(cryptography) backend is used!
|
|
||||||
type: bool
|
type: bool
|
||||||
default: true
|
default: true
|
||||||
'''
|
"""
|
||||||
|
|
||||||
BACKEND_SELFSIGNED_DOCUMENTATION = r'''
|
BACKEND_SELFSIGNED_DOCUMENTATION = r"""
|
||||||
notes:
|
notes:
|
||||||
- For the V(selfsigned) provider, O(csr_path) and O(csr_content) are optional. If not provided, a
|
- For the V(selfsigned) provider, O(csr_path) and O(csr_content) are optional. If not provided, a
|
||||||
certificate without any information (Subject, Subject Alternative Names, Key Usage, etc.) is created.
|
certificate without any information (Subject, Subject Alternative Names, Key Usage, etc.) is created.
|
||||||
@@ -353,6 +356,8 @@ options:
|
|||||||
- This is only used by the V(selfsigned) provider.
|
- This is only used by the V(selfsigned) provider.
|
||||||
type: int
|
type: int
|
||||||
default: 3
|
default: 3
|
||||||
|
choices:
|
||||||
|
- 3
|
||||||
|
|
||||||
selfsigned_digest:
|
selfsigned_digest:
|
||||||
description:
|
description:
|
||||||
@@ -375,7 +380,8 @@ options:
|
|||||||
- This is only used by the V(selfsigned) provider.
|
- This is only used by the V(selfsigned) provider.
|
||||||
type: str
|
type: str
|
||||||
default: +0s
|
default: +0s
|
||||||
aliases: [ selfsigned_notBefore ]
|
aliases:
|
||||||
|
- selfsigned_notBefore
|
||||||
|
|
||||||
selfsigned_not_after:
|
selfsigned_not_after:
|
||||||
description:
|
description:
|
||||||
@@ -393,7 +399,8 @@ options:
|
|||||||
Please see U(https://support.apple.com/en-us/HT210176) for more details.
|
Please see U(https://support.apple.com/en-us/HT210176) for more details.
|
||||||
type: str
|
type: str
|
||||||
default: +3650d
|
default: +3650d
|
||||||
aliases: [ selfsigned_notAfter ]
|
aliases:
|
||||||
|
- selfsigned_notAfter
|
||||||
|
|
||||||
selfsigned_create_subject_key_identifier:
|
selfsigned_create_subject_key_identifier:
|
||||||
description:
|
description:
|
||||||
@@ -404,8 +411,7 @@ options:
|
|||||||
ignored.
|
ignored.
|
||||||
- A value of V(never_create) never creates a SKI. If the CSR provides one, that one is used.
|
- A value of V(never_create) never creates a SKI. If the CSR provides one, that one is used.
|
||||||
- This is only used by the V(selfsigned) provider.
|
- This is only used by the V(selfsigned) provider.
|
||||||
- Note that this is only supported if the C(cryptography) backend is used!
|
|
||||||
type: str
|
type: str
|
||||||
choices: [create_if_not_provided, always_create, never_create]
|
choices: [create_if_not_provided, always_create, never_create]
|
||||||
default: create_if_not_provided
|
default: create_if_not_provided
|
||||||
'''
|
"""
|
||||||
@@ -1,14 +1,14 @@
|
|||||||
# -*- coding: utf-8 -*-
|
|
||||||
|
|
||||||
# Copyright (c) 2017, Yanis Guenane <yanis+ansible@guenane.org>
|
# Copyright (c) 2017, Yanis Guenane <yanis+ansible@guenane.org>
|
||||||
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
|
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||||
# SPDX-License-Identifier: GPL-3.0-or-later
|
# SPDX-License-Identifier: GPL-3.0-or-later
|
||||||
|
|
||||||
from __future__ import absolute_import, division, print_function
|
# Note that this doc fragment is **PRIVATE** to the collection. It can have breaking changes at any time.
|
||||||
__metaclass__ = type
|
# Do not use this from other collections or standalone plugins/modules!
|
||||||
|
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
|
||||||
class ModuleDocFragment(object):
|
class ModuleDocFragment:
|
||||||
|
|
||||||
# Standard files documentation fragment
|
# Standard files documentation fragment
|
||||||
DOCUMENTATION = r"""
|
DOCUMENTATION = r"""
|
||||||
@@ -21,7 +21,7 @@ attributes:
|
|||||||
idempotent:
|
idempotent:
|
||||||
support: full
|
support: full
|
||||||
requirements:
|
requirements:
|
||||||
- cryptography >= 1.3
|
- cryptography >= 3.3
|
||||||
options:
|
options:
|
||||||
digest:
|
digest:
|
||||||
description:
|
description:
|
||||||
@@ -73,37 +73,51 @@ options:
|
|||||||
description:
|
description:
|
||||||
- The countryName field of the certificate signing request subject.
|
- The countryName field of the certificate signing request subject.
|
||||||
type: str
|
type: str
|
||||||
aliases: [C, countryName]
|
aliases:
|
||||||
|
- C
|
||||||
|
- countryName
|
||||||
state_or_province_name:
|
state_or_province_name:
|
||||||
description:
|
description:
|
||||||
- The stateOrProvinceName field of the certificate signing request subject.
|
- The stateOrProvinceName field of the certificate signing request subject.
|
||||||
type: str
|
type: str
|
||||||
aliases: [ST, stateOrProvinceName]
|
aliases:
|
||||||
|
- ST
|
||||||
|
- stateOrProvinceName
|
||||||
locality_name:
|
locality_name:
|
||||||
description:
|
description:
|
||||||
- The localityName field of the certificate signing request subject.
|
- The localityName field of the certificate signing request subject.
|
||||||
type: str
|
type: str
|
||||||
aliases: [L, localityName]
|
aliases:
|
||||||
|
- L
|
||||||
|
- localityName
|
||||||
organization_name:
|
organization_name:
|
||||||
description:
|
description:
|
||||||
- The organizationName field of the certificate signing request subject.
|
- The organizationName field of the certificate signing request subject.
|
||||||
type: str
|
type: str
|
||||||
aliases: [O, organizationName]
|
aliases:
|
||||||
|
- O
|
||||||
|
- organizationName
|
||||||
organizational_unit_name:
|
organizational_unit_name:
|
||||||
description:
|
description:
|
||||||
- The organizationalUnitName field of the certificate signing request subject.
|
- The organizationalUnitName field of the certificate signing request subject.
|
||||||
type: str
|
type: str
|
||||||
aliases: [OU, organizationalUnitName]
|
aliases:
|
||||||
|
- OU
|
||||||
|
- organizationalUnitName
|
||||||
common_name:
|
common_name:
|
||||||
description:
|
description:
|
||||||
- The commonName field of the certificate signing request subject.
|
- The commonName field of the certificate signing request subject.
|
||||||
type: str
|
type: str
|
||||||
aliases: [CN, commonName]
|
aliases:
|
||||||
|
- CN
|
||||||
|
- commonName
|
||||||
email_address:
|
email_address:
|
||||||
description:
|
description:
|
||||||
- The emailAddress field of the certificate signing request subject.
|
- The emailAddress field of the certificate signing request subject.
|
||||||
type: str
|
type: str
|
||||||
aliases: [E, emailAddress]
|
aliases:
|
||||||
|
- E
|
||||||
|
- emailAddress
|
||||||
subject_alt_name:
|
subject_alt_name:
|
||||||
description:
|
description:
|
||||||
- Subject Alternative Name (SAN) extension to attach to the certificate signing request.
|
- Subject Alternative Name (SAN) extension to attach to the certificate signing request.
|
||||||
@@ -114,63 +128,75 @@ options:
|
|||||||
- More at U(https://tools.ietf.org/html/rfc5280#section-4.2.1.6).
|
- More at U(https://tools.ietf.org/html/rfc5280#section-4.2.1.6).
|
||||||
type: list
|
type: list
|
||||||
elements: str
|
elements: str
|
||||||
aliases: [subjectAltName]
|
aliases:
|
||||||
|
- subjectAltName
|
||||||
subject_alt_name_critical:
|
subject_alt_name_critical:
|
||||||
description:
|
description:
|
||||||
- Should the subjectAltName extension be considered as critical.
|
- Should the subjectAltName extension be considered as critical.
|
||||||
type: bool
|
type: bool
|
||||||
default: false
|
default: false
|
||||||
aliases: [subjectAltName_critical]
|
aliases:
|
||||||
|
- subjectAltName_critical
|
||||||
use_common_name_for_san:
|
use_common_name_for_san:
|
||||||
description:
|
description:
|
||||||
- If set to V(true), the module will fill the common name in for O(subject_alt_name) with C(DNS:) prefix if no SAN is
|
- If set to V(true), the module will fill the common name in for O(subject_alt_name) with C(DNS:) prefix if no SAN is
|
||||||
specified.
|
specified.
|
||||||
type: bool
|
type: bool
|
||||||
default: true
|
default: true
|
||||||
aliases: [useCommonNameForSAN]
|
aliases:
|
||||||
|
- useCommonNameForSAN
|
||||||
key_usage:
|
key_usage:
|
||||||
description:
|
description:
|
||||||
- This defines the purpose (for example encipherment, signature, certificate signing) of the key contained in the certificate.
|
- This defines the purpose (for example encipherment, signature, certificate signing) of the key contained in the certificate.
|
||||||
type: list
|
type: list
|
||||||
elements: str
|
elements: str
|
||||||
aliases: [keyUsage]
|
aliases:
|
||||||
|
- keyUsage
|
||||||
key_usage_critical:
|
key_usage_critical:
|
||||||
description:
|
description:
|
||||||
- Should the keyUsage extension be considered as critical.
|
- Should the keyUsage extension be considered as critical.
|
||||||
type: bool
|
type: bool
|
||||||
default: false
|
default: false
|
||||||
aliases: [keyUsage_critical]
|
aliases:
|
||||||
|
- keyUsage_critical
|
||||||
extended_key_usage:
|
extended_key_usage:
|
||||||
description:
|
description:
|
||||||
- Additional restrictions (for example client authentication, server authentication) on the allowed purposes for which
|
- Additional restrictions (for example client authentication, server authentication) on the allowed purposes for which
|
||||||
the public key may be used.
|
the public key may be used.
|
||||||
type: list
|
type: list
|
||||||
elements: str
|
elements: str
|
||||||
aliases: [extKeyUsage, extendedKeyUsage]
|
aliases:
|
||||||
|
- extKeyUsage
|
||||||
|
- extendedKeyUsage
|
||||||
extended_key_usage_critical:
|
extended_key_usage_critical:
|
||||||
description:
|
description:
|
||||||
- Should the extkeyUsage extension be considered as critical.
|
- Should the extkeyUsage extension be considered as critical.
|
||||||
type: bool
|
type: bool
|
||||||
default: false
|
default: false
|
||||||
aliases: [extKeyUsage_critical, extendedKeyUsage_critical]
|
aliases:
|
||||||
|
- extKeyUsage_critical
|
||||||
|
- extendedKeyUsage_critical
|
||||||
basic_constraints:
|
basic_constraints:
|
||||||
description:
|
description:
|
||||||
- Indicates basic constraints, such as if the certificate is a CA.
|
- Indicates basic constraints, such as if the certificate is a CA.
|
||||||
type: list
|
type: list
|
||||||
elements: str
|
elements: str
|
||||||
aliases: [basicConstraints]
|
aliases:
|
||||||
|
- basicConstraints
|
||||||
basic_constraints_critical:
|
basic_constraints_critical:
|
||||||
description:
|
description:
|
||||||
- Should the basicConstraints extension be considered as critical.
|
- Should the basicConstraints extension be considered as critical.
|
||||||
type: bool
|
type: bool
|
||||||
default: false
|
default: false
|
||||||
aliases: [basicConstraints_critical]
|
aliases:
|
||||||
|
- basicConstraints_critical
|
||||||
ocsp_must_staple:
|
ocsp_must_staple:
|
||||||
description:
|
description:
|
||||||
- Indicates that the certificate should contain the OCSP Must Staple extension (U(https://tools.ietf.org/html/rfc7633)).
|
- Indicates that the certificate should contain the OCSP Must Staple extension (U(https://tools.ietf.org/html/rfc7633)).
|
||||||
type: bool
|
type: bool
|
||||||
default: false
|
default: false
|
||||||
aliases: [ocspMustStaple]
|
aliases:
|
||||||
|
- ocspMustStaple
|
||||||
ocsp_must_staple_critical:
|
ocsp_must_staple_critical:
|
||||||
description:
|
description:
|
||||||
- Should the OCSP Must Staple extension be considered as critical.
|
- Should the OCSP Must Staple extension be considered as critical.
|
||||||
@@ -178,7 +204,8 @@ options:
|
|||||||
OCSP Must Staple are required to reject such certificates (see U(https://tools.ietf.org/html/rfc7633#section-4)).
|
OCSP Must Staple are required to reject such certificates (see U(https://tools.ietf.org/html/rfc7633#section-4)).
|
||||||
type: bool
|
type: bool
|
||||||
default: false
|
default: false
|
||||||
aliases: [ocspMustStaple_critical]
|
aliases:
|
||||||
|
- ocspMustStaple_critical
|
||||||
name_constraints_permitted:
|
name_constraints_permitted:
|
||||||
description:
|
description:
|
||||||
- For CA certificates, this specifies a list of identifiers which describe subtrees of names that this CA is allowed
|
- For CA certificates, this specifies a list of identifiers which describe subtrees of names that this CA is allowed
|
||||||
@@ -205,6 +232,9 @@ options:
|
|||||||
- Determines which crypto backend to use.
|
- Determines which crypto backend to use.
|
||||||
- The default choice is V(auto), which tries to use C(cryptography) if available.
|
- The default choice is V(auto), which tries to use C(cryptography) if available.
|
||||||
- If set to V(cryptography), will try to use the L(cryptography,https://cryptography.io/) library.
|
- If set to V(cryptography), will try to use the L(cryptography,https://cryptography.io/) library.
|
||||||
|
- Note that with community.crypto 3.0.0, all values behave the same.
|
||||||
|
This option will be deprecated in a later version.
|
||||||
|
We recommend to not set it explicitly.
|
||||||
type: str
|
type: str
|
||||||
default: auto
|
default: auto
|
||||||
choices: [auto, cryptography]
|
choices: [auto, cryptography]
|
||||||
@@ -213,7 +243,6 @@ options:
|
|||||||
- Create the Subject Key Identifier from the public key.
|
- Create the Subject Key Identifier from the public key.
|
||||||
- Please note that commercial CAs can ignore the value, respectively use a value of their own choice instead. Specifying
|
- Please note that commercial CAs can ignore the value, respectively use a value of their own choice instead. Specifying
|
||||||
this option is mostly useful for self-signed certificates or for own CAs.
|
this option is mostly useful for self-signed certificates or for own CAs.
|
||||||
- Note that this is only supported if the C(cryptography) backend is used!
|
|
||||||
type: bool
|
type: bool
|
||||||
default: false
|
default: false
|
||||||
subject_key_identifier:
|
subject_key_identifier:
|
||||||
@@ -223,7 +252,6 @@ options:
|
|||||||
- Please note that commercial CAs ignore this value, respectively use a value of their own choice. Specifying this option
|
- Please note that commercial CAs ignore this value, respectively use a value of their own choice. Specifying this option
|
||||||
is mostly useful for self-signed certificates or for own CAs.
|
is mostly useful for self-signed certificates or for own CAs.
|
||||||
- Note that this option can only be used if O(create_subject_key_identifier) is V(false).
|
- Note that this option can only be used if O(create_subject_key_identifier) is V(false).
|
||||||
- Note that this is only supported if the C(cryptography) backend is used!
|
|
||||||
type: str
|
type: str
|
||||||
authority_key_identifier:
|
authority_key_identifier:
|
||||||
description:
|
description:
|
||||||
@@ -231,7 +259,6 @@ options:
|
|||||||
- 'Example: V(00:11:22:33:44:55:66:77:88:99:aa:bb:cc:dd:ee:ff:00:11:22:33).'
|
- 'Example: V(00:11:22:33:44:55:66:77:88:99:aa:bb:cc:dd:ee:ff:00:11:22:33).'
|
||||||
- Please note that commercial CAs ignore this value, respectively use a value of their own choice. Specifying this option
|
- Please note that commercial CAs ignore this value, respectively use a value of their own choice. Specifying this option
|
||||||
is mostly useful for self-signed certificates or for own CAs.
|
is mostly useful for self-signed certificates or for own CAs.
|
||||||
- Note that this is only supported if the C(cryptography) backend is used!
|
|
||||||
- The C(AuthorityKeyIdentifier) extension will only be added if at least one of O(authority_key_identifier), O(authority_cert_issuer)
|
- The C(AuthorityKeyIdentifier) extension will only be added if at least one of O(authority_key_identifier), O(authority_cert_issuer)
|
||||||
and O(authority_cert_serial_number) is specified.
|
and O(authority_cert_serial_number) is specified.
|
||||||
type: str
|
type: str
|
||||||
@@ -244,7 +271,6 @@ options:
|
|||||||
- If specified, O(authority_cert_serial_number) must also be specified.
|
- If specified, O(authority_cert_serial_number) must also be specified.
|
||||||
- Please note that commercial CAs ignore this value, respectively use a value of their own choice. Specifying this option
|
- Please note that commercial CAs ignore this value, respectively use a value of their own choice. Specifying this option
|
||||||
is mostly useful for self-signed certificates or for own CAs.
|
is mostly useful for self-signed certificates or for own CAs.
|
||||||
- Note that this is only supported if the C(cryptography) backend is used!
|
|
||||||
- The C(AuthorityKeyIdentifier) extension will only be added if at least one of O(authority_key_identifier), O(authority_cert_issuer)
|
- The C(AuthorityKeyIdentifier) extension will only be added if at least one of O(authority_key_identifier), O(authority_cert_issuer)
|
||||||
and O(authority_cert_serial_number) is specified.
|
and O(authority_cert_serial_number) is specified.
|
||||||
type: list
|
type: list
|
||||||
@@ -253,7 +279,6 @@ options:
|
|||||||
description:
|
description:
|
||||||
- The authority cert serial number.
|
- The authority cert serial number.
|
||||||
- If specified, O(authority_cert_issuer) must also be specified.
|
- If specified, O(authority_cert_issuer) must also be specified.
|
||||||
- Note that this is only supported if the C(cryptography) backend is used!
|
|
||||||
- Please note that commercial CAs ignore this value, respectively use a value of their own choice. Specifying this option
|
- Please note that commercial CAs ignore this value, respectively use a value of their own choice. Specifying this option
|
||||||
is mostly useful for self-signed certificates or for own CAs.
|
is mostly useful for self-signed certificates or for own CAs.
|
||||||
- The C(AuthorityKeyIdentifier) extension will only be added if at least one of O(authority_key_identifier), O(authority_cert_issuer)
|
- The C(AuthorityKeyIdentifier) extension will only be added if at least one of O(authority_key_identifier), O(authority_cert_issuer)
|
||||||
@@ -264,7 +289,6 @@ options:
|
|||||||
crl_distribution_points:
|
crl_distribution_points:
|
||||||
description:
|
description:
|
||||||
- Allows to specify one or multiple CRL distribution points.
|
- Allows to specify one or multiple CRL distribution points.
|
||||||
- Only supported by the C(cryptography) backend.
|
|
||||||
type: list
|
type: list
|
||||||
elements: dict
|
elements: dict
|
||||||
suboptions:
|
suboptions:
|
||||||
@@ -280,7 +304,6 @@ options:
|
|||||||
- Describes how the CRL can be retrieved relative to the CRL issuer.
|
- Describes how the CRL can be retrieved relative to the CRL issuer.
|
||||||
- Mutually exclusive with O(crl_distribution_points[].full_name).
|
- Mutually exclusive with O(crl_distribution_points[].full_name).
|
||||||
- 'Example: V(/CN=example.com).'
|
- 'Example: V(/CN=example.com).'
|
||||||
- Can only be used when cryptography >= 1.6 is installed.
|
|
||||||
type: list
|
type: list
|
||||||
elements: str
|
elements: str
|
||||||
crl_issuer:
|
crl_issuer:
|
||||||
@@ -1,14 +1,14 @@
|
|||||||
# -*- coding: utf-8 -*-
|
|
||||||
|
|
||||||
# Copyright (c) 2016, Yanis Guenane <yanis+ansible@guenane.org>
|
# Copyright (c) 2016, Yanis Guenane <yanis+ansible@guenane.org>
|
||||||
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
|
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||||
# SPDX-License-Identifier: GPL-3.0-or-later
|
# SPDX-License-Identifier: GPL-3.0-or-later
|
||||||
|
|
||||||
from __future__ import absolute_import, division, print_function
|
# Note that this doc fragment is **PRIVATE** to the collection. It can have breaking changes at any time.
|
||||||
__metaclass__ = type
|
# Do not use this from other collections or standalone plugins/modules!
|
||||||
|
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
|
||||||
class ModuleDocFragment(object):
|
class ModuleDocFragment:
|
||||||
|
|
||||||
# Standard files documentation fragment
|
# Standard files documentation fragment
|
||||||
DOCUMENTATION = r"""
|
DOCUMENTATION = r"""
|
||||||
@@ -25,7 +25,7 @@ attributes:
|
|||||||
details:
|
details:
|
||||||
- The option O(regenerate=always) generally disables idempotency.
|
- The option O(regenerate=always) generally disables idempotency.
|
||||||
requirements:
|
requirements:
|
||||||
- cryptography >= 1.2.3 (older versions might work as well)
|
- cryptography >= 3.3
|
||||||
options:
|
options:
|
||||||
size:
|
size:
|
||||||
description:
|
description:
|
||||||
@@ -35,9 +35,6 @@ options:
|
|||||||
type:
|
type:
|
||||||
description:
|
description:
|
||||||
- The algorithm used to generate the TLS/SSL private key.
|
- The algorithm used to generate the TLS/SSL private key.
|
||||||
- Note that V(ECC), V(X25519), V(X448), V(Ed25519), and V(Ed448) require the C(cryptography) backend. V(X25519) needs
|
|
||||||
cryptography 2.5 or newer, while V(X448), V(Ed25519), and V(Ed448) require cryptography 2.6 or newer. For V(ECC),
|
|
||||||
the minimal cryptography version required depends on the O(curve) option.
|
|
||||||
type: str
|
type: str
|
||||||
default: RSA
|
default: RSA
|
||||||
choices: [DSA, ECC, Ed25519, Ed448, RSA, X25519, X448]
|
choices: [DSA, ECC, Ed25519, Ed448, RSA, X25519, X448]
|
||||||
@@ -84,6 +81,9 @@ options:
|
|||||||
- Determines which crypto backend to use.
|
- Determines which crypto backend to use.
|
||||||
- The default choice is V(auto), which tries to use C(cryptography) if available.
|
- The default choice is V(auto), which tries to use C(cryptography) if available.
|
||||||
- If set to V(cryptography), will try to use the L(cryptography,https://cryptography.io/) library.
|
- If set to V(cryptography), will try to use the L(cryptography,https://cryptography.io/) library.
|
||||||
|
- Note that with community.crypto 3.0.0, all values behave the same.
|
||||||
|
This option will be deprecated in a later version.
|
||||||
|
We recommend to not set it explicitly.
|
||||||
type: str
|
type: str
|
||||||
default: auto
|
default: auto
|
||||||
choices: [auto, cryptography]
|
choices: [auto, cryptography]
|
||||||
@@ -104,7 +104,6 @@ options:
|
|||||||
parameters are as expected.
|
parameters are as expected.
|
||||||
- If set to V(regenerate) (default), generates a new private key.
|
- If set to V(regenerate) (default), generates a new private key.
|
||||||
- If set to V(convert), the key will be converted to the new format instead.
|
- If set to V(convert), the key will be converted to the new format instead.
|
||||||
- Only supported by the C(cryptography) backend.
|
|
||||||
type: str
|
type: str
|
||||||
default: regenerate
|
default: regenerate
|
||||||
choices: [regenerate, convert]
|
choices: [regenerate, convert]
|
||||||
@@ -1,19 +1,19 @@
|
|||||||
# -*- coding: utf-8 -*-
|
|
||||||
|
|
||||||
# Copyright (c) 2022, Felix Fontein <felix@fontein.de>
|
# Copyright (c) 2022, Felix Fontein <felix@fontein.de>
|
||||||
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
|
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||||
# SPDX-License-Identifier: GPL-3.0-or-later
|
# SPDX-License-Identifier: GPL-3.0-or-later
|
||||||
|
|
||||||
from __future__ import absolute_import, division, print_function
|
# Note that this doc fragment is **PRIVATE** to the collection. It can have breaking changes at any time.
|
||||||
__metaclass__ = type
|
# Do not use this from other collections or standalone plugins/modules!
|
||||||
|
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
|
||||||
class ModuleDocFragment(object):
|
class ModuleDocFragment:
|
||||||
|
|
||||||
# Standard files documentation fragment
|
# Standard files documentation fragment
|
||||||
DOCUMENTATION = r"""
|
DOCUMENTATION = r"""
|
||||||
requirements:
|
requirements:
|
||||||
- cryptography >= 1.2.3 (older versions might work as well)
|
- cryptography >= 3.3
|
||||||
attributes:
|
attributes:
|
||||||
diff_mode:
|
diff_mode:
|
||||||
support: none
|
support: none
|
||||||
@@ -1,14 +1,14 @@
|
|||||||
# -*- coding: utf-8 -*-
|
|
||||||
|
|
||||||
# Copyright (c) 2022, Felix Fontein <felix@fontein.de>
|
# Copyright (c) 2022, Felix Fontein <felix@fontein.de>
|
||||||
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
|
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||||
# SPDX-License-Identifier: GPL-3.0-or-later
|
# SPDX-License-Identifier: GPL-3.0-or-later
|
||||||
|
|
||||||
from __future__ import absolute_import, division, print_function
|
# Note that this doc fragment is **PRIVATE** to the collection. It can have breaking changes at any time.
|
||||||
__metaclass__ = type
|
# Do not use this from other collections or standalone plugins/modules!
|
||||||
|
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
|
||||||
class ModuleDocFragment(object):
|
class ModuleDocFragment:
|
||||||
DOCUMENTATION = r"""
|
DOCUMENTATION = r"""
|
||||||
options:
|
options:
|
||||||
name_encoding:
|
name_encoding:
|
||||||
@@ -1,10 +1,9 @@
|
|||||||
# -*- coding: utf-8 -*-
|
|
||||||
# Copyright (c) 2023, Felix Fontein <felix@fontein.de>
|
# Copyright (c) 2023, Felix Fontein <felix@fontein.de>
|
||||||
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
|
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||||
# SPDX-License-Identifier: GPL-3.0-or-later
|
# SPDX-License-Identifier: GPL-3.0-or-later
|
||||||
|
|
||||||
from __future__ import (absolute_import, division, print_function)
|
from __future__ import annotations
|
||||||
__metaclass__ = type
|
|
||||||
|
|
||||||
DOCUMENTATION = r"""
|
DOCUMENTATION = r"""
|
||||||
name: gpg_fingerprint
|
name: gpg_fingerprint
|
||||||
@@ -27,6 +26,7 @@ seealso:
|
|||||||
"""
|
"""
|
||||||
|
|
||||||
EXAMPLES = r"""
|
EXAMPLES = r"""
|
||||||
|
---
|
||||||
- name: Show fingerprint of GPG public key
|
- name: Show fingerprint of GPG public key
|
||||||
ansible.builtin.debug:
|
ansible.builtin.debug:
|
||||||
msg: "{{ lookup('file', '/path/to/public_key.gpg') | community.crypto.gpg_fingerprint }}"
|
msg: "{{ lookup('file', '/path/to/public_key.gpg') | community.crypto.gpg_fingerprint }}"
|
||||||
@@ -39,30 +39,37 @@ _value:
|
|||||||
type: string
|
type: string
|
||||||
"""
|
"""
|
||||||
|
|
||||||
|
import typing as t
|
||||||
|
|
||||||
from ansible.errors import AnsibleFilterError
|
from ansible.errors import AnsibleFilterError
|
||||||
from ansible.module_utils.common.text.converters import to_bytes, to_native
|
from ansible.module_utils.common.text.converters import to_bytes
|
||||||
from ansible.module_utils.six import string_types
|
from ansible_collections.community.crypto.plugins.module_utils._gnupg.cli import (
|
||||||
|
GPGError,
|
||||||
from ansible_collections.community.crypto.plugins.module_utils.gnupg.cli import GPGError, get_fingerprint_from_bytes
|
get_fingerprint_from_bytes,
|
||||||
from ansible_collections.community.crypto.plugins.plugin_utils.gnupg import PluginGPGRunner
|
)
|
||||||
|
from ansible_collections.community.crypto.plugins.plugin_utils._gnupg import (
|
||||||
|
PluginGPGRunner,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
def gpg_fingerprint(input):
|
def gpg_fingerprint(gpg_key_content: str | bytes) -> str:
|
||||||
if not isinstance(input, string_types):
|
if not isinstance(gpg_key_content, (str, bytes)):
|
||||||
raise AnsibleFilterError(
|
raise AnsibleFilterError(
|
||||||
'The input for the community.crypto.gpg_fingerprint filter must be a string; got {type} instead'.format(type=type(input))
|
f"The input for the community.crypto.gpg_fingerprint filter must be a string; got {type(gpg_key_content)} instead"
|
||||||
)
|
)
|
||||||
try:
|
try:
|
||||||
gpg = PluginGPGRunner()
|
gpg = PluginGPGRunner()
|
||||||
return get_fingerprint_from_bytes(gpg, to_bytes(input))
|
return get_fingerprint_from_bytes(
|
||||||
|
gpg_runner=gpg, content=to_bytes(gpg_key_content)
|
||||||
|
)
|
||||||
except GPGError as exc:
|
except GPGError as exc:
|
||||||
raise AnsibleFilterError(to_native(exc))
|
raise AnsibleFilterError(str(exc)) from exc
|
||||||
|
|
||||||
|
|
||||||
class FilterModule(object):
|
class FilterModule:
|
||||||
'''Ansible jinja2 filters'''
|
"""Ansible jinja2 filters"""
|
||||||
|
|
||||||
def filters(self):
|
def filters(self) -> dict[str, t.Callable]:
|
||||||
return {
|
return {
|
||||||
'gpg_fingerprint': gpg_fingerprint,
|
"gpg_fingerprint": gpg_fingerprint,
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -1,11 +1,9 @@
|
|||||||
# -*- coding: utf-8 -*-
|
|
||||||
|
|
||||||
# Copyright (c) 2022, Felix Fontein <felix@fontein.de>
|
# Copyright (c) 2022, Felix Fontein <felix@fontein.de>
|
||||||
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
|
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||||
# SPDX-License-Identifier: GPL-3.0-or-later
|
# SPDX-License-Identifier: GPL-3.0-or-later
|
||||||
|
|
||||||
from __future__ import absolute_import, division, print_function
|
from __future__ import annotations
|
||||||
__metaclass__ = type
|
|
||||||
|
|
||||||
DOCUMENTATION = r"""
|
DOCUMENTATION = r"""
|
||||||
name: openssl_csr_info
|
name: openssl_csr_info
|
||||||
@@ -23,7 +21,7 @@ options:
|
|||||||
type: string
|
type: string
|
||||||
required: true
|
required: true
|
||||||
extends_documentation_fragment:
|
extends_documentation_fragment:
|
||||||
- community.crypto.name_encoding
|
- community.crypto._name_encoding
|
||||||
seealso:
|
seealso:
|
||||||
- module: community.crypto.openssl_csr_info
|
- module: community.crypto.openssl_csr_info
|
||||||
- plugin: community.crypto.to_serial
|
- plugin: community.crypto.to_serial
|
||||||
@@ -31,6 +29,7 @@ seealso:
|
|||||||
"""
|
"""
|
||||||
|
|
||||||
EXAMPLES = r"""
|
EXAMPLES = r"""
|
||||||
|
---
|
||||||
- name: Show the Subject Alt Names of the CSR
|
- name: Show the Subject Alt Names of the CSR
|
||||||
ansible.builtin.debug:
|
ansible.builtin.debug:
|
||||||
msg: >-
|
msg: >-
|
||||||
@@ -275,42 +274,52 @@ _value:
|
|||||||
sample: 12345
|
sample: 12345
|
||||||
"""
|
"""
|
||||||
|
|
||||||
from ansible.errors import AnsibleFilterError
|
import typing as t
|
||||||
from ansible.module_utils.six import string_types
|
|
||||||
from ansible.module_utils.common.text.converters import to_bytes, to_native
|
|
||||||
|
|
||||||
from ansible_collections.community.crypto.plugins.module_utils.crypto.basic import (
|
from ansible.errors import AnsibleFilterError
|
||||||
|
from ansible.module_utils.common.text.converters import to_bytes, to_text
|
||||||
|
from ansible_collections.community.crypto.plugins.module_utils._crypto.basic import (
|
||||||
OpenSSLObjectError,
|
OpenSSLObjectError,
|
||||||
)
|
)
|
||||||
|
from ansible_collections.community.crypto.plugins.module_utils._crypto.module_backends.csr_info import (
|
||||||
from ansible_collections.community.crypto.plugins.module_utils.crypto.module_backends.csr_info import (
|
|
||||||
get_csr_info,
|
get_csr_info,
|
||||||
)
|
)
|
||||||
|
from ansible_collections.community.crypto.plugins.plugin_utils._filter_module import (
|
||||||
from ansible_collections.community.crypto.plugins.plugin_utils.filter_module import FilterModuleMock
|
FilterModuleMock,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
def openssl_csr_info_filter(data, name_encoding='ignore'):
|
def openssl_csr_info_filter(
|
||||||
'''Extract information from X.509 PEM certificate.'''
|
data: str | bytes, name_encoding: t.Literal["ignore", "idna", "unicode"] = "ignore"
|
||||||
if not isinstance(data, string_types):
|
) -> dict[str, t.Any]:
|
||||||
raise AnsibleFilterError('The community.crypto.openssl_csr_info input must be a text type, not %s' % type(data))
|
"""Extract information from X.509 PEM certificate."""
|
||||||
if not isinstance(name_encoding, string_types):
|
if not isinstance(data, (str, bytes)):
|
||||||
raise AnsibleFilterError('The name_encoding option must be of a text type, not %s' % type(name_encoding))
|
raise AnsibleFilterError(
|
||||||
name_encoding = to_native(name_encoding)
|
f"The community.crypto.openssl_csr_info input must be a text type, not {type(data)}"
|
||||||
if name_encoding not in ('ignore', 'idna', 'unicode'):
|
)
|
||||||
raise AnsibleFilterError('The name_encoding option must be one of the values "ignore", "idna", or "unicode", not "%s"' % name_encoding)
|
if not isinstance(name_encoding, (str, bytes)):
|
||||||
|
raise AnsibleFilterError(
|
||||||
|
f"The name_encoding option must be of a text type, not {type(name_encoding)}"
|
||||||
|
)
|
||||||
|
name_encoding = to_text(name_encoding)
|
||||||
|
if name_encoding not in ("ignore", "idna", "unicode"):
|
||||||
|
raise AnsibleFilterError(
|
||||||
|
f'The name_encoding option must be one of the values "ignore", "idna", or "unicode", not "{name_encoding}"'
|
||||||
|
)
|
||||||
|
|
||||||
module = FilterModuleMock({'name_encoding': name_encoding})
|
module = FilterModuleMock({"name_encoding": name_encoding})
|
||||||
try:
|
try:
|
||||||
return get_csr_info(module, 'cryptography', content=to_bytes(data), validate_signature=True)
|
return get_csr_info(
|
||||||
|
module=module, content=to_bytes(data), validate_signature=True
|
||||||
|
)
|
||||||
except OpenSSLObjectError as exc:
|
except OpenSSLObjectError as exc:
|
||||||
raise AnsibleFilterError(to_native(exc))
|
raise AnsibleFilterError(str(exc)) from exc
|
||||||
|
|
||||||
|
|
||||||
class FilterModule(object):
|
class FilterModule:
|
||||||
'''Ansible jinja2 filters'''
|
"""Ansible jinja2 filters"""
|
||||||
|
|
||||||
def filters(self):
|
def filters(self) -> dict[str, t.Callable]:
|
||||||
return {
|
return {
|
||||||
'openssl_csr_info': openssl_csr_info_filter,
|
"openssl_csr_info": openssl_csr_info_filter,
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -1,11 +1,9 @@
|
|||||||
# -*- coding: utf-8 -*-
|
|
||||||
|
|
||||||
# Copyright (c) 2022, Felix Fontein <felix@fontein.de>
|
# Copyright (c) 2022, Felix Fontein <felix@fontein.de>
|
||||||
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
|
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||||
# SPDX-License-Identifier: GPL-3.0-or-later
|
# SPDX-License-Identifier: GPL-3.0-or-later
|
||||||
|
|
||||||
from __future__ import absolute_import, division, print_function
|
from __future__ import annotations
|
||||||
__metaclass__ = type
|
|
||||||
|
|
||||||
DOCUMENTATION = r"""
|
DOCUMENTATION = r"""
|
||||||
name: openssl_privatekey_info
|
name: openssl_privatekey_info
|
||||||
@@ -34,12 +32,13 @@ options:
|
|||||||
type: bool
|
type: bool
|
||||||
default: false
|
default: false
|
||||||
extends_documentation_fragment:
|
extends_documentation_fragment:
|
||||||
- community.crypto.name_encoding
|
- community.crypto._name_encoding
|
||||||
seealso:
|
seealso:
|
||||||
- module: community.crypto.openssl_privatekey_info
|
- module: community.crypto.openssl_privatekey_info
|
||||||
"""
|
"""
|
||||||
|
|
||||||
EXAMPLES = r"""
|
EXAMPLES = r"""
|
||||||
|
---
|
||||||
- name: Show the Subject Alt Names of the CSR
|
- name: Show the Subject Alt Names of the CSR
|
||||||
ansible.builtin.debug:
|
ansible.builtin.debug:
|
||||||
msg: >-
|
msg: >-
|
||||||
@@ -147,47 +146,62 @@ _value:
|
|||||||
type: dict
|
type: dict
|
||||||
"""
|
"""
|
||||||
|
|
||||||
from ansible.errors import AnsibleFilterError
|
import typing as t
|
||||||
from ansible.module_utils.six import string_types
|
|
||||||
from ansible.module_utils.common.text.converters import to_bytes, to_native
|
|
||||||
|
|
||||||
from ansible_collections.community.crypto.plugins.module_utils.crypto.basic import (
|
from ansible.errors import AnsibleFilterError
|
||||||
|
from ansible.module_utils.common.text.converters import to_bytes, to_text
|
||||||
|
from ansible_collections.community.crypto.plugins.module_utils._crypto.basic import (
|
||||||
OpenSSLObjectError,
|
OpenSSLObjectError,
|
||||||
)
|
)
|
||||||
|
from ansible_collections.community.crypto.plugins.module_utils._crypto.module_backends.privatekey_info import (
|
||||||
from ansible_collections.community.crypto.plugins.module_utils.crypto.module_backends.privatekey_info import (
|
|
||||||
PrivateKeyParseError,
|
PrivateKeyParseError,
|
||||||
get_privatekey_info,
|
get_privatekey_info,
|
||||||
)
|
)
|
||||||
|
from ansible_collections.community.crypto.plugins.plugin_utils._filter_module import (
|
||||||
from ansible_collections.community.crypto.plugins.plugin_utils.filter_module import FilterModuleMock
|
FilterModuleMock,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
def openssl_privatekey_info_filter(data, passphrase=None, return_private_key_data=False):
|
def openssl_privatekey_info_filter(
|
||||||
'''Extract information from X.509 PEM certificate.'''
|
data: str | bytes,
|
||||||
if not isinstance(data, string_types):
|
passphrase: str | bytes | None = None,
|
||||||
raise AnsibleFilterError('The community.crypto.openssl_privatekey_info input must be a text type, not %s' % type(data))
|
return_private_key_data: bool = False,
|
||||||
if passphrase is not None and not isinstance(passphrase, string_types):
|
) -> dict[str, t.Any]:
|
||||||
raise AnsibleFilterError('The passphrase option must be a text type, not %s' % type(passphrase))
|
"""Extract information from X.509 PEM certificate."""
|
||||||
|
if not isinstance(data, (str, bytes)):
|
||||||
|
raise AnsibleFilterError(
|
||||||
|
f"The community.crypto.openssl_privatekey_info input must be a text type, not {type(data)}"
|
||||||
|
)
|
||||||
|
if passphrase is not None and not isinstance(passphrase, (str, bytes)):
|
||||||
|
raise AnsibleFilterError(
|
||||||
|
f"The passphrase option must be a text type, not {type(passphrase)}"
|
||||||
|
)
|
||||||
if not isinstance(return_private_key_data, bool):
|
if not isinstance(return_private_key_data, bool):
|
||||||
raise AnsibleFilterError('The return_private_key_data option must be a boolean, not %s' % type(return_private_key_data))
|
raise AnsibleFilterError(
|
||||||
|
f"The return_private_key_data option must be a boolean, not {type(return_private_key_data)}"
|
||||||
|
)
|
||||||
|
|
||||||
module = FilterModuleMock({})
|
module = FilterModuleMock({})
|
||||||
try:
|
try:
|
||||||
result = get_privatekey_info(module, 'cryptography', content=to_bytes(data), passphrase=passphrase, return_private_key_data=return_private_key_data)
|
result = get_privatekey_info(
|
||||||
result.pop('can_parse_key', None)
|
module=module,
|
||||||
result.pop('key_is_consistent', None)
|
content=to_bytes(data),
|
||||||
|
passphrase=to_text(passphrase) if passphrase is not None else None,
|
||||||
|
return_private_key_data=return_private_key_data,
|
||||||
|
)
|
||||||
|
result.pop("can_parse_key", None)
|
||||||
|
result.pop("key_is_consistent", None)
|
||||||
return result
|
return result
|
||||||
except PrivateKeyParseError as exc:
|
except PrivateKeyParseError as exc:
|
||||||
raise AnsibleFilterError(exc.error_message)
|
raise AnsibleFilterError(exc.error_message) from exc
|
||||||
except OpenSSLObjectError as exc:
|
except OpenSSLObjectError as exc:
|
||||||
raise AnsibleFilterError(to_native(exc))
|
raise AnsibleFilterError(str(exc)) from exc
|
||||||
|
|
||||||
|
|
||||||
class FilterModule(object):
|
class FilterModule:
|
||||||
'''Ansible jinja2 filters'''
|
"""Ansible jinja2 filters"""
|
||||||
|
|
||||||
def filters(self):
|
def filters(self) -> dict[str, t.Callable]:
|
||||||
return {
|
return {
|
||||||
'openssl_privatekey_info': openssl_privatekey_info_filter,
|
"openssl_privatekey_info": openssl_privatekey_info_filter,
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -1,11 +1,9 @@
|
|||||||
# -*- coding: utf-8 -*-
|
|
||||||
|
|
||||||
# Copyright (c) 2022, Felix Fontein <felix@fontein.de>
|
# Copyright (c) 2022, Felix Fontein <felix@fontein.de>
|
||||||
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
|
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||||
# SPDX-License-Identifier: GPL-3.0-or-later
|
# SPDX-License-Identifier: GPL-3.0-or-later
|
||||||
|
|
||||||
from __future__ import absolute_import, division, print_function
|
from __future__ import annotations
|
||||||
__metaclass__ = type
|
|
||||||
|
|
||||||
DOCUMENTATION = r"""
|
DOCUMENTATION = r"""
|
||||||
name: openssl_publickey_info
|
name: openssl_publickey_info
|
||||||
@@ -27,6 +25,7 @@ seealso:
|
|||||||
"""
|
"""
|
||||||
|
|
||||||
EXAMPLES = r"""
|
EXAMPLES = r"""
|
||||||
|
---
|
||||||
- name: Show the type of a public key
|
- name: Show the type of a public key
|
||||||
ansible.builtin.debug:
|
ansible.builtin.debug:
|
||||||
msg: >-
|
msg: >-
|
||||||
@@ -124,40 +123,42 @@ _value:
|
|||||||
returned: When RV(_value.type=DSA) or RV(_value.type=ECC)
|
returned: When RV(_value.type=DSA) or RV(_value.type=ECC)
|
||||||
"""
|
"""
|
||||||
|
|
||||||
from ansible.errors import AnsibleFilterError
|
import typing as t
|
||||||
from ansible.module_utils.six import string_types
|
|
||||||
from ansible.module_utils.common.text.converters import to_bytes, to_native
|
|
||||||
|
|
||||||
from ansible_collections.community.crypto.plugins.module_utils.crypto.basic import (
|
from ansible.errors import AnsibleFilterError
|
||||||
|
from ansible.module_utils.common.text.converters import to_bytes
|
||||||
|
from ansible_collections.community.crypto.plugins.module_utils._crypto.basic import (
|
||||||
OpenSSLObjectError,
|
OpenSSLObjectError,
|
||||||
)
|
)
|
||||||
|
from ansible_collections.community.crypto.plugins.module_utils._crypto.module_backends.publickey_info import (
|
||||||
from ansible_collections.community.crypto.plugins.module_utils.crypto.module_backends.publickey_info import (
|
|
||||||
PublicKeyParseError,
|
PublicKeyParseError,
|
||||||
get_publickey_info,
|
get_publickey_info,
|
||||||
)
|
)
|
||||||
|
from ansible_collections.community.crypto.plugins.plugin_utils._filter_module import (
|
||||||
from ansible_collections.community.crypto.plugins.plugin_utils.filter_module import FilterModuleMock
|
FilterModuleMock,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
def openssl_publickey_info_filter(data):
|
def openssl_publickey_info_filter(data: str | bytes) -> dict[str, t.Any]:
|
||||||
'''Extract information from OpenSSL PEM public key.'''
|
"""Extract information from OpenSSL PEM public key."""
|
||||||
if not isinstance(data, string_types):
|
if not isinstance(data, (str, bytes)):
|
||||||
raise AnsibleFilterError('The community.crypto.openssl_publickey_info input must be a text type, not %s' % type(data))
|
raise AnsibleFilterError(
|
||||||
|
f"The community.crypto.openssl_publickey_info input must be a text type, not {type(data)}"
|
||||||
|
)
|
||||||
|
|
||||||
module = FilterModuleMock({})
|
module = FilterModuleMock({})
|
||||||
try:
|
try:
|
||||||
return get_publickey_info(module, 'cryptography', content=to_bytes(data))
|
return get_publickey_info(module=module, content=to_bytes(data))
|
||||||
except PublicKeyParseError as exc:
|
except PublicKeyParseError as exc:
|
||||||
raise AnsibleFilterError(exc.error_message)
|
raise AnsibleFilterError(exc.error_message) from exc
|
||||||
except OpenSSLObjectError as exc:
|
except OpenSSLObjectError as exc:
|
||||||
raise AnsibleFilterError(to_native(exc))
|
raise AnsibleFilterError(str(exc)) from exc
|
||||||
|
|
||||||
|
|
||||||
class FilterModule(object):
|
class FilterModule:
|
||||||
'''Ansible jinja2 filters'''
|
"""Ansible jinja2 filters"""
|
||||||
|
|
||||||
def filters(self):
|
def filters(self) -> dict[str, t.Callable]:
|
||||||
return {
|
return {
|
||||||
'openssl_publickey_info': openssl_publickey_info_filter,
|
"openssl_publickey_info": openssl_publickey_info_filter,
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -1,10 +1,9 @@
|
|||||||
# -*- coding: utf-8 -*-
|
|
||||||
# Copyright (c) 2024, Felix Fontein <felix@fontein.de>
|
# Copyright (c) 2024, Felix Fontein <felix@fontein.de>
|
||||||
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
|
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||||
# SPDX-License-Identifier: GPL-3.0-or-later
|
# SPDX-License-Identifier: GPL-3.0-or-later
|
||||||
|
|
||||||
from __future__ import (absolute_import, division, print_function)
|
from __future__ import annotations
|
||||||
__metaclass__ = type
|
|
||||||
|
|
||||||
DOCUMENTATION = r"""
|
DOCUMENTATION = r"""
|
||||||
name: parse_serial
|
name: parse_serial
|
||||||
@@ -27,6 +26,7 @@ seealso:
|
|||||||
"""
|
"""
|
||||||
|
|
||||||
EXAMPLES = r"""
|
EXAMPLES = r"""
|
||||||
|
---
|
||||||
- name: Parse serial number
|
- name: Parse serial number
|
||||||
ansible.builtin.debug:
|
ansible.builtin.debug:
|
||||||
msg: "{{ '11:22:33' | community.crypto.parse_serial }}"
|
msg: "{{ '11:22:33' | community.crypto.parse_serial }}"
|
||||||
@@ -39,28 +39,30 @@ _value:
|
|||||||
type: int
|
type: int
|
||||||
"""
|
"""
|
||||||
|
|
||||||
|
import typing as t
|
||||||
|
|
||||||
from ansible.errors import AnsibleFilterError
|
from ansible.errors import AnsibleFilterError
|
||||||
from ansible.module_utils.common.text.converters import to_native
|
from ansible.module_utils.common.text.converters import to_text
|
||||||
from ansible.module_utils.six import string_types
|
from ansible_collections.community.crypto.plugins.module_utils._serial import (
|
||||||
|
parse_serial,
|
||||||
from ansible_collections.community.crypto.plugins.module_utils.serial import parse_serial
|
)
|
||||||
|
|
||||||
|
|
||||||
def parse_serial_filter(input):
|
def parse_serial_filter(serial_str: str | bytes) -> int:
|
||||||
if not isinstance(input, string_types):
|
if not isinstance(serial_str, (str, bytes)):
|
||||||
raise AnsibleFilterError(
|
raise AnsibleFilterError(
|
||||||
'The input for the community.crypto.parse_serial filter must be a string; got {type} instead'.format(type=type(input))
|
f"The input for the community.crypto.parse_serial filter must be a string; got {type(serial_str)} instead"
|
||||||
)
|
)
|
||||||
try:
|
try:
|
||||||
return parse_serial(to_native(input))
|
return parse_serial(to_text(serial_str))
|
||||||
except ValueError as exc:
|
except ValueError as exc:
|
||||||
raise AnsibleFilterError(to_native(exc))
|
raise AnsibleFilterError(str(exc)) from exc
|
||||||
|
|
||||||
|
|
||||||
class FilterModule(object):
|
class FilterModule:
|
||||||
'''Ansible jinja2 filters'''
|
"""Ansible jinja2 filters"""
|
||||||
|
|
||||||
def filters(self):
|
def filters(self) -> dict[str, t.Callable]:
|
||||||
return {
|
return {
|
||||||
'parse_serial': parse_serial_filter,
|
"parse_serial": parse_serial_filter,
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -1,11 +1,9 @@
|
|||||||
# -*- coding: utf-8 -*-
|
|
||||||
|
|
||||||
# Copyright (c) 2022, Felix Fontein <felix@fontein.de>
|
# Copyright (c) 2022, Felix Fontein <felix@fontein.de>
|
||||||
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
|
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||||
# SPDX-License-Identifier: GPL-3.0-or-later
|
# SPDX-License-Identifier: GPL-3.0-or-later
|
||||||
|
|
||||||
from __future__ import absolute_import, division, print_function
|
from __future__ import annotations
|
||||||
__metaclass__ = type
|
|
||||||
|
|
||||||
DOCUMENTATION = r"""
|
DOCUMENTATION = r"""
|
||||||
name: split_pem
|
name: split_pem
|
||||||
@@ -24,6 +22,7 @@ options:
|
|||||||
"""
|
"""
|
||||||
|
|
||||||
EXAMPLES = r"""
|
EXAMPLES = r"""
|
||||||
|
---
|
||||||
- name: Print all CA certificates
|
- name: Print all CA certificates
|
||||||
ansible.builtin.debug:
|
ansible.builtin.debug:
|
||||||
msg: '{{ item }}'
|
msg: '{{ item }}'
|
||||||
@@ -39,26 +38,29 @@ _value:
|
|||||||
elements: string
|
elements: string
|
||||||
"""
|
"""
|
||||||
|
|
||||||
|
import typing as t
|
||||||
|
|
||||||
from ansible.errors import AnsibleFilterError
|
from ansible.errors import AnsibleFilterError
|
||||||
from ansible.module_utils.six import string_types
|
|
||||||
from ansible.module_utils.common.text.converters import to_text
|
from ansible.module_utils.common.text.converters import to_text
|
||||||
|
from ansible_collections.community.crypto.plugins.module_utils._crypto.pem import (
|
||||||
from ansible_collections.community.crypto.plugins.module_utils.crypto.pem import split_pem_list
|
split_pem_list,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
def split_pem_filter(data):
|
def split_pem_filter(data: str | bytes) -> list[str]:
|
||||||
'''Split PEM file.'''
|
"""Split PEM file."""
|
||||||
if not isinstance(data, string_types):
|
if not isinstance(data, (str, bytes)):
|
||||||
raise AnsibleFilterError('The community.crypto.split_pem input must be a text type, not %s' % type(data))
|
raise AnsibleFilterError(
|
||||||
|
f"The community.crypto.split_pem input must be a text type, not {type(data)}"
|
||||||
|
)
|
||||||
|
|
||||||
data = to_text(data)
|
return split_pem_list(to_text(data))
|
||||||
return split_pem_list(data)
|
|
||||||
|
|
||||||
|
|
||||||
class FilterModule(object):
|
class FilterModule:
|
||||||
'''Ansible jinja2 filters'''
|
"""Ansible jinja2 filters"""
|
||||||
|
|
||||||
def filters(self):
|
def filters(self) -> dict[str, t.Callable]:
|
||||||
return {
|
return {
|
||||||
'split_pem': split_pem_filter,
|
"split_pem": split_pem_filter,
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -1,10 +1,9 @@
|
|||||||
# -*- coding: utf-8 -*-
|
|
||||||
# Copyright (c) 2024, Felix Fontein <felix@fontein.de>
|
# Copyright (c) 2024, Felix Fontein <felix@fontein.de>
|
||||||
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
|
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||||
# SPDX-License-Identifier: GPL-3.0-or-later
|
# SPDX-License-Identifier: GPL-3.0-or-later
|
||||||
|
|
||||||
from __future__ import (absolute_import, division, print_function)
|
from __future__ import annotations
|
||||||
__metaclass__ = type
|
|
||||||
|
|
||||||
DOCUMENTATION = r"""
|
DOCUMENTATION = r"""
|
||||||
name: to_serial
|
name: to_serial
|
||||||
@@ -25,6 +24,7 @@ seealso:
|
|||||||
"""
|
"""
|
||||||
|
|
||||||
EXAMPLES = r"""
|
EXAMPLES = r"""
|
||||||
|
---
|
||||||
- name: Convert integer to serial number
|
- name: Convert integer to serial number
|
||||||
ansible.builtin.debug:
|
ansible.builtin.debug:
|
||||||
msg: "{{ 1234567 | community.crypto.to_serial }}"
|
msg: "{{ 1234567 | community.crypto.to_serial }}"
|
||||||
@@ -39,30 +39,31 @@ _value:
|
|||||||
type: string
|
type: string
|
||||||
"""
|
"""
|
||||||
|
|
||||||
|
import typing as t
|
||||||
|
|
||||||
from ansible.errors import AnsibleFilterError
|
from ansible.errors import AnsibleFilterError
|
||||||
from ansible.module_utils.common.text.converters import to_native
|
from ansible_collections.community.crypto.plugins.module_utils._serial import to_serial
|
||||||
from ansible.module_utils.six import integer_types
|
|
||||||
|
|
||||||
from ansible_collections.community.crypto.plugins.module_utils.serial import to_serial
|
|
||||||
|
|
||||||
|
|
||||||
def to_serial_filter(input):
|
def to_serial_filter(serial_int: int) -> str:
|
||||||
if not isinstance(input, integer_types):
|
if not isinstance(serial_int, int):
|
||||||
raise AnsibleFilterError(
|
raise AnsibleFilterError(
|
||||||
'The input for the community.crypto.to_serial filter must be an integer; got {type} instead'.format(type=type(input))
|
f"The input for the community.crypto.to_serial filter must be an integer; got {type(serial_int)} instead"
|
||||||
|
)
|
||||||
|
if serial_int < 0:
|
||||||
|
raise AnsibleFilterError(
|
||||||
|
"The input for the community.crypto.to_serial filter must not be negative"
|
||||||
)
|
)
|
||||||
if input < 0:
|
|
||||||
raise AnsibleFilterError('The input for the community.crypto.to_serial filter must not be negative')
|
|
||||||
try:
|
try:
|
||||||
return to_serial(input)
|
return to_serial(serial_int)
|
||||||
except ValueError as exc:
|
except ValueError as exc:
|
||||||
raise AnsibleFilterError(to_native(exc))
|
raise AnsibleFilterError(str(exc)) from exc
|
||||||
|
|
||||||
|
|
||||||
class FilterModule(object):
|
class FilterModule:
|
||||||
'''Ansible jinja2 filters'''
|
"""Ansible jinja2 filters"""
|
||||||
|
|
||||||
def filters(self):
|
def filters(self) -> dict[str, t.Callable]:
|
||||||
return {
|
return {
|
||||||
'to_serial': to_serial_filter,
|
"to_serial": to_serial_filter,
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -1,11 +1,9 @@
|
|||||||
# -*- coding: utf-8 -*-
|
|
||||||
|
|
||||||
# Copyright (c) 2022, Felix Fontein <felix@fontein.de>
|
# Copyright (c) 2022, Felix Fontein <felix@fontein.de>
|
||||||
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
|
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||||
# SPDX-License-Identifier: GPL-3.0-or-later
|
# SPDX-License-Identifier: GPL-3.0-or-later
|
||||||
|
|
||||||
from __future__ import absolute_import, division, print_function
|
from __future__ import annotations
|
||||||
__metaclass__ = type
|
|
||||||
|
|
||||||
DOCUMENTATION = r"""
|
DOCUMENTATION = r"""
|
||||||
name: x509_certificate_info
|
name: x509_certificate_info
|
||||||
@@ -23,7 +21,7 @@ options:
|
|||||||
type: string
|
type: string
|
||||||
required: true
|
required: true
|
||||||
extends_documentation_fragment:
|
extends_documentation_fragment:
|
||||||
- community.crypto.name_encoding
|
- community.crypto._name_encoding
|
||||||
seealso:
|
seealso:
|
||||||
- module: community.crypto.x509_certificate_info
|
- module: community.crypto.x509_certificate_info
|
||||||
- plugin: community.crypto.to_serial
|
- plugin: community.crypto.to_serial
|
||||||
@@ -31,6 +29,7 @@ seealso:
|
|||||||
"""
|
"""
|
||||||
|
|
||||||
EXAMPLES = r"""
|
EXAMPLES = r"""
|
||||||
|
---
|
||||||
- name: Show the Subject Alt Names of the certificate
|
- name: Show the Subject Alt Names of the certificate
|
||||||
ansible.builtin.debug:
|
ansible.builtin.debug:
|
||||||
msg: >-
|
msg: >-
|
||||||
@@ -309,42 +308,50 @@ _value:
|
|||||||
type: str
|
type: str
|
||||||
"""
|
"""
|
||||||
|
|
||||||
from ansible.errors import AnsibleFilterError
|
import typing as t
|
||||||
from ansible.module_utils.six import string_types
|
|
||||||
from ansible.module_utils.common.text.converters import to_bytes, to_native
|
|
||||||
|
|
||||||
from ansible_collections.community.crypto.plugins.module_utils.crypto.basic import (
|
from ansible.errors import AnsibleFilterError
|
||||||
|
from ansible.module_utils.common.text.converters import to_bytes, to_text
|
||||||
|
from ansible_collections.community.crypto.plugins.module_utils._crypto.basic import (
|
||||||
OpenSSLObjectError,
|
OpenSSLObjectError,
|
||||||
)
|
)
|
||||||
|
from ansible_collections.community.crypto.plugins.module_utils._crypto.module_backends.certificate_info import (
|
||||||
from ansible_collections.community.crypto.plugins.module_utils.crypto.module_backends.certificate_info import (
|
|
||||||
get_certificate_info,
|
get_certificate_info,
|
||||||
)
|
)
|
||||||
|
from ansible_collections.community.crypto.plugins.plugin_utils._filter_module import (
|
||||||
from ansible_collections.community.crypto.plugins.plugin_utils.filter_module import FilterModuleMock
|
FilterModuleMock,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
def x509_certificate_info_filter(data, name_encoding='ignore'):
|
def x509_certificate_info_filter(
|
||||||
'''Extract information from X.509 PEM certificate.'''
|
data: str | bytes, name_encoding: t.Literal["ignore", "idna", "unicode"] = "ignore"
|
||||||
if not isinstance(data, string_types):
|
) -> dict[str, t.Any]:
|
||||||
raise AnsibleFilterError('The community.crypto.x509_certificate_info input must be a text type, not %s' % type(data))
|
"""Extract information from X.509 PEM certificate."""
|
||||||
if not isinstance(name_encoding, string_types):
|
if not isinstance(data, (str, bytes)):
|
||||||
raise AnsibleFilterError('The name_encoding option must be of a text type, not %s' % type(name_encoding))
|
raise AnsibleFilterError(
|
||||||
name_encoding = to_native(name_encoding)
|
f"The community.crypto.x509_certificate_info input must be a text type, not {type(data)}"
|
||||||
if name_encoding not in ('ignore', 'idna', 'unicode'):
|
)
|
||||||
raise AnsibleFilterError('The name_encoding option must be one of the values "ignore", "idna", or "unicode", not "%s"' % name_encoding)
|
if not isinstance(name_encoding, (str, bytes)):
|
||||||
|
raise AnsibleFilterError(
|
||||||
|
f"The name_encoding option must be of a text type, not {type(name_encoding)}"
|
||||||
|
)
|
||||||
|
name_encoding = to_text(name_encoding)
|
||||||
|
if name_encoding not in ("ignore", "idna", "unicode"):
|
||||||
|
raise AnsibleFilterError(
|
||||||
|
f'The name_encoding option must be one of the values "ignore", "idna", or "unicode", not "{name_encoding}"'
|
||||||
|
)
|
||||||
|
|
||||||
module = FilterModuleMock({'name_encoding': name_encoding})
|
module = FilterModuleMock({"name_encoding": name_encoding})
|
||||||
try:
|
try:
|
||||||
return get_certificate_info(module, 'cryptography', content=to_bytes(data))
|
return get_certificate_info(module=module, content=to_bytes(data))
|
||||||
except OpenSSLObjectError as exc:
|
except OpenSSLObjectError as exc:
|
||||||
raise AnsibleFilterError(to_native(exc))
|
raise AnsibleFilterError(str(exc)) from exc
|
||||||
|
|
||||||
|
|
||||||
class FilterModule(object):
|
class FilterModule:
|
||||||
'''Ansible jinja2 filters'''
|
"""Ansible jinja2 filters"""
|
||||||
|
|
||||||
def filters(self):
|
def filters(self) -> dict[str, t.Callable]:
|
||||||
return {
|
return {
|
||||||
'x509_certificate_info': x509_certificate_info_filter,
|
"x509_certificate_info": x509_certificate_info_filter,
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -1,11 +1,9 @@
|
|||||||
# -*- coding: utf-8 -*-
|
|
||||||
|
|
||||||
# Copyright (c) 2022, Felix Fontein <felix@fontein.de>
|
# Copyright (c) 2022, Felix Fontein <felix@fontein.de>
|
||||||
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
|
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||||
# SPDX-License-Identifier: GPL-3.0-or-later
|
# SPDX-License-Identifier: GPL-3.0-or-later
|
||||||
|
|
||||||
from __future__ import absolute_import, division, print_function
|
from __future__ import annotations
|
||||||
__metaclass__ = type
|
|
||||||
|
|
||||||
DOCUMENTATION = r"""
|
DOCUMENTATION = r"""
|
||||||
name: x509_crl_info
|
name: x509_crl_info
|
||||||
@@ -31,7 +29,7 @@ options:
|
|||||||
default: true
|
default: true
|
||||||
version_added: 1.7.0
|
version_added: 1.7.0
|
||||||
extends_documentation_fragment:
|
extends_documentation_fragment:
|
||||||
- community.crypto.name_encoding
|
- community.crypto._name_encoding
|
||||||
seealso:
|
seealso:
|
||||||
- module: community.crypto.x509_crl_info
|
- module: community.crypto.x509_crl_info
|
||||||
- plugin: community.crypto.to_serial
|
- plugin: community.crypto.to_serial
|
||||||
@@ -39,6 +37,7 @@ seealso:
|
|||||||
"""
|
"""
|
||||||
|
|
||||||
EXAMPLES = r"""
|
EXAMPLES = r"""
|
||||||
|
---
|
||||||
- name: Show the Organization Name of the CRL's subject
|
- name: Show the Organization Name of the CRL's subject
|
||||||
ansible.builtin.debug:
|
ansible.builtin.debug:
|
||||||
msg: >-
|
msg: >-
|
||||||
@@ -156,56 +155,70 @@ _value:
|
|||||||
|
|
||||||
import base64
|
import base64
|
||||||
import binascii
|
import binascii
|
||||||
|
import typing as t
|
||||||
|
|
||||||
from ansible.errors import AnsibleFilterError
|
from ansible.errors import AnsibleFilterError
|
||||||
from ansible.module_utils.six import string_types
|
from ansible.module_utils.common.text.converters import to_bytes, to_text
|
||||||
from ansible.module_utils.common.text.converters import to_bytes, to_native
|
from ansible_collections.community.crypto.plugins.module_utils._crypto.basic import (
|
||||||
|
|
||||||
from ansible_collections.community.crypto.plugins.module_utils.crypto.basic import (
|
|
||||||
OpenSSLObjectError,
|
OpenSSLObjectError,
|
||||||
)
|
)
|
||||||
|
from ansible_collections.community.crypto.plugins.module_utils._crypto.module_backends.crl_info import (
|
||||||
from ansible_collections.community.crypto.plugins.module_utils.crypto.pem import (
|
|
||||||
identify_pem_format,
|
|
||||||
)
|
|
||||||
|
|
||||||
from ansible_collections.community.crypto.plugins.module_utils.crypto.module_backends.crl_info import (
|
|
||||||
get_crl_info,
|
get_crl_info,
|
||||||
)
|
)
|
||||||
|
from ansible_collections.community.crypto.plugins.module_utils._crypto.pem import (
|
||||||
from ansible_collections.community.crypto.plugins.plugin_utils.filter_module import FilterModuleMock
|
identify_pem_format,
|
||||||
|
)
|
||||||
|
from ansible_collections.community.crypto.plugins.plugin_utils._filter_module import (
|
||||||
|
FilterModuleMock,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
def x509_crl_info_filter(data, name_encoding='ignore', list_revoked_certificates=True):
|
def x509_crl_info_filter(
|
||||||
'''Extract information from X.509 PEM certificate.'''
|
data: str | bytes,
|
||||||
if not isinstance(data, string_types):
|
name_encoding: t.Literal["ignore", "idna", "unicode"] = "ignore",
|
||||||
raise AnsibleFilterError('The community.crypto.x509_crl_info input must be a text type, not %s' % type(data))
|
list_revoked_certificates: bool = True,
|
||||||
if not isinstance(name_encoding, string_types):
|
) -> dict[str, t.Any]:
|
||||||
raise AnsibleFilterError('The name_encoding option must be of a text type, not %s' % type(name_encoding))
|
"""Extract information from X.509 PEM certificate."""
|
||||||
|
if not isinstance(data, (str, bytes)):
|
||||||
|
raise AnsibleFilterError(
|
||||||
|
f"The community.crypto.x509_crl_info input must be a text type, not {type(data)}"
|
||||||
|
)
|
||||||
|
if not isinstance(name_encoding, (str, bytes)):
|
||||||
|
raise AnsibleFilterError(
|
||||||
|
f"The name_encoding option must be of a text type, not {type(name_encoding)}"
|
||||||
|
)
|
||||||
if not isinstance(list_revoked_certificates, bool):
|
if not isinstance(list_revoked_certificates, bool):
|
||||||
raise AnsibleFilterError('The list_revoked_certificates option must be a boolean, not %s' % type(list_revoked_certificates))
|
raise AnsibleFilterError(
|
||||||
name_encoding = to_native(name_encoding)
|
f"The list_revoked_certificates option must be a boolean, not {type(list_revoked_certificates)}"
|
||||||
if name_encoding not in ('ignore', 'idna', 'unicode'):
|
)
|
||||||
raise AnsibleFilterError('The name_encoding option must be one of the values "ignore", "idna", or "unicode", not "%s"' % name_encoding)
|
name_encoding = to_text(name_encoding)
|
||||||
|
if name_encoding not in ("ignore", "idna", "unicode"):
|
||||||
|
raise AnsibleFilterError(
|
||||||
|
f'The name_encoding option must be one of the values "ignore", "idna", or "unicode", not "{name_encoding}"'
|
||||||
|
)
|
||||||
|
|
||||||
data = to_bytes(data)
|
data_bytes = to_bytes(data)
|
||||||
if not identify_pem_format(data):
|
if not identify_pem_format(data_bytes):
|
||||||
try:
|
try:
|
||||||
data = base64.b64decode(to_native(data))
|
data_bytes = base64.b64decode(to_text(data_bytes))
|
||||||
except (binascii.Error, TypeError, ValueError, UnicodeEncodeError) as e:
|
except (binascii.Error, TypeError, ValueError, UnicodeEncodeError):
|
||||||
pass
|
pass
|
||||||
|
|
||||||
module = FilterModuleMock({'name_encoding': name_encoding})
|
module = FilterModuleMock({"name_encoding": name_encoding})
|
||||||
try:
|
try:
|
||||||
return get_crl_info(module, content=data, list_revoked_certificates=list_revoked_certificates)
|
return get_crl_info(
|
||||||
|
module=module,
|
||||||
|
content=data_bytes,
|
||||||
|
list_revoked_certificates=list_revoked_certificates,
|
||||||
|
)
|
||||||
except OpenSSLObjectError as exc:
|
except OpenSSLObjectError as exc:
|
||||||
raise AnsibleFilterError(to_native(exc))
|
raise AnsibleFilterError(str(exc)) from exc
|
||||||
|
|
||||||
|
|
||||||
class FilterModule(object):
|
class FilterModule:
|
||||||
'''Ansible jinja2 filters'''
|
"""Ansible jinja2 filters"""
|
||||||
|
|
||||||
def filters(self):
|
def filters(self) -> dict[str, t.Callable]:
|
||||||
return {
|
return {
|
||||||
'x509_crl_info': x509_crl_info_filter,
|
"x509_crl_info": x509_crl_info_filter,
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -1,10 +1,9 @@
|
|||||||
# -*- coding: utf-8 -*-
|
|
||||||
# Copyright (c) 2023, Felix Fontein <felix@fontein.de>
|
# Copyright (c) 2023, Felix Fontein <felix@fontein.de>
|
||||||
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
|
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||||
# SPDX-License-Identifier: GPL-3.0-or-later
|
# SPDX-License-Identifier: GPL-3.0-or-later
|
||||||
|
|
||||||
from __future__ import (absolute_import, division, print_function)
|
from __future__ import annotations
|
||||||
__metaclass__ = type
|
|
||||||
|
|
||||||
DOCUMENTATION = r"""
|
DOCUMENTATION = r"""
|
||||||
name: gpg_fingerprint
|
name: gpg_fingerprint
|
||||||
@@ -28,6 +27,7 @@ seealso:
|
|||||||
"""
|
"""
|
||||||
|
|
||||||
EXAMPLES = r"""
|
EXAMPLES = r"""
|
||||||
|
---
|
||||||
- name: Show fingerprint of GPG public key
|
- name: Show fingerprint of GPG public key
|
||||||
ansible.builtin.debug:
|
ansible.builtin.debug:
|
||||||
msg: "{{ lookup('community.crypto.gpg_fingerprint', '/path/to/public_key.gpg') }}"
|
msg: "{{ lookup('community.crypto.gpg_fingerprint', '/path/to/public_key.gpg') }}"
|
||||||
@@ -42,23 +42,38 @@ _value:
|
|||||||
elements: string
|
elements: string
|
||||||
"""
|
"""
|
||||||
|
|
||||||
from ansible.plugins.lookup import LookupBase
|
import os
|
||||||
from ansible.errors import AnsibleLookupError
|
import typing as t
|
||||||
from ansible.module_utils.common.text.converters import to_native
|
|
||||||
|
|
||||||
from ansible_collections.community.crypto.plugins.module_utils.gnupg.cli import GPGError, get_fingerprint_from_file
|
from ansible.errors import AnsibleLookupError
|
||||||
from ansible_collections.community.crypto.plugins.plugin_utils.gnupg import PluginGPGRunner
|
from ansible.module_utils.common.text.converters import to_text
|
||||||
|
from ansible.plugins.lookup import LookupBase
|
||||||
|
from ansible_collections.community.crypto.plugins.module_utils._gnupg.cli import (
|
||||||
|
GPGError,
|
||||||
|
get_fingerprint_from_file,
|
||||||
|
)
|
||||||
|
from ansible_collections.community.crypto.plugins.plugin_utils._gnupg import (
|
||||||
|
PluginGPGRunner,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
class LookupModule(LookupBase):
|
class LookupModule(LookupBase):
|
||||||
def run(self, terms, variables=None, **kwargs):
|
def run(self, terms: list[t.Any], variables=None, **kwargs) -> list[str]:
|
||||||
self.set_options(direct=kwargs)
|
self.set_options(direct=kwargs)
|
||||||
|
if self._loader is None:
|
||||||
|
raise AssertionError("Contract violation: self._loader is None")
|
||||||
|
|
||||||
try:
|
try:
|
||||||
gpg = PluginGPGRunner(cwd=self._loader.get_basedir())
|
gpg = PluginGPGRunner(cwd=self._loader.get_basedir())
|
||||||
result = []
|
result = []
|
||||||
for path in terms:
|
for i, path in enumerate(terms):
|
||||||
result.append(get_fingerprint_from_file(gpg, path))
|
if not isinstance(path, (str, bytes, os.PathLike)):
|
||||||
|
raise AnsibleLookupError(
|
||||||
|
f"Lookup parameter #{i} should be string or a path object, but got {type(path)}"
|
||||||
|
)
|
||||||
|
result.append(
|
||||||
|
get_fingerprint_from_file(gpg_runner=gpg, path=to_text(path))
|
||||||
|
)
|
||||||
return result
|
return result
|
||||||
except GPGError as exc:
|
except GPGError as exc:
|
||||||
raise AnsibleLookupError(to_native(exc))
|
raise AnsibleLookupError(str(exc)) from exc
|
||||||
|
|||||||
346
plugins/module_utils/_acme/account.py
Normal file
346
plugins/module_utils/_acme/account.py
Normal file
@@ -0,0 +1,346 @@
|
|||||||
|
# Copyright (c) 2016 Michael Gruener <michael.gruener@chaosmoon.net>
|
||||||
|
# Copyright (c) 2021 Felix Fontein <felix@fontein.de>
|
||||||
|
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||||
|
# SPDX-License-Identifier: GPL-3.0-or-later
|
||||||
|
|
||||||
|
# Note that this module util is **PRIVATE** to the collection. It can have breaking changes at any time.
|
||||||
|
# Do not use this from other collections or standalone plugins/modules!
|
||||||
|
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import typing as t
|
||||||
|
|
||||||
|
from ansible.module_utils.common._collections_compat import Mapping
|
||||||
|
from ansible_collections.community.crypto.plugins.module_utils._acme.errors import (
|
||||||
|
ACMEProtocolException,
|
||||||
|
ModuleFailException,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
if t.TYPE_CHECKING:
|
||||||
|
from ansible_collections.community.crypto.plugins.module_utils._acme.acme import (
|
||||||
|
ACMEClient,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
class ACMEAccount:
|
||||||
|
"""
|
||||||
|
ACME account object. Allows to create new accounts, check for existence of accounts,
|
||||||
|
retrieve account data.
|
||||||
|
"""
|
||||||
|
|
||||||
|
def __init__(self, *, client: ACMEClient) -> None:
|
||||||
|
# Set to true to enable logging of all signed requests
|
||||||
|
self._debug: bool = False
|
||||||
|
|
||||||
|
self.client = client
|
||||||
|
|
||||||
|
def _new_reg(
|
||||||
|
self,
|
||||||
|
*,
|
||||||
|
contact: list[str] | None = None,
|
||||||
|
terms_agreed: bool = False,
|
||||||
|
allow_creation: bool = True,
|
||||||
|
external_account_binding: dict[str, t.Any] | None = None,
|
||||||
|
) -> tuple[bool, dict[str, t.Any] | None]:
|
||||||
|
"""
|
||||||
|
Registers a new ACME account. Returns a pair ``(created, data)``.
|
||||||
|
Here, ``created`` is ``True`` if the account was created and
|
||||||
|
``False`` if it already existed (e.g. it was not newly created),
|
||||||
|
or does not exist. In case the account was created or exists,
|
||||||
|
``data`` contains the account data; otherwise, it is ``None``.
|
||||||
|
|
||||||
|
If specified, ``external_account_binding`` should be a dictionary
|
||||||
|
with keys ``kid``, ``alg`` and ``key``
|
||||||
|
(https://tools.ietf.org/html/rfc8555#section-7.3.4).
|
||||||
|
|
||||||
|
https://tools.ietf.org/html/rfc8555#section-7.3
|
||||||
|
"""
|
||||||
|
contact = contact or []
|
||||||
|
|
||||||
|
if (
|
||||||
|
external_account_binding is not None
|
||||||
|
or self.client.directory["meta"].get("externalAccountRequired")
|
||||||
|
) and allow_creation:
|
||||||
|
# Some ACME servers such as ZeroSSL do not like it when you try to register an existing account
|
||||||
|
# and provide external_account_binding credentials. Thus we first send a request with allow_creation=False
|
||||||
|
# to see whether the account already exists.
|
||||||
|
|
||||||
|
# Note that we pass contact here: ZeroSSL does not accept registration calls without contacts, even
|
||||||
|
# if onlyReturnExisting is set to true.
|
||||||
|
created, data = self._new_reg(contact=contact, allow_creation=False)
|
||||||
|
if data:
|
||||||
|
# An account already exists! Return data
|
||||||
|
return created, data
|
||||||
|
# An account does not yet exist. Try to create one next.
|
||||||
|
|
||||||
|
new_reg: dict[str, t.Any] = {"contact": contact}
|
||||||
|
if not allow_creation:
|
||||||
|
# https://tools.ietf.org/html/rfc8555#section-7.3.1
|
||||||
|
new_reg["onlyReturnExisting"] = True
|
||||||
|
if terms_agreed:
|
||||||
|
new_reg["termsOfServiceAgreed"] = True
|
||||||
|
url = self.client.directory["newAccount"]
|
||||||
|
if external_account_binding is not None:
|
||||||
|
new_reg["externalAccountBinding"] = self.client.sign_request(
|
||||||
|
protected={
|
||||||
|
"alg": external_account_binding["alg"],
|
||||||
|
"kid": external_account_binding["kid"],
|
||||||
|
"url": url,
|
||||||
|
},
|
||||||
|
payload=self.client.account_jwk,
|
||||||
|
key_data=self.client.backend.create_mac_key(
|
||||||
|
alg=external_account_binding["alg"],
|
||||||
|
key=external_account_binding["key"],
|
||||||
|
),
|
||||||
|
)
|
||||||
|
elif (
|
||||||
|
self.client.directory["meta"].get("externalAccountRequired")
|
||||||
|
and allow_creation
|
||||||
|
):
|
||||||
|
raise ModuleFailException(
|
||||||
|
"To create an account, an external account binding must be specified. "
|
||||||
|
"Use the acme_account module with the external_account_binding option."
|
||||||
|
)
|
||||||
|
|
||||||
|
result, info = self.client.send_signed_request(
|
||||||
|
url, new_reg, fail_on_error=False
|
||||||
|
)
|
||||||
|
if not isinstance(result, Mapping):
|
||||||
|
raise ACMEProtocolException(
|
||||||
|
module=self.client.module,
|
||||||
|
msg="Invalid account creation reply from ACME server",
|
||||||
|
info=info,
|
||||||
|
content_json=result,
|
||||||
|
)
|
||||||
|
|
||||||
|
if info["status"] == 201:
|
||||||
|
# Account did not exist
|
||||||
|
if "location" in info:
|
||||||
|
self.client.set_account_uri(info["location"])
|
||||||
|
return True, result
|
||||||
|
if info["status"] == 200:
|
||||||
|
# Account did exist
|
||||||
|
if result.get("status") == "deactivated":
|
||||||
|
# A bug in Pebble (https://github.com/letsencrypt/pebble/issues/179) and
|
||||||
|
# Boulder (https://github.com/letsencrypt/boulder/issues/3971): this should
|
||||||
|
# not return a valid account object according to
|
||||||
|
# https://tools.ietf.org/html/rfc8555#section-7.3.6:
|
||||||
|
# "Once an account is deactivated, the server MUST NOT accept further
|
||||||
|
# requests authorized by that account's key."
|
||||||
|
if not allow_creation:
|
||||||
|
return False, None
|
||||||
|
raise ModuleFailException("Account is deactivated")
|
||||||
|
if "location" in info:
|
||||||
|
self.client.set_account_uri(info["location"])
|
||||||
|
return False, result
|
||||||
|
if (
|
||||||
|
info["status"] in (400, 404)
|
||||||
|
and result["type"] == "urn:ietf:params:acme:error:accountDoesNotExist"
|
||||||
|
and not allow_creation
|
||||||
|
):
|
||||||
|
# Account does not exist (and we did not try to create it)
|
||||||
|
# (According to RFC 8555, Section 7.3.1, the HTTP status code MUST be 400.
|
||||||
|
# Unfortunately Digicert does not care and sends 404 instead.)
|
||||||
|
return False, None
|
||||||
|
if (
|
||||||
|
info["status"] == 403
|
||||||
|
and result["type"] == "urn:ietf:params:acme:error:unauthorized"
|
||||||
|
and "deactivated" in (result.get("detail") or "")
|
||||||
|
):
|
||||||
|
# Account has been deactivated; currently works for Pebble; has not been
|
||||||
|
# implemented for Boulder (https://github.com/letsencrypt/boulder/issues/3971),
|
||||||
|
# might need adjustment in error detection.
|
||||||
|
if not allow_creation:
|
||||||
|
return False, None
|
||||||
|
raise ModuleFailException("Account is deactivated")
|
||||||
|
raise ACMEProtocolException(
|
||||||
|
module=self.client.module,
|
||||||
|
msg="Registering ACME account failed",
|
||||||
|
info=info,
|
||||||
|
content_json=result,
|
||||||
|
)
|
||||||
|
|
||||||
|
def get_account_data(self) -> dict[str, t.Any] | None:
|
||||||
|
"""
|
||||||
|
Retrieve account information. Can only be called when the account
|
||||||
|
URI is already known (such as after calling setup_account).
|
||||||
|
Return None if the account was deactivated, or a dict otherwise.
|
||||||
|
"""
|
||||||
|
if self.client.account_uri is None:
|
||||||
|
raise ModuleFailException("Account URI unknown")
|
||||||
|
# try POST-as-GET first (draft-15 or newer)
|
||||||
|
data: dict[str, t.Any] | None = None
|
||||||
|
result, info = self.client.send_signed_request(
|
||||||
|
self.client.account_uri, data, fail_on_error=False
|
||||||
|
)
|
||||||
|
# check whether that failed with a malformed request error
|
||||||
|
if (
|
||||||
|
info["status"] >= 400
|
||||||
|
and result.get("type") == "urn:ietf:params:acme:error:malformed"
|
||||||
|
):
|
||||||
|
# retry as a regular POST (with no changed data) for pre-draft-15 ACME servers
|
||||||
|
data = {}
|
||||||
|
result, info = self.client.send_signed_request(
|
||||||
|
self.client.account_uri, data, fail_on_error=False
|
||||||
|
)
|
||||||
|
if not isinstance(result, Mapping):
|
||||||
|
raise ACMEProtocolException(
|
||||||
|
module=self.client.module,
|
||||||
|
msg="Invalid account data retrieved from ACME server",
|
||||||
|
info=info,
|
||||||
|
content_json=result,
|
||||||
|
)
|
||||||
|
if (
|
||||||
|
info["status"] in (400, 403)
|
||||||
|
and result.get("type") == "urn:ietf:params:acme:error:unauthorized"
|
||||||
|
):
|
||||||
|
# Returned when account is deactivated
|
||||||
|
return None
|
||||||
|
if (
|
||||||
|
info["status"] in (400, 404)
|
||||||
|
and result.get("type") == "urn:ietf:params:acme:error:accountDoesNotExist"
|
||||||
|
):
|
||||||
|
# Returned when account does not exist
|
||||||
|
return None
|
||||||
|
if info["status"] < 200 or info["status"] >= 300:
|
||||||
|
raise ACMEProtocolException(
|
||||||
|
module=self.client.module,
|
||||||
|
msg="Error retrieving account data",
|
||||||
|
info=info,
|
||||||
|
content_json=result,
|
||||||
|
)
|
||||||
|
return result
|
||||||
|
|
||||||
|
@t.overload
|
||||||
|
def setup_account(
|
||||||
|
self,
|
||||||
|
*,
|
||||||
|
contact: list[str] | None = None,
|
||||||
|
terms_agreed: bool = False,
|
||||||
|
allow_creation: t.Literal[True] = True,
|
||||||
|
remove_account_uri_if_not_exists: bool = False,
|
||||||
|
external_account_binding: dict[str, t.Any] | None = None,
|
||||||
|
) -> tuple[bool, dict[str, t.Any]]: ...
|
||||||
|
|
||||||
|
@t.overload
|
||||||
|
def setup_account(
|
||||||
|
self,
|
||||||
|
*,
|
||||||
|
contact: list[str] | None = None,
|
||||||
|
terms_agreed: bool = False,
|
||||||
|
allow_creation: bool = True,
|
||||||
|
remove_account_uri_if_not_exists: bool = False,
|
||||||
|
external_account_binding: dict[str, t.Any] | None = None,
|
||||||
|
) -> tuple[bool, dict[str, t.Any] | None]: ...
|
||||||
|
|
||||||
|
def setup_account(
|
||||||
|
self,
|
||||||
|
*,
|
||||||
|
contact: list[str] | None = None,
|
||||||
|
terms_agreed: bool = False,
|
||||||
|
allow_creation: bool = True,
|
||||||
|
remove_account_uri_if_not_exists: bool = False,
|
||||||
|
external_account_binding: dict[str, t.Any] | None = None,
|
||||||
|
) -> tuple[bool, dict[str, t.Any] | None]:
|
||||||
|
"""
|
||||||
|
Detect or create an account on the ACME server. For ACME v1,
|
||||||
|
as the only way (without knowing an account URI) to test if an
|
||||||
|
account exists is to try and create one with the provided account
|
||||||
|
key, this method will always result in an account being present
|
||||||
|
(except on error situations). For ACME v2, a new account will
|
||||||
|
only be created if ``allow_creation`` is set to True.
|
||||||
|
|
||||||
|
For ACME v2, ``check_mode`` is fully respected. For ACME v1, the
|
||||||
|
account might be created if it does not yet exist.
|
||||||
|
|
||||||
|
Return a pair ``(created, account_data)``. Here, ``created`` will
|
||||||
|
be ``True`` in case the account was created or would be created
|
||||||
|
(check mode). ``account_data`` will be the current account data,
|
||||||
|
or ``None`` if the account does not exist.
|
||||||
|
|
||||||
|
The account URI will be stored in ``client.account_uri``; if it is ``None``,
|
||||||
|
the account does not exist.
|
||||||
|
|
||||||
|
If specified, ``external_account_binding`` should be a dictionary
|
||||||
|
with keys ``kid``, ``alg`` and ``key``
|
||||||
|
(https://tools.ietf.org/html/rfc8555#section-7.3.4).
|
||||||
|
|
||||||
|
https://tools.ietf.org/html/rfc8555#section-7.3
|
||||||
|
"""
|
||||||
|
|
||||||
|
if self.client.account_uri is not None:
|
||||||
|
created = False
|
||||||
|
# Verify that the account key belongs to the URI.
|
||||||
|
# (If update_contact is True, this will be done below.)
|
||||||
|
account_data = self.get_account_data()
|
||||||
|
if account_data is None:
|
||||||
|
if remove_account_uri_if_not_exists and not allow_creation:
|
||||||
|
self.client.account_uri = None
|
||||||
|
else:
|
||||||
|
raise ModuleFailException(
|
||||||
|
"Account is deactivated or does not exist!"
|
||||||
|
)
|
||||||
|
else:
|
||||||
|
created, account_data = self._new_reg(
|
||||||
|
contact=contact,
|
||||||
|
terms_agreed=terms_agreed,
|
||||||
|
allow_creation=allow_creation and not self.client.module.check_mode,
|
||||||
|
external_account_binding=external_account_binding,
|
||||||
|
)
|
||||||
|
if (
|
||||||
|
self.client.module.check_mode
|
||||||
|
and self.client.account_uri is None
|
||||||
|
and allow_creation
|
||||||
|
):
|
||||||
|
created = True
|
||||||
|
account_data = {"contact": contact or []}
|
||||||
|
return created, account_data
|
||||||
|
|
||||||
|
def update_account(
|
||||||
|
self, *, account_data: dict[str, t.Any], contact: list[str] | None = None
|
||||||
|
) -> tuple[bool, dict[str, t.Any]]:
|
||||||
|
"""
|
||||||
|
Update an account on the ACME server. Check mode is fully respected.
|
||||||
|
|
||||||
|
The current account data must be provided as ``account_data``.
|
||||||
|
|
||||||
|
Return a pair ``(updated, account_data)``, where ``updated`` is
|
||||||
|
``True`` in case something changed (contact info updated) or
|
||||||
|
would be changed (check mode), and ``account_data`` the updated
|
||||||
|
account data.
|
||||||
|
|
||||||
|
https://tools.ietf.org/html/rfc8555#section-7.3.2
|
||||||
|
"""
|
||||||
|
if self.client.account_uri is None:
|
||||||
|
raise ModuleFailException("Cannot update account without account URI")
|
||||||
|
|
||||||
|
# Create request
|
||||||
|
update_request: dict[str, t.Any] = {}
|
||||||
|
if contact is not None and account_data.get("contact", []) != contact:
|
||||||
|
update_request["contact"] = list(contact)
|
||||||
|
|
||||||
|
# No change?
|
||||||
|
if not update_request:
|
||||||
|
return False, dict(account_data)
|
||||||
|
|
||||||
|
# Apply change
|
||||||
|
if self.client.module.check_mode:
|
||||||
|
account_data = dict(account_data)
|
||||||
|
account_data.update(update_request)
|
||||||
|
else:
|
||||||
|
account_data, info = self.client.send_signed_request(
|
||||||
|
self.client.account_uri, update_request
|
||||||
|
)
|
||||||
|
if not isinstance(account_data, Mapping):
|
||||||
|
raise ACMEProtocolException(
|
||||||
|
module=self.client.module,
|
||||||
|
msg="Invalid account updating reply from ACME server",
|
||||||
|
info=info,
|
||||||
|
content_json=account_data,
|
||||||
|
)
|
||||||
|
|
||||||
|
return True, account_data
|
||||||
|
|
||||||
|
|
||||||
|
__all__ = ("ACMEAccount",)
|
||||||
738
plugins/module_utils/_acme/acme.py
Normal file
738
plugins/module_utils/_acme/acme.py
Normal file
@@ -0,0 +1,738 @@
|
|||||||
|
# Copyright (c) 2016 Michael Gruener <michael.gruener@chaosmoon.net>
|
||||||
|
# Copyright (c) 2021 Felix Fontein <felix@fontein.de>
|
||||||
|
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||||
|
# SPDX-License-Identifier: GPL-3.0-or-later
|
||||||
|
|
||||||
|
# Note that this module util is **PRIVATE** to the collection. It can have breaking changes at any time.
|
||||||
|
# Do not use this from other collections or standalone plugins/modules!
|
||||||
|
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import copy
|
||||||
|
import datetime
|
||||||
|
import json
|
||||||
|
import locale
|
||||||
|
import time
|
||||||
|
import typing as t
|
||||||
|
|
||||||
|
from ansible.module_utils.basic import missing_required_lib
|
||||||
|
from ansible.module_utils.common.text.converters import to_bytes
|
||||||
|
from ansible.module_utils.urls import fetch_url
|
||||||
|
from ansible_collections.community.crypto.plugins.module_utils._acme.backend_cryptography import (
|
||||||
|
CRYPTOGRAPHY_ERROR,
|
||||||
|
CRYPTOGRAPHY_MINIMAL_VERSION,
|
||||||
|
CRYPTOGRAPHY_VERSION,
|
||||||
|
HAS_CURRENT_CRYPTOGRAPHY,
|
||||||
|
CryptographyBackend,
|
||||||
|
)
|
||||||
|
from ansible_collections.community.crypto.plugins.module_utils._acme.backend_openssl_cli import (
|
||||||
|
OpenSSLCLIBackend,
|
||||||
|
)
|
||||||
|
from ansible_collections.community.crypto.plugins.module_utils._acme.errors import (
|
||||||
|
ACMEProtocolException,
|
||||||
|
KeyParsingError,
|
||||||
|
ModuleFailException,
|
||||||
|
NetworkException,
|
||||||
|
format_http_status,
|
||||||
|
)
|
||||||
|
from ansible_collections.community.crypto.plugins.module_utils._acme.utils import (
|
||||||
|
compute_cert_id,
|
||||||
|
nopad_b64,
|
||||||
|
parse_retry_after,
|
||||||
|
)
|
||||||
|
from ansible_collections.community.crypto.plugins.module_utils._argspec import (
|
||||||
|
ArgumentSpec,
|
||||||
|
)
|
||||||
|
from ansible_collections.community.crypto.plugins.module_utils._time import (
|
||||||
|
get_now_datetime,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
if t.TYPE_CHECKING:
|
||||||
|
import os
|
||||||
|
|
||||||
|
from ansible.module_utils.basic import AnsibleModule
|
||||||
|
from ansible_collections.community.crypto.plugins.module_utils._acme.account import (
|
||||||
|
ACMEAccount,
|
||||||
|
)
|
||||||
|
from ansible_collections.community.crypto.plugins.module_utils._acme.backends import (
|
||||||
|
CertificateInformation,
|
||||||
|
CryptoBackend,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
# HTTP status codes for which a request is retried by _decode_retry().
# -1 usually means connection problems
RETRY_STATUS_CODES = (-1, 408, 429, 503)

# Maximum number of retries before _decode_retry() gives up.
RETRY_COUNT = 10
|
||||||
|
|
||||||
|
|
||||||
|
def _decode_retry(
    *, module: AnsibleModule, response: t.Any, info: dict[str, t.Any], retry_count: int
) -> bool:
    """
    Decide whether a failed ``fetch_url()`` call should be retried.

    Returns ``False`` immediately when the HTTP status in ``info`` is not
    retryable. Otherwise sleeps for an appropriate delay and returns ``True``
    so the caller can retry. Raises ACMEProtocolException once RETRY_COUNT
    retries have been used up.
    """
    status = info["status"]
    if status not in RETRY_STATUS_CODES:
        return False

    if retry_count >= RETRY_COUNT:
        raise ACMEProtocolException(
            module=module,
            msg=f"Giving up after {RETRY_COUNT} retries",
            info=info,
            response=response,
        )

    # 429 and 503 should have a Retry-After header (https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Retry-After)
    now = get_now_datetime(with_timezone=True)
    try:
        resume_at = parse_retry_after(
            info.get("retry-after", "10"), relative_with_timezone=True, now=now
        )
        delay = (resume_at - now).total_seconds()
        # Clamp the server-suggested delay to a sane window of 1..60 seconds.
        delay = min(max(1, delay), 60)
    except (TypeError, ValueError):
        # Missing or unparseable Retry-After header: fall back to 10 seconds.
        delay = 10
    module.log(
        f"Retrieved a {format_http_status(info['status'])} HTTP status on {info['url']}, retrying in {delay} seconds"
    )

    time.sleep(delay)
    return True
|
||||||
|
|
||||||
|
|
||||||
|
def _assert_fetch_url_success(
|
||||||
|
*,
|
||||||
|
module: AnsibleModule,
|
||||||
|
response: t.Any,
|
||||||
|
info: dict[str, t.Any],
|
||||||
|
allow_redirect: bool = False,
|
||||||
|
allow_client_error: bool = True,
|
||||||
|
allow_server_error: bool = True,
|
||||||
|
) -> None:
|
||||||
|
if info["status"] < 0:
|
||||||
|
raise NetworkException(msg=f"Failure downloading {info['url']}, {info['msg']}")
|
||||||
|
|
||||||
|
if (
|
||||||
|
(300 <= info["status"] < 400 and not allow_redirect)
|
||||||
|
or (400 <= info["status"] < 500 and not allow_client_error)
|
||||||
|
or (info["status"] >= 500 and not allow_server_error)
|
||||||
|
):
|
||||||
|
raise ACMEProtocolException(module=module, info=info, response=response)
|
||||||
|
|
||||||
|
|
||||||
|
def _is_failed(
|
||||||
|
*, info: dict[str, t.Any], expected_status_codes: t.Iterable[int] | None = None
|
||||||
|
) -> bool:
|
||||||
|
if info["status"] < 200 or info["status"] >= 400:
|
||||||
|
return True
|
||||||
|
if (
|
||||||
|
expected_status_codes is not None
|
||||||
|
and info["status"] not in expected_status_codes
|
||||||
|
):
|
||||||
|
return True
|
||||||
|
return False
|
||||||
|
|
||||||
|
|
||||||
|
class ACMEDirectory:
    """
    The ACME server directory. Gives access to the available resources,
    and allows to obtain a Replay-Nonce. The acme_directory URL
    needs to support unauthenticated GET requests; ACME endpoints
    requiring authentication are not supported.
    https://tools.ietf.org/html/rfc8555#section-7.1.1
    """

    def __init__(self, *, module: AnsibleModule, client: ACMEClient) -> None:
        """
        Fetch the directory document from the configured ``acme_directory``
        URL (via an unauthenticated GET) and validate its basic shape.
        """
        self.module = module
        self.directory_root = module.params["acme_directory"]
        self.version = module.params["acme_version"]

        # Unauthenticated GET of the directory document itself.
        self.directory, dummy = client.get_request(self.directory_root, get_only=True)

        self.request_timeout = module.params["request_timeout"]

        # Check whether self.version matches what we expect
        if self.version == 2:
            # RFC 8555 requires these resources to be present in a v2 directory.
            for key in ("newNonce", "newAccount", "newOrder"):
                if key not in self.directory:
                    raise ModuleFailException(
                        "ACME directory does not seem to follow protocol ACME v2"
                    )
            # Make sure that 'meta' is always available
            if "meta" not in self.directory:
                self.directory["meta"] = {}

    def __getitem__(self, key: str) -> t.Any:
        # Dict-style access to directory entries; raises KeyError if missing.
        return self.directory[key]

    def __contains__(self, key: str) -> bool:
        # Membership test for directory entries.
        return key in self.directory

    def get(self, key: str, default_value: t.Any = None) -> t.Any:
        # Dict-style get() with default for directory entries.
        return self.directory.get(key, default_value)

    def get_nonce(self, resource: str | None = None) -> str:
        """
        Fetch a fresh Replay-Nonce via a HEAD request to the ``newNonce``
        endpoint (or to ``resource`` if given). Retries on retryable HTTP
        statuses and on responses that lack the replay-nonce header (up to
        5 times) before raising.
        """
        url = self.directory["newNonce"]
        if resource is not None:
            url = resource
        retry_count = 0
        while True:
            response, info = fetch_url(
                self.module, url, method="HEAD", timeout=self.request_timeout
            )
            # Retryable status (connection problem, 408/429/503): sleep and retry.
            if _decode_retry(
                module=self.module,
                response=response,
                info=info,
                retry_count=retry_count,
            ):
                retry_count += 1
                continue
            if info["status"] not in (200, 204):
                raise NetworkException(
                    f"Failed to get replay-nonce, got status {format_http_status(info['status'])}"
                )
            if "replay-nonce" in info:
                return info["replay-nonce"]
            # Successful status but no nonce header: log and retry (bounded).
            self.module.log(
                f"HEAD to {url} did return status {format_http_status(info['status'])}, but no replay-nonce header!"
            )
            if retry_count >= 5:
                raise ACMEProtocolException(
                    module=self.module,
                    msg="Was not able to obtain nonce, giving up after 5 retries",
                    info=info,
                    response=response,
                )
            retry_count += 1

    def has_renewal_info_endpoint(self) -> bool:
        # True when the server advertises the ACME Renewal Information
        # (draft-ietf-acme-ari) "renewalInfo" resource in its directory.
        return "renewalInfo" in self.directory
|
||||||
|
|
||||||
|
|
||||||
|
class ACMEClient:
    """
    ACME client object. Handles the authorized communication with the
    ACME server.
    """

    def __init__(self, *, module: AnsibleModule, backend: CryptoBackend) -> None:
        """
        Set up the client from module parameters: parse the account key (if
        provided), prepare the JWS header, and fetch the server directory.
        """
        # Set to true to enable logging of all signed requests
        self._debug = False

        self.module = module
        self.backend = backend
        self.version = module.params["acme_version"]
        # account_key path and content are mutually exclusive
        self.account_key_file = module.params.get("account_key_src")
        self.account_key_content = module.params.get("account_key_content")
        self.account_key_passphrase = module.params.get("account_key_passphrase")

        # Grab account URI from module parameters.
        # Make sure empty string is treated as None.
        self.account_uri = module.params.get("account_uri") or None

        self.request_timeout = module.params["request_timeout"]

        # Parsed key data, JWK, and JWS protected-header template; stay None
        # when no account key was provided.
        self.account_key_data = None
        self.account_jwk = None
        self.account_jws_header = None
        if self.account_key_file is not None or self.account_key_content is not None:
            try:
                self.account_key_data = self.parse_key(
                    key_file=self.account_key_file,
                    key_content=self.account_key_content,
                    passphrase=self.account_key_passphrase,
                )
            except KeyParsingError as e:
                raise ModuleFailException(
                    f"Error while parsing account key: {e.msg}"
                ) from e
            self.account_jwk = self.account_key_data["jwk"]
            self.account_jws_header = {
                "alg": self.account_key_data["alg"],
                "jwk": self.account_jwk,
            }
            if self.account_uri:
                # Make sure self.account_jws_header is updated
                self.set_account_uri(self.account_uri)

        self.directory = ACMEDirectory(module=module, client=self)

    def set_account_uri(self, uri: str) -> None:
        """
        Set account URI. For ACME v2, it needs to be used to sending signed
        requests.
        """
        self.account_uri = uri
        if self.account_jws_header:
            # RFC 8555: once the account URI is known, sign with "kid"
            # instead of embedding the full "jwk".
            self.account_jws_header.pop("jwk", None)
            self.account_jws_header["kid"] = self.account_uri

    def parse_key(
        self,
        *,
        key_file: str | os.PathLike | None = None,
        key_content: str | None = None,
        passphrase: str | None = None,
    ) -> dict[str, t.Any]:
        """
        Parses an RSA or Elliptic Curve key file in PEM format and returns key_data.
        In case of an error, raises KeyParsingError.
        """
        if key_file is None and key_content is None:
            raise AssertionError("One of key_file and key_content must be specified!")
        # Delegates the actual parsing to the selected crypto backend.
        return self.backend.parse_key(
            key_file=key_file, key_content=key_content, passphrase=passphrase
        )

    def sign_request(
        self,
        *,
        protected: dict[str, t.Any],
        payload: str | dict[str, t.Any] | None,
        key_data: dict[str, t.Any],
        encode_payload: bool = True,
    ) -> dict[str, t.Any]:
        """
        Signs an ACME request.

        ``payload is None`` produces a POST-as-GET body (empty payload).
        When ``encode_payload`` is true, ``payload`` is JSON-serialized
        before base64url-encoding; otherwise it is used as-is.
        """
        try:
            if payload is None:
                # POST-as-GET
                payload64 = ""
            else:
                # POST
                if encode_payload:
                    payload = self.module.jsonify(payload).encode("utf8")
                payload64 = nopad_b64(to_bytes(payload))
            protected64 = nopad_b64(self.module.jsonify(protected).encode("utf8"))
        except Exception as e:
            raise ModuleFailException(
                f"Failed to encode payload / headers as JSON: {e}"
            ) from e

        return self.backend.sign(
            payload64=payload64, protected64=protected64, key_data=key_data
        )

    def _log(self, msg: str, *, data: t.Any = None) -> None:
        """
        Write arguments to acme.log when logging is enabled.
        """
        if self._debug:
            with open("acme.log", "ab") as f:
                # NOTE(review): "%s" at the end of this strftime format is not a
                # standard directive (glibc interprets it as seconds since the
                # epoch); "%f" (microseconds) may have been intended — confirm.
                timestamp = datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S.%s")
                f.write(f"[{timestamp}] {msg}\n".encode("utf-8"))
                if data is not None:
                    f.write(
                        f"{json.dumps(data, indent=2, sort_keys=True)}\n\n".encode(
                            "utf-8"
                        )
                    )

    @t.overload
    def send_signed_request(
        self,
        url: str,
        payload: str | dict[str, t.Any] | None,
        *,
        key_data: dict[str, t.Any] | None = None,
        jws_header: dict[str, t.Any] | None = None,
        parse_json_result: t.Literal[True] = True,
        encode_payload: bool = True,
        fail_on_error: bool = True,
        error_msg: str | None = None,
        expected_status_codes: t.Iterable[int] | None = None,
    ) -> tuple[dict[str, t.Any], dict[str, t.Any]]: ...

    @t.overload
    def send_signed_request(
        self,
        url: str,
        payload: str | dict[str, t.Any] | None,
        *,
        key_data: dict[str, t.Any] | None = None,
        jws_header: dict[str, t.Any] | None = None,
        parse_json_result: t.Literal[False],
        encode_payload: bool = True,
        fail_on_error: bool = True,
        error_msg: str | None = None,
        expected_status_codes: t.Iterable[int] | None = None,
    ) -> tuple[bytes, dict[str, t.Any]]: ...

    def send_signed_request(
        self,
        url: str,
        payload: str | dict[str, t.Any] | None,
        *,
        key_data: dict[str, t.Any] | None = None,
        jws_header: dict[str, t.Any] | None = None,
        parse_json_result: bool = True,
        encode_payload: bool = True,
        fail_on_error: bool = True,
        error_msg: str | None = None,
        expected_status_codes: t.Iterable[int] | None = None,
    ) -> tuple[dict[str, t.Any] | bytes, dict[str, t.Any]]:
        """
        Sends a JWS signed HTTP POST request to the ACME server and returns
        the response as dictionary (if parse_json_result is True) or in raw form
        (if parse_json_result is False).
        https://tools.ietf.org/html/rfc8555#section-6.2

        If payload is None, a POST-as-GET is performed.
        (https://tools.ietf.org/html/rfc8555#section-6.3)

        Retries on retryable HTTP statuses and on badNonce errors; raises
        ACMEProtocolException on failure when ``fail_on_error`` is true.
        """
        # Fall back to the account key / JWS header prepared in __init__.
        key_data = key_data or self.account_key_data
        if key_data is None:
            raise ModuleFailException("Missing key data")
        jws_header = jws_header or self.account_jws_header
        if jws_header is None:
            raise ModuleFailException("Missing JWS header")
        failed_tries = 0
        while True:
            # Each attempt needs a fresh nonce in the protected header.
            protected = copy.deepcopy(jws_header)
            protected["nonce"] = self.directory.get_nonce()
            protected["url"] = url

            self._log("URL", data=url)
            self._log("protected", data=protected)
            self._log("payload", data=payload)
            data = self.sign_request(
                protected=protected,
                payload=payload,
                key_data=key_data,
                encode_payload=encode_payload,
            )
            self._log("signed request", data=data)
            data = self.module.jsonify(data)

            headers = {
                "Content-Type": "application/jose+json",
            }
            resp, info = fetch_url(
                self.module,
                url,
                data=data,
                headers=headers,
                method="POST",
                timeout=self.request_timeout,
            )
            if _decode_retry(
                module=self.module, response=resp, info=info, retry_count=failed_tries
            ):
                failed_tries += 1
                continue
            _assert_fetch_url_success(module=self.module, response=resp, info=info)
            result = {}

            try:
                # In Python 2, reading from a closed response yields a TypeError.
                # In Python 3, read() simply returns ''
                if resp.closed:
                    raise TypeError
                content = resp.read()
            except (AttributeError, TypeError):
                # fetch_url() stores error bodies under info["body"].
                content = info.pop("body", None)

            if content or not parse_json_result:
                # Decode JSON bodies, and always decode 4xx/5xx bodies so that
                # problem documents (RFC 7807) can be inspected.
                if (
                    parse_json_result
                    and info["content-type"].startswith("application/json")
                ) or 400 <= info["status"] < 600:
                    try:
                        decoded_result = self.module.from_json(content.decode("utf8"))
                        self._log("parsed result", data=decoded_result)
                        # In case of badNonce error, try again (up to 5 times)
                        # (https://tools.ietf.org/html/rfc8555#section-6.7)
                        if all(
                            (
                                400 <= info["status"] < 600,
                                decoded_result.get("type")
                                == "urn:ietf:params:acme:error:badNonce",
                                failed_tries <= 5,
                            )
                        ):
                            failed_tries += 1
                            continue
                        if parse_json_result:
                            result = decoded_result
                        else:
                            result = content
                    except ValueError as exc:
                        raise NetworkException(
                            f"Failed to parse the ACME response: {url} {content}"
                        ) from exc
                else:
                    result = content

            if fail_on_error and _is_failed(
                info=info, expected_status_codes=expected_status_codes
            ):
                raise ACMEProtocolException(
                    module=self.module,
                    msg=error_msg,
                    info=info,
                    content=content,
                    content_json=result if parse_json_result else None,
                )
            return result, info

    @t.overload
    def get_request(
        self,
        uri: str,
        *,
        parse_json_result: t.Literal[True] = True,
        headers: dict[str, str] | None = None,
        get_only: bool = False,
        fail_on_error: bool = True,
        error_msg: str | None = None,
        expected_status_codes: t.Iterable[int] | None = None,
    ) -> tuple[dict[str, t.Any], dict[str, t.Any]]: ...

    @t.overload
    def get_request(
        self,
        uri: str,
        *,
        parse_json_result: t.Literal[False],
        headers: dict[str, str] | None = None,
        get_only: bool = False,
        fail_on_error: bool = True,
        error_msg: str | None = None,
        expected_status_codes: t.Iterable[int] | None = None,
    ) -> tuple[bytes, dict[str, t.Any]]: ...

    def get_request(
        self,
        uri: str,
        *,
        parse_json_result: bool = True,
        headers: dict[str, str] | None = None,
        get_only: bool = False,
        fail_on_error: bool = True,
        error_msg: str | None = None,
        expected_status_codes: t.Iterable[int] | None = None,
    ) -> tuple[dict[str, t.Any] | bytes, dict[str, t.Any]]:
        """
        Perform a GET-like request. Will try POST-as-GET for ACMEv2, with fallback
        to GET if server replies with a status code of 405.
        """
        if not get_only:
            # Try POST-as-GET
            content, info = self.send_signed_request(
                uri, None, parse_json_result=False, fail_on_error=False
            )
            if info["status"] == 405:
                # Instead, do unauthenticated GET
                get_only = True
        else:
            # Do unauthenticated GET
            get_only = True

        if get_only:
            # Perform unauthenticated GET
            retry_count = 0
            while True:
                resp, info = fetch_url(
                    self.module,
                    uri,
                    method="GET",
                    headers=headers,
                    timeout=self.request_timeout,
                )
                if not _decode_retry(
                    module=self.module,
                    response=resp,
                    info=info,
                    retry_count=retry_count,
                ):
                    break
                retry_count += 1

            _assert_fetch_url_success(module=self.module, response=resp, info=info)

            try:
                # In Python 2, reading from a closed response yields a TypeError.
                # In Python 3, read() simply returns ''
                if resp.closed:
                    raise TypeError
                content = resp.read()
            except (AttributeError, TypeError):
                # fetch_url() stores error bodies under info["body"].
                content = info.pop("body", None)

        # Process result
        parsed_json_result = False
        result: dict[str, t.Any] | bytes
        if parse_json_result:
            result = {}
            if content:
                if info["content-type"].startswith("application/json"):
                    try:
                        result = self.module.from_json(content.decode("utf8"))
                        parsed_json_result = True
                    except ValueError as exc:
                        raise NetworkException(
                            f"Failed to parse the ACME response: {uri} {content!r}"
                        ) from exc
                else:
                    # Non-JSON body: hand back the raw bytes.
                    result = content
        else:
            result = content

        if fail_on_error and _is_failed(
            info=info, expected_status_codes=expected_status_codes
        ):
            raise ACMEProtocolException(
                module=self.module,
                msg=error_msg,
                info=info,
                content=content,
                content_json=(
                    t.cast(dict[str, t.Any], result) if parsed_json_result else None
                ),
            )
        return result, info

    def get_renewal_info(
        self,
        *,
        cert_id: str | None = None,
        cert_info: CertificateInformation | None = None,
        cert_filename: str | os.PathLike | None = None,
        cert_content: str | bytes | None = None,
        include_retry_after: bool = False,
        retry_after_relative_with_timezone: bool = True,
    ) -> dict[str, t.Any]:
        """
        Query the ACME Renewal Information (ARI) endpoint for a certificate.

        The certificate is identified by ``cert_id``; if not given, the ID is
        computed from ``cert_info``, ``cert_filename`` or ``cert_content``.
        When ``include_retry_after`` is true and the server sent a parseable
        Retry-After header, it is returned under the ``retryAfter`` key.
        Raises ModuleFailException if the server has no renewalInfo endpoint.
        """
        if not self.directory.has_renewal_info_endpoint():
            raise ModuleFailException(
                "The ACME endpoint does not support ACME Renewal Information retrieval"
            )

        if cert_id is None:
            cert_id = compute_cert_id(
                backend=self.backend,
                cert_info=cert_info,
                cert_filename=cert_filename,
                cert_content=cert_content,
            )
        url = f"{self.directory.directory['renewalInfo'].rstrip('/')}/{cert_id}"

        # ARI endpoints are queried with plain unauthenticated GETs.
        data, info = self.get_request(
            url, parse_json_result=True, fail_on_error=True, get_only=True
        )

        # Include Retry-After header if asked for
        if include_retry_after and "retry-after" in info:
            try:
                data["retryAfter"] = parse_retry_after(
                    info["retry-after"],
                    relative_with_timezone=retry_after_relative_with_timezone,
                )
            except ValueError:
                # Unparseable Retry-After header: omit it rather than fail.
                pass
        return data
|
||||||
|
|
||||||
|
|
||||||
|
def create_default_argspec(
    *,
    with_account: bool = True,
    require_account_key: bool = True,
    with_certificate: bool = False,
) -> ArgumentSpec:
    """
    Provides default argument spec for the options documented in the acme doc fragment.

    ``with_account`` adds the account key/URI options, ``require_account_key``
    makes one of the account key options mandatory, and ``with_certificate``
    adds the CSR options.
    """
    base_options: dict[str, t.Any] = {
        "acme_directory": {"type": "str", "required": True},
        "acme_version": {"type": "int", "choices": [2], "default": 2},
        "validate_certs": {"type": "bool", "default": True},
        "select_crypto_backend": {
            "type": "str",
            "default": "auto",
            "choices": ["auto", "openssl", "cryptography"],
        },
        "request_timeout": {"type": "int", "default": 10},
    }
    spec = ArgumentSpec(argument_spec=base_options)

    if with_account:
        # Account key can be given as a path or inline; the two are exclusive.
        spec.update_argspec(
            account_key_src={"type": "path", "aliases": ["account_key"]},
            account_key_content={"type": "str", "no_log": True},
            account_key_passphrase={"type": "str", "no_log": True},
            account_uri={"type": "str"},
        )
        if require_account_key:
            spec.update(required_one_of=[["account_key_src", "account_key_content"]])
        spec.update(mutually_exclusive=[["account_key_src", "account_key_content"]])

    if with_certificate:
        # CSR can likewise be given as a path or inline, exactly one of them.
        spec.update_argspec(
            csr={"type": "path"},
            csr_content={"type": "str"},
        )
        spec.update(
            required_one_of=[["csr", "csr_content"]],
            mutually_exclusive=[["csr", "csr_content"]],
        )

    return spec
|
||||||
|
|
||||||
|
|
||||||
|
def create_backend(
    module: AnsibleModule, *, needs_acme_v2: bool = True
) -> CryptoBackend:
    """
    Instantiate the crypto backend selected by the module's
    ``select_crypto_backend`` option ("auto" prefers cryptography when a
    recent enough version is importable, else the OpenSSL CLI).

    Fails the module when the requested backend is unavailable or unknown,
    warns when certificate validation is disabled, and resets the locale to
    "C" for reliable date parsing. Returns the backend instance.
    """
    # NOTE(review): the needs_acme_v2 parameter is not used in this function
    # body — confirm whether it is still needed by callers.
    backend = module.params["select_crypto_backend"]

    # Backend autodetect
    if backend == "auto":
        backend = "cryptography" if HAS_CURRENT_CRYPTOGRAPHY else "openssl"

    # Create backend object
    module_backend: CryptoBackend
    if backend == "cryptography":
        if CRYPTOGRAPHY_ERROR is not None:
            # Either we could not import cryptography at all, or there was an unexpected error
            if CRYPTOGRAPHY_VERSION is None:
                msg = missing_required_lib("cryptography")
            else:
                msg = f"Unexpected error while preparing cryptography: {CRYPTOGRAPHY_ERROR.splitlines()[-1]}"
            module.fail_json(msg=msg, exception=CRYPTOGRAPHY_ERROR)
        if not HAS_CURRENT_CRYPTOGRAPHY:
            # We succeeded importing cryptography, but its version is too old.
            mrl = missing_required_lib(
                f"cryptography >= {CRYPTOGRAPHY_MINIMAL_VERSION}"
            )
            module.fail_json(
                msg=f"Found cryptography, but only version {CRYPTOGRAPHY_VERSION}. {mrl}"
            )
        module.debug(
            f"Using cryptography backend (library version {CRYPTOGRAPHY_VERSION})"
        )
        module_backend = CryptographyBackend(module=module)
    elif backend == "openssl":
        module.debug("Using OpenSSL binary backend")
        module_backend = OpenSSLCLIBackend(module=module)
    else:
        module.fail_json(msg=f'Unknown crypto backend "{backend}"!')

    # Check common module parameters
    if not module.params["validate_certs"]:
        module.warn(
            "Disabling certificate validation for communications with ACME endpoint. "
            "This should only be done for testing against a local ACME server for "
            "development purposes, but *never* for production purposes."
        )

    # AnsibleModule() changes the locale, so change it back to C because we rely
    # on datetime.datetime.strptime() when parsing certificate dates.
    locale.setlocale(locale.LC_ALL, "C")

    return module_backend
|
||||||
|
|
||||||
|
|
||||||
|
# Public API of this module util.
__all__ = (
    "ACMEDirectory",
    "ACMEClient",
    "create_default_argspec",
    "create_backend",
)
|
||||||
550
plugins/module_utils/_acme/backend_cryptography.py
Normal file
550
plugins/module_utils/_acme/backend_cryptography.py
Normal file
@@ -0,0 +1,550 @@
|
|||||||
|
# Copyright (c) 2016 Michael Gruener <michael.gruener@chaosmoon.net>
|
||||||
|
# Copyright (c) 2021 Felix Fontein <felix@fontein.de>
|
||||||
|
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||||
|
# SPDX-License-Identifier: GPL-3.0-or-later
|
||||||
|
|
||||||
|
# Note that this module util is **PRIVATE** to the collection. It can have breaking changes at any time.
|
||||||
|
# Do not use this from other collections or standalone plugins/modules!
|
||||||
|
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import base64
|
||||||
|
import binascii
|
||||||
|
import os
|
||||||
|
import traceback
|
||||||
|
import typing as t
|
||||||
|
|
||||||
|
from ansible.module_utils.common.text.converters import to_bytes, to_text
|
||||||
|
from ansible_collections.community.crypto.plugins.module_utils._acme.backends import (
|
||||||
|
CertificateInformation,
|
||||||
|
CryptoBackend,
|
||||||
|
)
|
||||||
|
from ansible_collections.community.crypto.plugins.module_utils._acme.certificates import (
|
||||||
|
ChainMatcher,
|
||||||
|
)
|
||||||
|
from ansible_collections.community.crypto.plugins.module_utils._acme.errors import (
|
||||||
|
BackendException,
|
||||||
|
KeyParsingError,
|
||||||
|
)
|
||||||
|
from ansible_collections.community.crypto.plugins.module_utils._acme.io import read_file
|
||||||
|
from ansible_collections.community.crypto.plugins.module_utils._acme.utils import (
|
||||||
|
nopad_b64,
|
||||||
|
)
|
||||||
|
from ansible_collections.community.crypto.plugins.module_utils._crypto.cryptography_support import (
|
||||||
|
CRYPTOGRAPHY_TIMEZONE,
|
||||||
|
cryptography_name_to_oid,
|
||||||
|
get_not_valid_after,
|
||||||
|
get_not_valid_before,
|
||||||
|
)
|
||||||
|
from ansible_collections.community.crypto.plugins.module_utils._crypto.math import (
|
||||||
|
convert_int_to_bytes,
|
||||||
|
convert_int_to_hex,
|
||||||
|
)
|
||||||
|
from ansible_collections.community.crypto.plugins.module_utils._crypto.pem import (
|
||||||
|
extract_first_pem,
|
||||||
|
)
|
||||||
|
from ansible_collections.community.crypto.plugins.module_utils._crypto.support import (
|
||||||
|
parse_name_field,
|
||||||
|
)
|
||||||
|
from ansible_collections.community.crypto.plugins.module_utils._time import (
|
||||||
|
add_or_remove_timezone,
|
||||||
|
)
|
||||||
|
from ansible_collections.community.crypto.plugins.module_utils._version import (
|
||||||
|
LooseVersion,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
# Minimum cryptography library version this backend supports.
CRYPTOGRAPHY_MINIMAL_VERSION = "1.5"

# Formatted import traceback when importing cryptography failed; None on success.
CRYPTOGRAPHY_ERROR = None
try:
    import cryptography
    import cryptography.hazmat.backends
    import cryptography.hazmat.primitives.asymmetric.ec
    import cryptography.hazmat.primitives.asymmetric.padding
    import cryptography.hazmat.primitives.asymmetric.rsa
    import cryptography.hazmat.primitives.asymmetric.utils
    import cryptography.hazmat.primitives.hashes
    import cryptography.hazmat.primitives.hmac
    import cryptography.hazmat.primitives.serialization
    import cryptography.x509
    import cryptography.x509.oid
except ImportError:
    # cryptography is unavailable; record the traceback for error reporting.
    HAS_CURRENT_CRYPTOGRAPHY = False
    CRYPTOGRAPHY_VERSION = None
    CRYPTOGRAPHY_ERROR = traceback.format_exc()
else:
    # cryptography imported fine; check that it is recent enough.
    CRYPTOGRAPHY_VERSION = cryptography.__version__
    HAS_CURRENT_CRYPTOGRAPHY = LooseVersion(CRYPTOGRAPHY_VERSION) >= LooseVersion(
        CRYPTOGRAPHY_MINIMAL_VERSION
    )
|
||||||
|
|
||||||
|
if t.TYPE_CHECKING:
|
||||||
|
import datetime
|
||||||
|
|
||||||
|
from ansible.module_utils.basic import AnsibleModule
|
||||||
|
from ansible_collections.community.crypto.plugins.module_utils._acme.certificates import (
|
||||||
|
CertificateChain,
|
||||||
|
Criterium,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
class CryptographyChainMatcher(ChainMatcher):
    """
    ChainMatcher implementation backed by the Python ``cryptography`` library.

    Matches alternate certificate chains offered by an ACME server against a
    user-provided ``Criterium`` (subject, issuer, subject/authority key
    identifiers, and which chain certificates to test).
    """

    @staticmethod
    def _parse_key_identifier(
        *,
        key_identifier: str | None,
        name: str,
        criterium_idx: int | None,
        module: AnsibleModule,
    ) -> bytes | None:
        # Convert a (possibly colon-separated) hex key identifier string to raw
        # bytes. On parse failure a warning is emitted and None is returned, so
        # the corresponding criterium part is simply ignored.
        if key_identifier:
            try:
                return binascii.unhexlify(key_identifier.replace(":", ""))
            except Exception:
                if criterium_idx is None:
                    module.warn(
                        f"Criterium has invalid {name} value. Ignoring criterium."
                    )
                else:
                    module.warn(
                        f"Criterium {criterium_idx} in select_chain has invalid {name} value. "
                        "Ignoring criterium."
                    )
        return None

    def __init__(self, *, criterium: Criterium, module: AnsibleModule) -> None:
        # Precompute normalized match data from the criterium:
        # subject/issuer as (OID, text value) pairs, key identifiers as bytes.
        self.criterium = criterium
        self.test_certificates = criterium.test_certificates
        self.subject: list[tuple[cryptography.x509.oid.ObjectIdentifier, str]] = []
        self.issuer: list[tuple[cryptography.x509.oid.ObjectIdentifier, str]] = []
        if criterium.subject:
            self.subject = [
                (cryptography_name_to_oid(k), to_text(v))
                for k, v in parse_name_field(
                    criterium.subject, name_field_name="subject"
                )
            ]
        if criterium.issuer:
            self.issuer = [
                (cryptography_name_to_oid(k), to_text(v))
                for k, v in parse_name_field(criterium.issuer, name_field_name="issuer")
            ]
        self.subject_key_identifier = CryptographyChainMatcher._parse_key_identifier(
            key_identifier=criterium.subject_key_identifier,
            name="subject_key_identifier",
            criterium_idx=criterium.index,
            module=module,
        )
        self.authority_key_identifier = CryptographyChainMatcher._parse_key_identifier(
            key_identifier=criterium.authority_key_identifier,
            name="authority_key_identifier",
            criterium_idx=criterium.index,
            module=module,
        )
        self.module = module

    def _match_subject(
        self,
        *,
        x509_subject: cryptography.x509.Name,
        match_subject: list[tuple[cryptography.x509.oid.ObjectIdentifier, str]],
    ) -> bool:
        # Every requested (OID, value) pair must be present in the
        # certificate's name; additional attributes in the certificate are
        # allowed. An empty match list therefore always matches.
        for oid, value in match_subject:
            found = False
            for attribute in x509_subject:
                if attribute.oid == oid and value == to_text(attribute.value):
                    found = True
                    break
            if not found:
                return False
        return True

    def match(self, *, certificate: CertificateChain) -> bool:
        """
        Check whether an alternate chain matches the specified criterium.
        """
        chain = certificate.chain
        # Depending on the criterium, test only the last or only the first
        # certificate of the chain instead of all of them.
        if self.test_certificates == "last":
            chain = chain[-1:]
        elif self.test_certificates == "first":
            chain = chain[:1]
        for cert in chain:
            try:
                x509 = cryptography.x509.load_pem_x509_certificate(to_bytes(cert))
                matches = True
                if not self._match_subject(
                    x509_subject=x509.subject, match_subject=self.subject
                ):
                    matches = False
                if not self._match_subject(
                    x509_subject=x509.issuer, match_subject=self.issuer
                ):
                    matches = False
                if self.subject_key_identifier:
                    try:
                        ext_ski = x509.extensions.get_extension_for_class(
                            cryptography.x509.SubjectKeyIdentifier
                        )
                        if self.subject_key_identifier != ext_ski.value.digest:
                            matches = False
                    except cryptography.x509.ExtensionNotFound:
                        # A requested key identifier that is absent from the
                        # certificate counts as a mismatch.
                        matches = False
                if self.authority_key_identifier:
                    try:
                        ext_aki = x509.extensions.get_extension_for_class(
                            cryptography.x509.AuthorityKeyIdentifier
                        )
                        if (
                            self.authority_key_identifier
                            != ext_aki.value.key_identifier
                        ):
                            matches = False
                    except cryptography.x509.ExtensionNotFound:
                        matches = False
                if matches:
                    return True
            except Exception as e:
                # An unparseable certificate is reported but does not abort
                # the search over the remaining chain certificates.
                self.module.warn(f"Error while loading certificate {cert}: {e}")
        return False
|
||||||
|
|
||||||
|
|
||||||
|
class CryptographyBackend(CryptoBackend):
    """
    ACME crypto backend implemented with the Python ``cryptography`` library.

    Provides key parsing, JWS signing, MAC key creation, and CSR/certificate
    inspection for the ACME modules.
    """

    def __init__(self, *, module: AnsibleModule) -> None:
        # CRYPTOGRAPHY_TIMEZONE tells the base class whether the installed
        # cryptography version returns timezone-aware datetimes.
        super().__init__(module=module, with_timezone=CRYPTOGRAPHY_TIMEZONE)

    def parse_key(
        self,
        *,
        key_file: str | os.PathLike | None = None,
        key_content: str | None = None,
        passphrase: str | None = None,
    ) -> dict[str, t.Any]:
        """
        Parses an RSA or Elliptic Curve key file in PEM format and returns key_data.
        Raises KeyParsingError in case of errors.
        """
        # If key_content is not given, read key_file
        if key_content is None:
            if key_file is None:
                raise KeyParsingError(
                    "one of key_file and key_content must be specified"
                )
            b_key_content = read_file(key_file)
        else:
            b_key_content = to_bytes(key_content)
        # Parse key
        try:
            key = cryptography.hazmat.primitives.serialization.load_pem_private_key(
                b_key_content,
                password=to_bytes(passphrase) if passphrase is not None else None,
            )
        except Exception as e:
            raise KeyParsingError(f"error while loading key: {e}") from e
        if isinstance(key, cryptography.hazmat.primitives.asymmetric.rsa.RSAPrivateKey):
            rsa_pk = key.public_key().public_numbers()
            # JWK per RFC 7517/7518: e and n are base64url-encoded big-endian
            # integers without padding.
            return {
                "key_obj": key,
                "type": "rsa",
                "alg": "RS256",
                "jwk": {
                    "kty": "RSA",
                    "e": nopad_b64(convert_int_to_bytes(rsa_pk.e)),
                    "n": nopad_b64(convert_int_to_bytes(rsa_pk.n)),
                },
                "hash": "sha256",
            }
        if isinstance(
            key, cryptography.hazmat.primitives.asymmetric.ec.EllipticCurvePrivateKey
        ):
            ec_pk = key.public_key().public_numbers()
            # Map the curve to the matching JWS algorithm, hash, and the byte
            # size of one coordinate of a curve point.
            if ec_pk.curve.name == "secp256r1":
                bits = 256
                alg = "ES256"
                hashalg = "sha256"
                point_size = 32
                curve = "P-256"
            elif ec_pk.curve.name == "secp384r1":
                bits = 384
                alg = "ES384"
                hashalg = "sha384"
                point_size = 48
                curve = "P-384"
            elif ec_pk.curve.name == "secp521r1":
                # Not yet supported on Let's Encrypt side, see
                # https://github.com/letsencrypt/boulder/issues/2217
                bits = 521
                alg = "ES512"
                hashalg = "sha512"
                point_size = 66
                curve = "P-521"
            else:
                raise KeyParsingError(f"unknown elliptic curve: {ec_pk.curve.name}")
            num_bytes = (bits + 7) // 8
            return {
                "key_obj": key,
                "type": "ec",
                "alg": alg,
                "jwk": {
                    "kty": "EC",
                    "crv": curve,
                    "x": nopad_b64(convert_int_to_bytes(ec_pk.x, count=num_bytes)),
                    "y": nopad_b64(convert_int_to_bytes(ec_pk.y, count=num_bytes)),
                },
                "hash": hashalg,
                "point_size": point_size,
            }
        raise KeyParsingError(f'unknown key type "{type(key)}"')

    def sign(
        self, *, payload64: str, protected64: str, key_data: dict[str, t.Any]
    ) -> dict[str, t.Any]:
        """
        Create a JWS object for the given base64url-encoded protected header
        and payload. ``key_data`` must come from ``parse_key()`` or
        ``create_mac_key()``.
        """
        sign_payload = f"{protected64}.{payload64}".encode("utf8")
        hashalg: type[cryptography.hazmat.primitives.hashes.HashAlgorithm]
        if "mac_obj" in key_data:
            # HMAC-based key produced by create_mac_key().
            mac = key_data["mac_obj"]()
            mac.update(sign_payload)
            signature = mac.finalize()
        elif isinstance(
            key_data["key_obj"],
            cryptography.hazmat.primitives.asymmetric.rsa.RSAPrivateKey,
        ):
            # RS256: RSASSA-PKCS1-v1_5 with SHA-256.
            padding = cryptography.hazmat.primitives.asymmetric.padding.PKCS1v15()
            hashalg = cryptography.hazmat.primitives.hashes.SHA256
            signature = key_data["key_obj"].sign(sign_payload, padding, hashalg())
        elif isinstance(
            key_data["key_obj"],
            cryptography.hazmat.primitives.asymmetric.ec.EllipticCurvePrivateKey,
        ):
            # NOTE: key_data["hash"] is always one of these three values for EC
            # keys created by parse_key(); other values would leave hashalg
            # unbound.
            if key_data["hash"] == "sha256":
                hashalg = cryptography.hazmat.primitives.hashes.SHA256
            elif key_data["hash"] == "sha384":
                hashalg = cryptography.hazmat.primitives.hashes.SHA384
            elif key_data["hash"] == "sha512":
                hashalg = cryptography.hazmat.primitives.hashes.SHA512
            ecdsa = cryptography.hazmat.primitives.asymmetric.ec.ECDSA(hashalg())
            r, s = cryptography.hazmat.primitives.asymmetric.utils.decode_dss_signature(
                key_data["key_obj"].sign(sign_payload, ecdsa)
            )
            # JWS expects the raw concatenation r || s, each zero-padded to the
            # curve's point size (RFC 7518, section 3.4).
            rr = convert_int_to_hex(r, digits=2 * key_data["point_size"])
            ss = convert_int_to_hex(s, digits=2 * key_data["point_size"])
            signature = binascii.unhexlify(rr) + binascii.unhexlify(ss)
        else:
            raise AssertionError("Can never be reached")  # pragma: no cover

        return {
            "protected": protected64,
            "payload": payload64,
            "signature": nopad_b64(signature),
        }

    def create_mac_key(self, *, alg: str, key: str) -> dict[str, t.Any]:
        """Create a MAC key."""
        hashalg: type[cryptography.hazmat.primitives.hashes.HashAlgorithm]
        # Map the JWS MAC algorithm to the hash and its minimum key length in
        # bytes.
        if alg == "HS256":
            hashalg = cryptography.hazmat.primitives.hashes.SHA256
            hashbytes = 32
        elif alg == "HS384":
            hashalg = cryptography.hazmat.primitives.hashes.SHA384
            hashbytes = 48
        elif alg == "HS512":
            hashalg = cryptography.hazmat.primitives.hashes.SHA512
            hashbytes = 64
        else:
            raise BackendException(
                f"Unsupported MAC key algorithm for cryptography backend: {alg}"
            )
        key_bytes = base64.urlsafe_b64decode(key)
        if len(key_bytes) < hashbytes:
            raise BackendException(
                f"{alg} key must be at least {hashbytes} bytes long (after Base64 decoding)"
            )
        return {
            # Factory producing a fresh HMAC context per signature, since an
            # HMAC object cannot be reused after finalize().
            "mac_obj": lambda: cryptography.hazmat.primitives.hmac.HMAC(
                key_bytes, hashalg()
            ),
            "type": "hmac",
            "alg": alg,
            "jwk": {
                "kty": "oct",
                "k": key,
            },
        }

    def get_ordered_csr_identifiers(
        self,
        *,
        csr_filename: str | os.PathLike | None = None,
        csr_content: str | bytes | None = None,
    ) -> list[tuple[str, str]]:
        """
        Return a list of requested identifiers (CN and SANs) for the CSR.
        Each identifier is a pair (type, identifier), where type is either
        'dns' or 'ip'.

        The list is deduplicated, and if a CN (common name) is present, it
        will be returned as the first element in the result.
        """
        if csr_content is None:
            if csr_filename is None:
                raise BackendException(
                    "One of csr_content and csr_filename has to be provided"
                )
            b_csr_content = read_file(csr_filename)
        else:
            b_csr_content = to_bytes(csr_content)
        csr = cryptography.x509.load_pem_x509_csr(b_csr_content)

        # ``identifiers`` deduplicates, ``result`` preserves insertion order.
        identifiers = set()
        result = []

        def add_identifier(identifier: tuple[str, str]) -> None:
            if identifier in identifiers:
                return
            identifiers.add(identifier)
            result.append(identifier)

        # Subject CN is processed first so it ends up at the front of the
        # result; SANs follow.
        for sub in csr.subject:
            if sub.oid == cryptography.x509.oid.NameOID.COMMON_NAME:
                add_identifier(("dns", t.cast(str, sub.value)))
        for extension in csr.extensions:
            if (
                extension.oid
                == cryptography.x509.oid.ExtensionOID.SUBJECT_ALTERNATIVE_NAME
            ):
                for name in extension.value:
                    if isinstance(name, cryptography.x509.DNSName):
                        add_identifier(("dns", name.value))
                    elif isinstance(name, cryptography.x509.IPAddress):
                        add_identifier(("ip", name.value.compressed))
                    else:
                        raise BackendException(
                            f"Found unsupported SAN identifier {name}"
                        )
        return result

    def get_csr_identifiers(
        self,
        *,
        csr_filename: str | os.PathLike | None = None,
        csr_content: str | bytes | None = None,
    ) -> set[tuple[str, str]]:
        """
        Return a set of requested identifiers (CN and SANs) for the CSR.
        Each identifier is a pair (type, identifier), where type is either
        'dns' or 'ip'.
        """
        return set(
            self.get_ordered_csr_identifiers(
                csr_filename=csr_filename, csr_content=csr_content
            )
        )

    def get_cert_days(
        self,
        *,
        cert_filename: str | os.PathLike | None = None,
        cert_content: str | bytes | None = None,
        now: datetime.datetime | None = None,
    ) -> int:
        """
        Return the days the certificate in cert_filename remains valid and -1
        if the file was not found. If cert_filename contains more than one
        certificate, only the first one will be considered.

        If now is not specified, datetime.datetime.now() is used.
        """
        if cert_filename is not None:
            # A filename takes precedence; a missing file yields -1 below.
            cert_content = None
            if os.path.exists(cert_filename):
                cert_content = read_file(cert_filename)
        else:
            cert_content = to_bytes(cert_content)

        if cert_content is None:
            return -1

        # Make sure we have at most one PEM. Otherwise cryptography 36.0.0 will barf.
        b_cert_content = to_bytes(extract_first_pem(to_text(cert_content)) or "")

        try:
            cert = cryptography.x509.load_pem_x509_certificate(b_cert_content)
        except Exception as e:
            if cert_filename is None:
                raise BackendException(f"Cannot parse certificate: {e}") from e
            raise BackendException(
                f"Cannot parse certificate {cert_filename}: {e}"
            ) from e

        if now is None:
            now = self.get_now()
        else:
            # Align the caller-supplied timestamp with the (naive or aware)
            # datetimes the installed cryptography version produces.
            now = add_or_remove_timezone(now, with_timezone=CRYPTOGRAPHY_TIMEZONE)
        return (get_not_valid_after(cert) - now).days

    def create_chain_matcher(self, *, criterium: Criterium) -> ChainMatcher:
        """
        Given a Criterium object, creates a ChainMatcher object.
        """
        return CryptographyChainMatcher(criterium=criterium, module=self.module)

    def get_cert_information(
        self,
        *,
        cert_filename: str | os.PathLike | None = None,
        cert_content: str | bytes | None = None,
    ) -> CertificateInformation:
        """
        Return some information on a X.509 certificate as a CertificateInformation object.
        """
        if cert_filename is not None:
            cert_content = read_file(cert_filename)
        else:
            cert_content = to_bytes(cert_content)

        # Make sure we have at most one PEM. Otherwise cryptography 36.0.0 will barf.
        b_cert_content = to_bytes(extract_first_pem(to_text(cert_content)) or "")

        try:
            cert = cryptography.x509.load_pem_x509_certificate(b_cert_content)
        except Exception as e:
            if cert_filename is None:
                raise BackendException(f"Cannot parse certificate: {e}") from e
            raise BackendException(
                f"Cannot parse certificate {cert_filename}: {e}"
            ) from e

        # Both key identifier extensions are optional; absent ones are
        # reported as None.
        ski = None
        try:
            ext_ski = cert.extensions.get_extension_for_class(
                cryptography.x509.SubjectKeyIdentifier
            )
            ski = ext_ski.value.digest
        except cryptography.x509.ExtensionNotFound:
            pass

        aki = None
        try:
            ext_aki = cert.extensions.get_extension_for_class(
                cryptography.x509.AuthorityKeyIdentifier
            )
            aki = ext_aki.value.key_identifier
        except cryptography.x509.ExtensionNotFound:
            pass

        return CertificateInformation(
            not_valid_after=get_not_valid_after(cert),
            not_valid_before=get_not_valid_before(cert),
            serial_number=cert.serial_number,
            subject_key_identifier=ski,
            authority_key_identifier=aki,
        )
|
||||||
|
|
||||||
|
|
||||||
|
# Public API of this module util. Fix: "CRYPTOGRAPHY_ERROR" was listed twice.
__all__ = (
    "CRYPTOGRAPHY_MINIMAL_VERSION",
    "CRYPTOGRAPHY_ERROR",
    "CRYPTOGRAPHY_VERSION",
    "CryptographyBackend",
)
|
||||||
625
plugins/module_utils/_acme/backend_openssl_cli.py
Normal file
625
plugins/module_utils/_acme/backend_openssl_cli.py
Normal file
@@ -0,0 +1,625 @@
|
|||||||
|
# Copyright (c) 2016 Michael Gruener <michael.gruener@chaosmoon.net>
|
||||||
|
# Copyright (c) 2021 Felix Fontein <felix@fontein.de>
|
||||||
|
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||||
|
# SPDX-License-Identifier: GPL-3.0-or-later
|
||||||
|
|
||||||
|
# Note that this module util is **PRIVATE** to the collection. It can have breaking changes at any time.
|
||||||
|
# Do not use this from other collections or standalone plugins/modules!
|
||||||
|
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import base64
|
||||||
|
import binascii
|
||||||
|
import datetime
|
||||||
|
import ipaddress
|
||||||
|
import os
|
||||||
|
import re
|
||||||
|
import tempfile
|
||||||
|
import traceback
|
||||||
|
import typing as t
|
||||||
|
|
||||||
|
from ansible.module_utils.common.text.converters import to_bytes, to_text
|
||||||
|
from ansible_collections.community.crypto.plugins.module_utils._acme.backends import (
|
||||||
|
CertificateInformation,
|
||||||
|
CryptoBackend,
|
||||||
|
)
|
||||||
|
from ansible_collections.community.crypto.plugins.module_utils._acme.errors import (
|
||||||
|
BackendException,
|
||||||
|
KeyParsingError,
|
||||||
|
)
|
||||||
|
from ansible_collections.community.crypto.plugins.module_utils._acme.utils import (
|
||||||
|
nopad_b64,
|
||||||
|
)
|
||||||
|
from ansible_collections.community.crypto.plugins.module_utils._crypto.math import (
|
||||||
|
convert_bytes_to_int,
|
||||||
|
)
|
||||||
|
from ansible_collections.community.crypto.plugins.module_utils._time import (
|
||||||
|
ensure_utc_timezone,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
if t.TYPE_CHECKING:
|
||||||
|
from ansible.module_utils.basic import AnsibleModule
|
||||||
|
from ansible_collections.community.crypto.plugins.module_utils._acme.certificates import (
|
||||||
|
Criterium,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
# Environment overrides for every invoked ``openssl`` command: force the C
# locale so the textual output this backend parses with regular expressions is
# stable regardless of the user's locale settings.
_OPENSSL_ENVIRONMENT_UPDATE = {
    "LANG": "C",
    "LC_ALL": "C",
    "LC_MESSAGES": "C",
    "LC_CTYPE": "C",
}
|
||||||
|
|
||||||
|
|
||||||
|
def _extract_date(
    out_text: str, *, name: str, cert_filename_suffix: str = ""
) -> datetime.datetime:
    """
    Extract the date labeled ``name`` (e.g. ``Not After``) from openssl text
    output and return it as a UTC-aware datetime.

    :param out_text: Text output of an ``openssl ... -text`` invocation.
    :param name: Label of the date field to look for.
    :param cert_filename_suffix: Extra text appended to error messages
        (e.g. " in <filename>").
    :raises BackendException: If the field is missing or cannot be parsed.
    """
    matcher = re.search(rf"\s+{name}\s*:\s+(.*)", out_text)
    if matcher is None:
        raise BackendException(f"No '{name}' date found{cert_filename_suffix}")
    date_str = matcher.group(1)
    try:
        # For some reason Python's strptime() does not return any timezone information,
        # even though the information is there and a supported timezone for all supported
        # Python implementations (GMT). So we have to modify the datetime object by
        # replacing it by UTC.
        return ensure_utc_timezone(
            datetime.datetime.strptime(date_str, "%b %d %H:%M:%S %Y %Z")
        )
    except ValueError as exc:
        raise BackendException(
            f"Failed to parse '{name}' date{cert_filename_suffix}: {exc}"
        ) from exc
|
||||||
|
|
||||||
|
|
||||||
|
def _decode_octets(octets_text: str) -> bytes:
|
||||||
|
return binascii.unhexlify(re.sub(r"(\s|:)", "", octets_text).encode("utf-8"))
|
||||||
|
|
||||||
|
|
||||||
|
@t.overload
|
||||||
|
def _extract_octets(
|
||||||
|
out_text: str,
|
||||||
|
*,
|
||||||
|
name: str,
|
||||||
|
required: t.Literal[False],
|
||||||
|
potential_prefixes: t.Iterable[str] | None = None,
|
||||||
|
) -> bytes | None: ...
|
||||||
|
|
||||||
|
|
||||||
|
@t.overload
|
||||||
|
def _extract_octets(
|
||||||
|
out_text: str,
|
||||||
|
*,
|
||||||
|
name: str,
|
||||||
|
required: t.Literal[True],
|
||||||
|
potential_prefixes: t.Iterable[str] | None = None,
|
||||||
|
) -> bytes: ...
|
||||||
|
|
||||||
|
|
||||||
|
def _extract_octets(
|
||||||
|
out_text: str,
|
||||||
|
*,
|
||||||
|
name: str,
|
||||||
|
required: bool = True,
|
||||||
|
potential_prefixes: t.Iterable[str] | None = None,
|
||||||
|
) -> bytes | None:
|
||||||
|
part = (
|
||||||
|
f"(?:{'|'.join(re.escape(pp) for pp in potential_prefixes)})"
|
||||||
|
if potential_prefixes
|
||||||
|
else ""
|
||||||
|
)
|
||||||
|
regexp = rf"\s+{name}:\s*\n\s+{part}([A-Fa-f0-9]{{2}}(?::[A-Fa-f0-9]{{2}})*)\s*\n"
|
||||||
|
match = re.search(regexp, out_text, re.MULTILINE | re.DOTALL)
|
||||||
|
if match is not None:
|
||||||
|
return _decode_octets(match.group(1))
|
||||||
|
if not required:
|
||||||
|
return None
|
||||||
|
raise BackendException(f"No '{name}' octet string found")
|
||||||
|
|
||||||
|
|
||||||
|
class OpenSSLCLIBackend(CryptoBackend):
|
||||||
|
    def __init__(
        self, *, module: AnsibleModule, openssl_binary: str | None = None
    ) -> None:
        """
        Create an OpenSSL CLI based crypto backend.

        :param module: AnsibleModule used for running commands and reporting.
        :param openssl_binary: Path to the ``openssl`` binary; if not given it
            is looked up in the PATH (failing the module if not found).
        """
        # The OpenSSL CLI prints timezone-aware dates, hence with_timezone=True.
        super().__init__(module=module, with_timezone=True)
        if openssl_binary is None:
            openssl_binary = module.get_bin_path("openssl", True)
        self.openssl_binary = openssl_binary
|
||||||
|
|
||||||
|
def parse_key(
|
||||||
|
self,
|
||||||
|
*,
|
||||||
|
key_file: str | os.PathLike | None = None,
|
||||||
|
key_content: str | None = None,
|
||||||
|
passphrase: str | None = None,
|
||||||
|
) -> dict[str, t.Any]:
|
||||||
|
"""
|
||||||
|
Parses an RSA or Elliptic Curve key file in PEM format and returns key_data.
|
||||||
|
Raises KeyParsingError in case of errors.
|
||||||
|
"""
|
||||||
|
if passphrase is not None:
|
||||||
|
raise KeyParsingError("openssl backend does not support key passphrases")
|
||||||
|
# If key_file is not given, but key_content, write that to a temporary file
|
||||||
|
if key_file is None:
|
||||||
|
if key_content is None:
|
||||||
|
raise KeyParsingError(
|
||||||
|
"one of key_file and key_content must be specified"
|
||||||
|
)
|
||||||
|
fd, tmpsrc = tempfile.mkstemp()
|
||||||
|
self.module.add_cleanup_file(tmpsrc) # Ansible will delete the file on exit
|
||||||
|
f = os.fdopen(fd, "wb")
|
||||||
|
try:
|
||||||
|
f.write(key_content.encode("utf-8"))
|
||||||
|
key_file = tmpsrc
|
||||||
|
except Exception as err:
|
||||||
|
try:
|
||||||
|
f.close()
|
||||||
|
except Exception:
|
||||||
|
pass
|
||||||
|
raise KeyParsingError(
|
||||||
|
f"failed to create temporary content file: {err}",
|
||||||
|
exception=traceback.format_exc(),
|
||||||
|
) from err
|
||||||
|
f.close()
|
||||||
|
# Parse key
|
||||||
|
account_key_type = None
|
||||||
|
with open(key_file, "r", encoding="utf-8") as fi:
|
||||||
|
for line in fi:
|
||||||
|
m = re.match(
|
||||||
|
r"^\s*-{5,}BEGIN\s+(EC|RSA)\s+PRIVATE\s+KEY-{5,}\s*$", line
|
||||||
|
)
|
||||||
|
if m is not None:
|
||||||
|
account_key_type = m.group(1).lower()
|
||||||
|
break
|
||||||
|
if account_key_type is None:
|
||||||
|
# This happens for example if openssl_privatekey created this key
|
||||||
|
# (as opposed to the OpenSSL binary). For now, we assume this is
|
||||||
|
# an RSA key.
|
||||||
|
# FIXME: add some kind of auto-detection
|
||||||
|
account_key_type = "rsa"
|
||||||
|
if account_key_type not in ("rsa", "ec"):
|
||||||
|
raise KeyParsingError(f'unknown key type "{account_key_type}"')
|
||||||
|
|
||||||
|
openssl_keydump_cmd = [
|
||||||
|
self.openssl_binary,
|
||||||
|
account_key_type,
|
||||||
|
"-in",
|
||||||
|
str(key_file),
|
||||||
|
"-noout",
|
||||||
|
"-text",
|
||||||
|
]
|
||||||
|
rc, out, stderr = self.module.run_command(
|
||||||
|
openssl_keydump_cmd,
|
||||||
|
check_rc=False,
|
||||||
|
environ_update=_OPENSSL_ENVIRONMENT_UPDATE,
|
||||||
|
)
|
||||||
|
if rc != 0:
|
||||||
|
raise BackendException(
|
||||||
|
f"Error while running {' '.join(openssl_keydump_cmd)}: {stderr}"
|
||||||
|
)
|
||||||
|
|
||||||
|
out_text = to_text(out, errors="surrogate_or_strict")
|
||||||
|
|
||||||
|
if account_key_type == "rsa":
|
||||||
|
matcher = re.search(
|
||||||
|
r"modulus:\n\s+00:([a-f0-9\:\s]+?)\npublicExponent",
|
||||||
|
out_text,
|
||||||
|
re.MULTILINE | re.DOTALL,
|
||||||
|
)
|
||||||
|
if matcher is None:
|
||||||
|
raise KeyParsingError("cannot parse RSA key: modulus not found")
|
||||||
|
pub_hex = matcher.group(1)
|
||||||
|
|
||||||
|
matcher = re.search(
|
||||||
|
r"\npublicExponent: ([0-9]+)", out_text, re.MULTILINE | re.DOTALL
|
||||||
|
)
|
||||||
|
if matcher is None:
|
||||||
|
raise KeyParsingError("cannot parse RSA key: public exponent not found")
|
||||||
|
pub_exp = matcher.group(1)
|
||||||
|
pub_exp = f"{int(pub_exp):x}"
|
||||||
|
if len(pub_exp) % 2:
|
||||||
|
pub_exp = f"0{pub_exp}"
|
||||||
|
|
||||||
|
return {
|
||||||
|
"key_file": str(key_file),
|
||||||
|
"type": "rsa",
|
||||||
|
"alg": "RS256",
|
||||||
|
"jwk": {
|
||||||
|
"kty": "RSA",
|
||||||
|
"e": nopad_b64(binascii.unhexlify(pub_exp.encode("utf-8"))),
|
||||||
|
"n": nopad_b64(_decode_octets(pub_hex)),
|
||||||
|
},
|
||||||
|
"hash": "sha256",
|
||||||
|
}
|
||||||
|
if account_key_type == "ec":
|
||||||
|
pub_data = re.search(
|
||||||
|
r"pub:\s*\n\s+04:([a-f0-9\:\s]+?)\nASN1 OID: (\S+)(?:\nNIST CURVE: (\S+))?",
|
||||||
|
out_text,
|
||||||
|
re.MULTILINE | re.DOTALL,
|
||||||
|
)
|
||||||
|
if pub_data is None:
|
||||||
|
raise KeyParsingError("cannot parse elliptic curve key")
|
||||||
|
pub_hex = _decode_octets(pub_data.group(1))
|
||||||
|
asn1_oid_curve = pub_data.group(2).lower()
|
||||||
|
nist_curve = pub_data.group(3).lower() if pub_data.group(3) else None
|
||||||
|
if asn1_oid_curve == "prime256v1" or nist_curve == "p-256":
|
||||||
|
bits = 256
|
||||||
|
alg = "ES256"
|
||||||
|
hashalg = "sha256"
|
||||||
|
point_size = 32
|
||||||
|
curve = "P-256"
|
||||||
|
elif asn1_oid_curve == "secp384r1" or nist_curve == "p-384":
|
||||||
|
bits = 384
|
||||||
|
alg = "ES384"
|
||||||
|
hashalg = "sha384"
|
||||||
|
point_size = 48
|
||||||
|
curve = "P-384"
|
||||||
|
elif asn1_oid_curve == "secp521r1" or nist_curve == "p-521":
|
||||||
|
# Not yet supported on Let's Encrypt side, see
|
||||||
|
# https://github.com/letsencrypt/boulder/issues/2217
|
||||||
|
bits = 521
|
||||||
|
alg = "ES512"
|
||||||
|
hashalg = "sha512"
|
||||||
|
point_size = 66
|
||||||
|
curve = "P-521"
|
||||||
|
else:
|
||||||
|
raise KeyParsingError(
|
||||||
|
f"unknown elliptic curve: {asn1_oid_curve} / {nist_curve}"
|
||||||
|
)
|
||||||
|
num_bytes = (bits + 7) // 8
|
||||||
|
if len(pub_hex) != 2 * num_bytes:
|
||||||
|
raise KeyParsingError(
|
||||||
|
f"bad elliptic curve point ({asn1_oid_curve} / {nist_curve})"
|
||||||
|
)
|
||||||
|
return {
|
||||||
|
"key_file": key_file,
|
||||||
|
"type": "ec",
|
||||||
|
"alg": alg,
|
||||||
|
"jwk": {
|
||||||
|
"kty": "EC",
|
||||||
|
"crv": curve,
|
||||||
|
"x": nopad_b64(pub_hex[:num_bytes]),
|
||||||
|
"y": nopad_b64(pub_hex[num_bytes:]),
|
||||||
|
},
|
||||||
|
"hash": hashalg,
|
||||||
|
"point_size": point_size,
|
||||||
|
}
|
||||||
|
raise KeyParsingError(
|
||||||
|
f"Internal error: unexpected account_key_type = {account_key_type!r}"
|
||||||
|
)
|
||||||
|
|
||||||
|
    def sign(
        self, *, payload64: str, protected64: str, key_data: dict[str, t.Any]
    ) -> dict[str, t.Any]:
        """
        Create a JWS object for the given base64url-encoded protected header
        and payload by invoking ``openssl dgst``. ``key_data`` must come from
        ``parse_key()`` or ``create_mac_key()``.
        """
        sign_payload = f"{protected64}.{payload64}".encode("utf8")
        if key_data["type"] == "hmac":
            # openssl dgst -mac hmac expects the key as hex.
            hex_key = (
                binascii.hexlify(base64.urlsafe_b64decode(key_data["jwk"]["k"]))
            ).decode("ascii")
            cmd_postfix = [
                "-mac",
                "hmac",
                "-macopt",
                f"hexkey:{hex_key}",
                "-binary",
            ]
        else:
            cmd_postfix = ["-sign", key_data["key_file"]]
        openssl_sign_cmd = [
            self.openssl_binary,
            "dgst",
            f"-{key_data['hash']}",
        ] + cmd_postfix

        rc, out, err = self.module.run_command(
            openssl_sign_cmd,
            data=sign_payload,
            check_rc=False,
            binary_data=True,
            environ_update=_OPENSSL_ENVIRONMENT_UPDATE,
        )
        if rc != 0:
            raise BackendException(
                f"Error while running {' '.join(openssl_sign_cmd)}: {err}"
            )

        if key_data["type"] == "ec":
            # openssl produces a DER-encoded ECDSA signature; JWS needs the raw
            # r || s concatenation. Parse the DER dump to extract r and s.
            dummy, der_out, dummy = self.module.run_command(
                [self.openssl_binary, "asn1parse", "-inform", "DER"],
                data=out,
                binary_data=True,
                environ_update=_OPENSSL_ENVIRONMENT_UPDATE,
            )
            expected_len = 2 * key_data["point_size"]
            sig = re.findall(
                rf"prim:\s+INTEGER\s+:([0-9A-F]{{1,{expected_len}}})\n",
                to_text(der_out, errors="surrogate_or_strict"),
            )
            if len(sig) != 2:
                der_output = to_text(der_out, errors="surrogate_or_strict")
                raise BackendException(
                    f"failed to generate Elliptic Curve signature; cannot parse DER output: {der_output}"
                )
            # Zero-pad r and s to the curve's point size.
            sig[0] = (expected_len - len(sig[0])) * "0" + sig[0]
            sig[1] = (expected_len - len(sig[1])) * "0" + sig[1]
            out = binascii.unhexlify(sig[0]) + binascii.unhexlify(sig[1])

        return {
            "protected": protected64,
            "payload": payload64,
            "signature": nopad_b64(to_bytes(out)),
        }
|
||||||
|
|
||||||
|
def create_mac_key(self, *, alg: str, key: str) -> dict[str, t.Any]:
|
||||||
|
"""Create a MAC key."""
|
||||||
|
if alg == "HS256":
|
||||||
|
hashalg = "sha256"
|
||||||
|
hashbytes = 32
|
||||||
|
elif alg == "HS384":
|
||||||
|
hashalg = "sha384"
|
||||||
|
hashbytes = 48
|
||||||
|
elif alg == "HS512":
|
||||||
|
hashalg = "sha512"
|
||||||
|
hashbytes = 64
|
||||||
|
else:
|
||||||
|
raise BackendException(
|
||||||
|
f"Unsupported MAC key algorithm for OpenSSL backend: {alg}"
|
||||||
|
)
|
||||||
|
key_bytes = base64.urlsafe_b64decode(key)
|
||||||
|
if len(key_bytes) < hashbytes:
|
||||||
|
raise BackendException(
|
||||||
|
f"{alg} key must be at least {hashbytes} bytes long (after Base64 decoding)"
|
||||||
|
)
|
||||||
|
return {
|
||||||
|
"type": "hmac",
|
||||||
|
"alg": alg,
|
||||||
|
"jwk": {
|
||||||
|
"kty": "oct",
|
||||||
|
"k": key,
|
||||||
|
},
|
||||||
|
"hash": hashalg,
|
||||||
|
}
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def _normalize_ip(ip: str) -> str:
|
||||||
|
try:
|
||||||
|
return ipaddress.ip_address(ip).compressed
|
||||||
|
except ValueError:
|
||||||
|
# We do not want to error out on something IPAddress() cannot parse
|
||||||
|
return ip
|
||||||
|
|
||||||
|
def get_ordered_csr_identifiers(
    self,
    *,
    csr_filename: str | os.PathLike | None = None,
    csr_content: str | bytes | None = None,
) -> list[tuple[str, str]]:
    """
    Return a list of requested identifiers (CN and SANs) for the CSR.
    Each identifier is a pair (type, identifier), where type is either
    'dns' or 'ip'.

    The list is deduplicated, and if a CN (common name) is present, it will
    be returned as the first element in the result.

    Raises BackendException if ``openssl req`` fails or a SAN entry has an
    unsupported type.
    """
    filename = csr_filename
    data = None
    if csr_content is not None:
        # Inline CSR content is piped to openssl via stdin.
        filename = "/dev/stdin"
        data = to_bytes(csr_content)

    # Dump the CSR in human-readable text form; the identifiers are parsed
    # out of that text with regular expressions below.
    openssl_csr_cmd = [
        self.openssl_binary,
        "req",
        "-in",
        str(filename),
        "-noout",
        "-text",
    ]
    rc, out, err = self.module.run_command(
        openssl_csr_cmd,
        data=data,
        check_rc=False,
        binary_data=True,
        environ_update=_OPENSSL_ENVIRONMENT_UPDATE,
    )
    if rc != 0:
        raise BackendException(
            f"Error while running {' '.join(openssl_csr_cmd)}: {err}"
        )

    # Deduplicate while preserving insertion order: the set tracks what has
    # been seen, the list keeps the order (CN first, then SANs).
    identifiers = set()
    result = []

    def add_identifier(identifier: tuple[str, str]) -> None:
        if identifier in identifiers:
            return
        identifiers.add(identifier)
        result.append(identifier)

    # The CN (if any) is emitted first.
    common_name = re.search(
        r"Subject:.* CN\s?=\s?([^\s,;/]+)",
        to_text(out, errors="surrogate_or_strict"),
    )
    if common_name is not None:
        add_identifier(("dns", common_name.group(1)))
    subject_alt_names = re.search(
        r"X509v3 Subject Alternative Name: (?:critical)?\n +([^\n]+)\n",
        to_text(out, errors="surrogate_or_strict"),
        re.MULTILINE | re.DOTALL,
    )
    if subject_alt_names is not None:
        for san in subject_alt_names.group(1).split(", "):
            # OpenSSL prints SANs as "DNS:...", "IP:..." or "IP Address:..."
            # depending on the version; IPs are normalized to compressed form.
            if san.lower().startswith("dns:"):
                add_identifier(("dns", san[4:]))
            elif san.lower().startswith("ip:"):
                add_identifier(("ip", self._normalize_ip(san[3:])))
            elif san.lower().startswith("ip address:"):
                add_identifier(("ip", self._normalize_ip(san[11:])))
            else:
                raise BackendException(f'Found unsupported SAN identifier "{san}"')
    return result
|
||||||
|
|
||||||
|
def get_csr_identifiers(
    self,
    *,
    csr_filename: str | os.PathLike | None = None,
    csr_content: str | bytes | None = None,
) -> set[tuple[str, str]]:
    """
    Return a set of requested identifiers (CN and SANs) for the CSR.
    Each identifier is a pair (type, identifier), where type is either
    'dns' or 'ip'.
    """
    ordered = self.get_ordered_csr_identifiers(
        csr_filename=csr_filename, csr_content=csr_content
    )
    return set(ordered)
|
||||||
|
|
||||||
|
def get_cert_days(
    self,
    *,
    cert_filename: str | os.PathLike | None = None,
    cert_content: str | bytes | None = None,
    now: datetime.datetime | None = None,
) -> int:
    """
    Return the days the certificate in cert_filename remains valid and -1
    if the file was not found. If cert_filename contains more than one
    certificate, only the first one will be considered.

    If now is not specified, datetime.datetime.now() is used.

    Raises BackendException if ``openssl x509`` fails or its output does not
    contain a parseable "Not After" date.
    """
    filename = cert_filename
    data = None
    if cert_content is not None:
        # Inline content is piped to openssl via stdin.
        filename = "/dev/stdin"
        data = to_bytes(cert_content)
        cert_filename_suffix = ""
    elif cert_filename is not None:
        if not os.path.exists(cert_filename):
            return -1
        # Used in error messages to point at the offending file.
        cert_filename_suffix = f" in {cert_filename}"
    else:
        # Neither a filename nor inline content was provided.
        return -1

    openssl_cert_cmd = [
        self.openssl_binary,
        "x509",
        "-in",
        str(filename),
        "-noout",
        "-text",
    ]
    rc, out, err = self.module.run_command(
        openssl_cert_cmd,
        data=data,
        check_rc=False,
        binary_data=True,
        environ_update=_OPENSSL_ENVIRONMENT_UPDATE,
    )
    if rc != 0:
        raise BackendException(
            f"Error while running {' '.join(openssl_cert_cmd)}: {err}"
        )

    out_text = to_text(out, errors="surrogate_or_strict")
    # Parse the expiry ("Not After") date out of the text dump.
    not_after = _extract_date(
        out_text, name="Not After", cert_filename_suffix=cert_filename_suffix
    )
    if now is None:
        now = self.get_now()
    else:
        # Normalize to UTC so naive/aware datetime subtraction cannot fail.
        now = ensure_utc_timezone(now)
    return (not_after - now).days
|
||||||
|
|
||||||
|
def create_chain_matcher(self, *, criterium: Criterium) -> t.NoReturn:
    """
    Given a Criterium object, creates a ChainMatcher object.

    Not supported by the OpenSSL CLI backend; always raises BackendException,
    since chain matching needs certificate parsing that only the
    cryptography backend implements.
    """
    raise BackendException(
        'Alternate chain matching can only be used with the "cryptography" backend.'
    )
|
||||||
|
|
||||||
|
def get_cert_information(
    self,
    *,
    cert_filename: str | os.PathLike | None = None,
    cert_content: str | bytes | None = None,
) -> CertificateInformation:
    """
    Return some information on a X.509 certificate as a CertificateInformation object.

    The certificate is dumped with ``openssl x509 -text`` and the validity
    dates, serial number and key identifiers are parsed from the text output.
    Raises BackendException on openssl failure or unparseable output.
    """
    filename = cert_filename
    data = None
    if cert_filename is not None:
        # Used in error messages to point at the offending file.
        cert_filename_suffix = f" in {cert_filename}"
    else:
        # NOTE(review): this branch assumes cert_content is not None when
        # cert_filename is None — confirm callers always pass one of the two.
        filename = "/dev/stdin"
        data = to_bytes(cert_content)
        cert_filename_suffix = ""

    openssl_cert_cmd = [
        self.openssl_binary,
        "x509",
        "-in",
        str(filename),
        "-noout",
        "-text",
    ]
    rc, out, err = self.module.run_command(
        openssl_cert_cmd,
        data=data,
        check_rc=False,
        binary_data=True,
        environ_update=_OPENSSL_ENVIRONMENT_UPDATE,
    )
    if rc != 0:
        raise BackendException(
            f"Error while running {' '.join(openssl_cert_cmd)}: {err}"
        )

    out_text = to_text(out, errors="surrogate_or_strict")

    not_after = _extract_date(
        out_text, name="Not After", cert_filename_suffix=cert_filename_suffix
    )
    not_before = _extract_date(
        out_text, name="Not Before", cert_filename_suffix=cert_filename_suffix
    )

    # Small serial numbers are printed by openssl in decimal form
    # ("Serial Number: 1234"); larger ones only as hex octets.
    sn = re.search(
        r" Serial Number: ([0-9]+)",
        to_text(out, errors="surrogate_or_strict"),
        re.MULTILINE | re.DOTALL,
    )
    if sn:
        serial = int(sn.group(1))
    else:
        serial = convert_bytes_to_int(
            _extract_octets(out_text, name="Serial Number", required=True)
        )

    # Both key identifier extensions are optional in a certificate.
    ski = _extract_octets(
        out_text, name="X509v3 Subject Key Identifier", required=False
    )
    aki = _extract_octets(
        out_text,
        name="X509v3 Authority Key Identifier",
        required=False,
        # Older openssl versions prefix the AKI value with "keyid:".
        potential_prefixes=["keyid:", ""],
    )

    return CertificateInformation(
        not_valid_after=not_after,
        not_valid_before=not_before,
        serial_number=serial,
        subject_key_identifier=ski,
        authority_key_identifier=aki,
    )
|
||||||
|
|
||||||
|
|
||||||
|
__all__ = ("OpenSSLCLIBackend",)
|
||||||
230
plugins/module_utils/_acme/backends.py
Normal file
230
plugins/module_utils/_acme/backends.py
Normal file
@@ -0,0 +1,230 @@
|
|||||||
|
# Copyright (c) 2016 Michael Gruener <michael.gruener@chaosmoon.net>
|
||||||
|
# Copyright (c) 2021 Felix Fontein <felix@fontein.de>
|
||||||
|
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||||
|
# SPDX-License-Identifier: GPL-3.0-or-later
|
||||||
|
|
||||||
|
# Note that this module util is **PRIVATE** to the collection. It can have breaking changes at any time.
|
||||||
|
# Do not use this from other collections or standalone plugins/modules!
|
||||||
|
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import abc
|
||||||
|
import datetime
|
||||||
|
import re
|
||||||
|
import typing as t
|
||||||
|
|
||||||
|
from ansible_collections.community.crypto.plugins.module_utils._acme.errors import (
|
||||||
|
BackendException,
|
||||||
|
)
|
||||||
|
from ansible_collections.community.crypto.plugins.module_utils._crypto.basic import (
|
||||||
|
OpenSSLObjectError,
|
||||||
|
)
|
||||||
|
from ansible_collections.community.crypto.plugins.module_utils._time import (
|
||||||
|
UTC,
|
||||||
|
ensure_utc_timezone,
|
||||||
|
from_epoch_seconds,
|
||||||
|
get_epoch_seconds,
|
||||||
|
get_now_datetime,
|
||||||
|
get_relative_time_option,
|
||||||
|
remove_timezone,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
if t.TYPE_CHECKING:
|
||||||
|
import os
|
||||||
|
|
||||||
|
from ansible.module_utils.basic import AnsibleModule
|
||||||
|
from ansible_collections.community.crypto.plugins.module_utils._acme.certificates import (
|
||||||
|
ChainMatcher,
|
||||||
|
Criterium,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
class CertificateInformation(t.NamedTuple):
    """Relevant data extracted from an X.509 certificate by a crypto backend."""

    not_valid_after: datetime.datetime  # expiry ("Not After")
    not_valid_before: datetime.datetime  # start of validity ("Not Before")
    serial_number: int
    subject_key_identifier: bytes | None  # SKI extension value, if present
    authority_key_identifier: bytes | None  # AKI extension value, if present
|
||||||
|
|
||||||
|
|
||||||
|
# Splits an RFC 3339 timestamp into (date-time, fractional seconds incl.
# leading dot or empty, timezone designator) groups.
_FRACTIONAL_MATCHER = re.compile(
    r"^(\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2})(|\.\d+)(Z|[+-]\d{2}:?\d{2}.*)$"
)
|
||||||
|
|
||||||
|
|
||||||
|
def _reduce_fractional_digits(timestamp_str: str) -> str:
    """
    Truncate the fractional seconds of an RFC 3339 timestamp to at most 6 digits.

    Raises BackendException when the string does not look like an RFC 3339
    timestamp at all.
    """
    # RFC 3339 (https://www.rfc-editor.org/info/rfc3339)
    match = _FRACTIONAL_MATCHER.match(timestamp_str)
    if match is None:
        raise BackendException(f"Cannot parse ISO 8601 timestamp {timestamp_str!r}")
    base, fraction, offset = match.groups()
    # 7 chars = '.' plus 6 digits. Python's datetime only resolves
    # microseconds, while e.g. Boulder (written in Go) may emit nanosecond
    # precision that strptime would choke on.
    truncated = fraction if len(fraction) <= 7 else fraction[:7]
    return f"{base}{truncated}{offset}"
|
||||||
|
|
||||||
|
|
||||||
|
def _parse_acme_timestamp(
    timestamp_str: str, *, with_timezone: bool
) -> datetime.datetime:
    """
    Parse an RFC 3339 timestamp by trying the supported format variants in turn.

    The result is UTC-aware when ``with_timezone`` is true, naive otherwise.
    Raises BackendException when no variant matches.
    """
    # RFC 3339 (https://www.rfc-editor.org/info/rfc3339)
    timestamp_str = _reduce_fractional_digits(timestamp_str)
    formats = (
        "%Y-%m-%dT%H:%M:%SZ",
        "%Y-%m-%dT%H:%M:%S.%fZ",
        "%Y-%m-%dT%H:%M:%S%z",
        "%Y-%m-%dT%H:%M:%S.%f%z",
    )
    for fmt in formats:
        try:
            parsed = datetime.datetime.strptime(timestamp_str, fmt)
        except ValueError:
            continue
        return (
            ensure_utc_timezone(parsed) if with_timezone else remove_timezone(parsed)
        )
    raise BackendException(f"Cannot parse ISO 8601 timestamp {timestamp_str!r}")
|
||||||
|
|
||||||
|
|
||||||
|
class CryptoBackend(metaclass=abc.ABCMeta):
    """
    Abstract base class of the ACME crypto backends.

    Concrete implementations provide key parsing, JWS signing, CSR and
    certificate inspection, and chain matching; shared timestamp handling
    lives in this base class.
    """

    def __init__(self, *, module: AnsibleModule, with_timezone: bool = False) -> None:
        # When with_timezone is true, all datetimes produced by this backend
        # are timezone-aware (UTC); otherwise they are naive.
        self.module = module
        self._with_timezone = with_timezone

    def get_now(self) -> datetime.datetime:
        """Return the current time, aware or naive depending on the backend setting."""
        return get_now_datetime(with_timezone=self._with_timezone)

    def parse_acme_timestamp(self, timestamp_str: str) -> datetime.datetime:
        """Parse an RFC 3339 timestamp as used in ACME protocol messages."""
        # RFC 3339 (https://www.rfc-editor.org/info/rfc3339)
        return _parse_acme_timestamp(timestamp_str, with_timezone=self._with_timezone)

    def parse_module_parameter(self, *, value: str, name: str) -> datetime.datetime:
        """
        Parse a module parameter as an absolute or relative timestamp.

        Raises BackendException if the value cannot be parsed.
        """
        try:
            result = get_relative_time_option(
                value, input_name=name, with_timezone=self._with_timezone
            )
            if result is None:
                raise BackendException(f"Invalid value for {name}: {value!r}")
            return result
        except OpenSSLObjectError as exc:
            # Re-wrap crypto errors so callers only need BackendException.
            raise BackendException(str(exc)) from exc

    def interpolate_timestamp(
        self,
        timestamp_start: datetime.datetime,
        timestamp_end: datetime.datetime,
        *,
        percentage: float,
    ) -> datetime.datetime:
        """Return the instant lying at ``percentage`` (0.0-1.0) between start and end."""
        start = get_epoch_seconds(timestamp_start)
        end = get_epoch_seconds(timestamp_end)
        return from_epoch_seconds(
            start + percentage * (end - start), with_timezone=self._with_timezone
        )

    def get_utc_datetime(self, *args, **kwargs) -> datetime.datetime:
        """
        Construct a datetime like ``datetime.datetime(...)``, normalized to the
        backend's timezone mode (UTC-aware or naive).
        """
        kwargs_ext: dict[str, t.Any] = dict(kwargs)
        # The 8th positional argument of datetime.datetime() is tzinfo;
        # only inject UTC when the caller did not pass their own tzinfo.
        if self._with_timezone and ("tzinfo" not in kwargs_ext and len(args) < 8):
            kwargs_ext["tzinfo"] = UTC
        result = datetime.datetime(*args, **kwargs_ext)
        if self._with_timezone and ("tzinfo" in kwargs or len(args) >= 8):
            # A caller-supplied tzinfo need not be UTC; convert it.
            result = ensure_utc_timezone(result)
        return result

    @abc.abstractmethod
    def parse_key(
        self,
        *,
        key_file: str | os.PathLike | None = None,
        key_content: str | None = None,
        passphrase: str | None = None,
    ) -> dict[str, t.Any]:
        """
        Parses an RSA or Elliptic Curve key file in PEM format and returns key_data.
        Raises KeyParsingError in case of errors.
        """

    @abc.abstractmethod
    def sign(
        self, *, payload64: str, protected64: str, key_data: dict[str, t.Any]
    ) -> dict[str, t.Any]:
        """Sign the JWS protected header and payload with the given key data."""
        pass

    @abc.abstractmethod
    def create_mac_key(self, *, alg: str, key: str) -> dict[str, t.Any]:
        """Create a MAC key."""

    @abc.abstractmethod
    def get_ordered_csr_identifiers(
        self,
        *,
        csr_filename: str | os.PathLike | None = None,
        csr_content: str | bytes | None = None,
    ) -> list[tuple[str, str]]:
        """
        Return a list of requested identifiers (CN and SANs) for the CSR.
        Each identifier is a pair (type, identifier), where type is either
        'dns' or 'ip'.

        The list is deduplicated, and if a CNAME is present, it will be returned
        as the first element in the result.
        """

    @abc.abstractmethod
    def get_csr_identifiers(
        self,
        *,
        csr_filename: str | os.PathLike | None = None,
        csr_content: str | bytes | None = None,
    ) -> set[tuple[str, str]]:
        """
        Return a set of requested identifiers (CN and SANs) for the CSR.
        Each identifier is a pair (type, identifier), where type is either
        'dns' or 'ip'.
        """

    @abc.abstractmethod
    def get_cert_days(
        self,
        *,
        cert_filename: str | os.PathLike | None = None,
        cert_content: str | bytes | None = None,
        now: datetime.datetime | None = None,
    ) -> int:
        """
        Return the days the certificate in cert_filename remains valid and -1
        if the file was not found. If cert_filename contains more than one
        certificate, only the first one will be considered.

        If now is not specified, datetime.datetime.now() is used.
        """

    @abc.abstractmethod
    def create_chain_matcher(self, *, criterium: Criterium) -> ChainMatcher:
        """
        Given a Criterium object, creates a ChainMatcher object.
        """

    @abc.abstractmethod
    def get_cert_information(
        self,
        *,
        cert_filename: str | os.PathLike | None = None,
        cert_content: str | bytes | None = None,
    ) -> CertificateInformation:
        """
        Return some information on a X.509 certificate as a CertificateInformation object.
        """
|
||||||
|
|
||||||
|
|
||||||
|
__all__ = ("CertificateInformation", "CryptoBackend")
|
||||||
419
plugins/module_utils/_acme/certificate.py
Normal file
419
plugins/module_utils/_acme/certificate.py
Normal file
@@ -0,0 +1,419 @@
|
|||||||
|
# Copyright (c) 2024 Felix Fontein <felix@fontein.de>
|
||||||
|
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||||
|
# SPDX-License-Identifier: GPL-3.0-or-later
|
||||||
|
|
||||||
|
# Note that this module util is **PRIVATE** to the collection. It can have breaking changes at any time.
|
||||||
|
# Do not use this from other collections or standalone plugins/modules!
|
||||||
|
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import os
|
||||||
|
import typing as t
|
||||||
|
|
||||||
|
from ansible_collections.community.crypto.plugins.module_utils._acme.account import (
|
||||||
|
ACMEAccount,
|
||||||
|
)
|
||||||
|
from ansible_collections.community.crypto.plugins.module_utils._acme.acme import (
|
||||||
|
ACMEClient,
|
||||||
|
)
|
||||||
|
from ansible_collections.community.crypto.plugins.module_utils._acme.certificates import (
|
||||||
|
CertificateChain,
|
||||||
|
Criterium,
|
||||||
|
)
|
||||||
|
from ansible_collections.community.crypto.plugins.module_utils._acme.challenges import (
|
||||||
|
Authorization,
|
||||||
|
wait_for_validation,
|
||||||
|
)
|
||||||
|
from ansible_collections.community.crypto.plugins.module_utils._acme.errors import (
|
||||||
|
ModuleFailException,
|
||||||
|
)
|
||||||
|
from ansible_collections.community.crypto.plugins.module_utils._acme.io import (
|
||||||
|
write_file,
|
||||||
|
)
|
||||||
|
from ansible_collections.community.crypto.plugins.module_utils._acme.orders import Order
|
||||||
|
from ansible_collections.community.crypto.plugins.module_utils._acme.utils import (
|
||||||
|
pem_to_der,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
if t.TYPE_CHECKING:
|
||||||
|
from ansible.module_utils.basic import AnsibleModule
|
||||||
|
from ansible_collections.community.crypto.plugins.module_utils._acme.backends import (
|
||||||
|
CryptoBackend,
|
||||||
|
)
|
||||||
|
from ansible_collections.community.crypto.plugins.module_utils._acme.certificates import (
|
||||||
|
ChainMatcher,
|
||||||
|
)
|
||||||
|
from ansible_collections.community.crypto.plugins.module_utils._acme.challenges import (
|
||||||
|
Challenge,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
class ACMECertificateClient:
|
||||||
|
"""
|
||||||
|
ACME v2 client class. Uses an ACME account object and a CSR to
|
||||||
|
start and validate ACME challenges and download the respective
|
||||||
|
certificates.
|
||||||
|
"""
|
||||||
|
|
||||||
|
def __init__(
    self,
    *,
    module: AnsibleModule,
    backend: CryptoBackend,
    client: ACMEClient | None = None,
    account: ACMEAccount | None = None,
) -> None:
    """
    Set up the certificate client from the module parameters.

    Creates ACMEClient/ACMEAccount objects when not supplied, verifies that
    the ACME account already exists (it is never created here), and extracts
    the ordered identifier list from the CSR if one was provided.

    Raises ModuleFailException when the account does not exist / is
    deactivated, or when the CSR file is missing.
    """
    self.module = module
    self.version = module.params["acme_version"]
    self.csr = module.params.get("csr")
    self.csr_content = module.params.get("csr_content")
    if client is None:
        client = ACMEClient(module=module, backend=backend)
    self.client = client
    if account is None:
        account = ACMEAccount(client=self.client)
    self.account = account
    self.order_uri = module.params.get("order_uri")
    # Optional parameters: fall back to defaults when the calling module
    # does not declare them in its argument spec.
    self.order_creation_error_strategy = module.params.get(
        "order_creation_error_strategy", "auto"
    )
    self.order_creation_max_retries = module.params.get(
        "order_creation_max_retries", 3
    )

    # Make sure account exists
    dummy, account_data = self.account.setup_account(allow_creation=False)
    if account_data is None:
        raise ModuleFailException(msg="Account does not exist or is deactivated.")

    if self.csr is not None and not os.path.exists(self.csr):
        raise ModuleFailException(f"CSR {self.csr} not found")

    # Extract list of identifiers from CSR
    if self.csr is not None or self.csr_content is not None:
        self.identifiers: list[tuple[str, str]] | None = (
            self.client.backend.get_ordered_csr_identifiers(
                csr_filename=self.csr, csr_content=self.csr_content
            )
        )
    else:
        self.identifiers = None
|
||||||
|
|
||||||
|
def parse_select_chain(
    self, select_chain: list[dict[str, t.Any]] | None
) -> list[ChainMatcher]:
    """
    Convert raw ``select_chain`` module parameters into ChainMatcher objects.

    Criteria that fail to parse are skipped with a warning instead of
    aborting the module.
    """
    matchers: list[ChainMatcher] = []
    if not select_chain:
        return matchers
    for index, raw_criterium in enumerate(select_chain):
        try:
            matchers.append(
                self.client.backend.create_chain_matcher(
                    criterium=Criterium(criterium=raw_criterium, index=index)
                )
            )
        except ValueError as exc:
            self.module.warn(
                f"Error while parsing criterium: {exc}. Ignoring criterium."
            )
    return matchers
|
||||||
|
|
||||||
|
def load_order(self) -> Order:
    """Fetch the order at ``self.order_uri``, including its authorizations."""
    if not self.order_uri:
        raise ModuleFailException("The order URI has not been provided")
    fetched_order = Order.from_url(client=self.client, url=self.order_uri)
    fetched_order.load_authorizations(client=self.client)
    return fetched_order
|
||||||
|
|
||||||
|
def create_order(
    self, *, replaces_cert_id: str | None = None, profile: str | None = None
) -> Order:
    """
    Create a new order for this client's CSR identifiers.

    Stores the new order's URL in ``self.order_uri`` and loads the order's
    authorizations before returning it.
    """
    if self.identifiers is None:
        raise ModuleFailException("No identifiers have been provided")
    new_order = Order.create_with_error_handling(
        client=self.client,
        identifiers=self.identifiers,
        error_strategy=self.order_creation_error_strategy,
        error_max_retries=self.order_creation_max_retries,
        replaces_cert_id=replaces_cert_id,
        profile=profile,
        message_callback=self.module.warn,
    )
    self.order_uri = new_order.url
    new_order.load_authorizations(client=self.client)
    return new_order
|
||||||
|
|
||||||
|
def get_challenges_data(
    self, order: Order
) -> tuple[list[dict[str, t.Any]], dict[str, list[str]]]:
    """
    Get challenge details.

    Return a tuple of generic challenge details, and specialized DNS challenge
    details (a mapping from DNS record name to the list of TXT values to set).
    """
    data: list[dict[str, t.Any]] = []
    data_dns: dict[str, list[str]] = {}
    dns_challenge_type = "dns-01"
    for authz in order.authorizations.values():
        # Skip valid authentications: their challenges are already valid
        # and do not need to be returned
        if authz.status == "valid":
            continue
        challenge_data = authz.get_challenge_data(client=self.client)
        data.append(
            {
                "identifier": authz.identifier,
                "identifier_type": authz.identifier_type,
                "challenges": challenge_data,
            }
        )
        dns_challenge = challenge_data.get(dns_challenge_type)
        if dns_challenge:
            # setdefault replaces the manual get()/insert dance; several
            # authorizations may share the same DNS record name.
            data_dns.setdefault(dns_challenge["record"], []).append(
                dns_challenge["resource_value"]
            )
    return data, data_dns
|
||||||
|
|
||||||
|
def check_that_authorizations_can_be_used(self, order: Order) -> None:
    """Fail if any authorization of the order is neither valid nor pending."""
    problematic = [
        f"{authz.combined_identifier} (status={authz.status!r})"
        for authz in order.authorizations.values()
        if authz.status not in ("valid", "pending")
    ]
    if problematic:
        raise ModuleFailException(
            "Some of the authorizations for the order are in a bad state, so the order"
            f" can no longer be satisfied: {', '.join(sorted(problematic))}",
        )
|
||||||
|
|
||||||
|
def collect_invalid_authzs(self, order: Order) -> list[Authorization]:
    """Return all authorizations of the order whose status is ``invalid``."""
    invalid_ones = []
    for authz in order.authorizations.values():
        if authz.status == "invalid":
            invalid_ones.append(authz)
    return invalid_ones
|
||||||
|
|
||||||
|
def collect_pending_authzs(self, order: Order) -> list[Authorization]:
    """Return all authorizations of the order whose status is ``pending``."""
    pending_ones = []
    for authz in order.authorizations.values():
        if authz.status == "pending":
            pending_ones.append(authz)
    return pending_ones
|
||||||
|
|
||||||
|
def call_validate(
    self,
    pending_authzs: list[Authorization],
    *,
    get_challenge: t.Callable[[Authorization], str],
    wait: bool = True,
) -> list[tuple[Authorization, str, Challenge | None]]:
    """
    Start validation of the selected challenge for every pending authorization.

    ``get_challenge`` picks the challenge type for each authorization.
    Returns (authorization, challenge type, challenge object or None) tuples
    for the caller to wait on.
    """
    started: list[tuple[Authorization, str, Challenge | None]] = []
    for authz in pending_authzs:
        chosen_type = get_challenge(authz)
        authz.call_validate(
            client=self.client, challenge_type=chosen_type, wait=wait
        )
        matching_challenge = authz.find_challenge(challenge_type=chosen_type)
        started.append((authz, chosen_type, matching_challenge))
    return started
|
||||||
|
|
||||||
|
def wait_for_validation(self, authzs_to_wait_for: list[Authorization]) -> None:
    # Delegate to the module-level wait_for_validation helper (the method
    # name shadows it only in class scope, not inside this body).
    wait_for_validation(authzs=authzs_to_wait_for, client=self.client)
|
||||||
|
|
||||||
|
def _download_alternate_chains(
    self, cert: CertificateChain
) -> list[CertificateChain]:
    """
    Download every alternate chain advertised by the certificate.

    Download failures and empty results are reported as warnings and
    skipped — this is a best-effort operation.
    """
    alternate_chains = []
    for alternate in cert.alternates:
        try:
            alt_cert = CertificateChain.download(client=self.client, url=alternate)
        except ModuleFailException as e:
            self.module.warn(
                f"Error while downloading alternative certificate {alternate}: {e}"
            )
            continue
        if alt_cert.cert is not None:
            alternate_chains.append(alt_cert)
        else:
            self.module.warn(
                f"Error while downloading alternative certificate {alternate}: no certificate found"
            )
    return alternate_chains
|
||||||
|
|
||||||
|
@t.overload
def download_certificate(
    self, order: Order, *, download_all_chains: t.Literal[True] = True
) -> tuple[CertificateChain, list[CertificateChain]]: ...

@t.overload
def download_certificate(
    self, order: Order, *, download_all_chains: t.Literal[False]
) -> tuple[CertificateChain, None]: ...

@t.overload
def download_certificate(
    self, order: Order, *, download_all_chains: bool = True
) -> tuple[CertificateChain, list[CertificateChain] | None]: ...

def download_certificate(
    self, order: Order, *, download_all_chains: bool = True
) -> tuple[CertificateChain, list[CertificateChain] | None]:
    """
    Download certificate from a valid order.

    Returns the primary chain and, when ``download_all_chains`` is true, the
    list of alternate chains (otherwise None). Raises ModuleFailException
    when the order is not valid or yields no certificate.
    """
    if order.status != "valid":
        raise ModuleFailException(
            f"The order must be valid, but has state {order.status!r}!"
        )

    if not order.certificate_uri:
        # Typo fix: error message previously read "crtificate".
        raise ModuleFailException(
            f"Order's certificate URL {order.certificate_uri!r} is empty!"
        )

    cert = CertificateChain.download(client=self.client, url=order.certificate_uri)
    if cert.cert is None:
        raise ModuleFailException(
            f"Certificate at {order.certificate_uri} is empty!"
        )

    alternate_chains = None
    if download_all_chains:
        alternate_chains = self._download_alternate_chains(cert)

    return cert, alternate_chains
|
||||||
|
|
||||||
|
@t.overload
def get_certificate(
    self, order: Order, *, download_all_chains: t.Literal[True] = True
) -> tuple[CertificateChain, list[CertificateChain] | None]: ...

@t.overload
def get_certificate(
    self, order: Order, *, download_all_chains: t.Literal[False]
) -> tuple[CertificateChain, list[CertificateChain] | None]: ...

@t.overload
def get_certificate(
    self, order: Order, *, download_all_chains: bool = True
) -> tuple[CertificateChain, list[CertificateChain] | None]: ...

def get_certificate(
    self, order: Order, *, download_all_chains: bool = True
) -> tuple[CertificateChain, list[CertificateChain] | None]:
    """
    Request a new certificate and downloads it, and optionally all certificate chains.
    First verifies whether all authorizations are valid; if not, aborts with an error.
    """
    if self.csr is None and self.csr_content is None:
        raise ModuleFailException("No CSR has been provided")
    for authz in order.authorizations.values():
        if authz.status != "valid":
            # raise_error reports the authorization's own error details.
            authz.raise_error(
                error_msg=f'Status is {authz.status!r} and not "valid"',
                module=self.module,
            )

    # Finalize the order by submitting the CSR (DER-encoded).
    order.finalize(
        client=self.client,
        csr_der=pem_to_der(pem_filename=self.csr, pem_content=self.csr_content),
    )

    return self.download_certificate(order, download_all_chains=download_all_chains)
|
||||||
|
|
||||||
|
def find_matching_chain(
    self,
    *,
    chains: list[CertificateChain],
    select_chain_matcher: t.Iterable[ChainMatcher],
) -> CertificateChain | None:
    """
    Return the first chain accepted by a matcher, in matcher priority order.

    Matchers are tried one after another; the first chain that a matcher
    accepts wins. Returns None when nothing matches.
    """
    for criterium_idx, matcher in enumerate(select_chain_matcher):
        matched_chain = next(
            (chain for chain in chains if matcher.match(certificate=chain)),
            None,
        )
        if matched_chain is not None:
            self.module.debug(f"Found matching chain for criterium {criterium_idx}")
            return matched_chain
    return None
|
||||||
|
|
||||||
|
def write_cert_chain(
    self,
    *,
    cert: CertificateChain,
    cert_dest: str | os.PathLike | None = None,
    fullchain_dest: str | os.PathLike | None = None,
    chain_dest: str | os.PathLike | None = None,
) -> bool:
    """
    Write the leaf certificate, the full chain, and/or the intermediate chain
    to the given destinations (each only if a destination is provided).

    :param cert: The downloaded certificate chain; its leaf must be present.
    :return: Whether at least one file was changed on disk.
    :raises ValueError: If ``cert`` does not contain a leaf certificate.
    """
    if cert.cert is None:
        raise ValueError("Certificate is not present")

    intermediates = "\n".join(cert.chain)
    # (destination, payload) pairs, written in this fixed order.
    targets = (
        (cert_dest, cert.cert),
        (fullchain_dest, cert.cert + intermediates),
        (chain_dest, intermediates),
    )
    changed = False
    for destination, payload in targets:
        if destination and write_file(
            module=self.module, dest=destination, content=payload.encode("utf8")
        ):
            changed = True
    return changed
|
||||||
|
|
||||||
|
def deactivate_authzs(self, order: Order) -> None:
    """
    Deactivate all valid authorizations of ``order``. Never raises; a warning
    is emitted for every authorization that could not be deactivated.

    https://community.letsencrypt.org/t/authorization-deactivation/19860/2
    https://tools.ietf.org/html/rfc8555#section-7.5.2
    """
    def warn_failed(url_repr) -> None:
        # Single place for the (deliberately non-fatal) failure report.
        self.module.warn(warning=f"Could not deactivate authz object {url_repr}.")

    have_all_objects = len(order.authorization_uris) <= len(order.authorizations)
    if have_all_objects:
        for authz in order.authorizations.values():
            try:
                authz.deactivate(client=self.client)
            except Exception:
                # Deactivation is best-effort; errors are reported below.
                pass
            if authz.status != "deactivated":
                warn_failed(authz.url)
    else:
        # Not every authorization object was downloaded: deactivate via URL.
        for authz_uri in order.authorization_uris:
            deactivated = None
            try:
                deactivated = Authorization.deactivate_url(
                    client=self.client, url=authz_uri
                )
            except Exception:
                # Deactivation is best-effort; errors are reported below.
                pass
            if deactivated is None or deactivated.status != "deactivated":
                warn_failed(authz_uri)
|
||||||
|
|
||||||
|
|
||||||
|
__all__ = ("ACMECertificateClient",)
|
||||||
132
plugins/module_utils/_acme/certificates.py
Normal file
132
plugins/module_utils/_acme/certificates.py
Normal file
@@ -0,0 +1,132 @@
|
|||||||
|
# Copyright (c) 2016 Michael Gruener <michael.gruener@chaosmoon.net>
|
||||||
|
# Copyright (c) 2021 Felix Fontein <felix@fontein.de>
|
||||||
|
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||||
|
# SPDX-License-Identifier: GPL-3.0-or-later
|
||||||
|
|
||||||
|
# Note that this module util is **PRIVATE** to the collection. It can have breaking changes at any time.
|
||||||
|
# Do not use this from other collections or standalone plugins/modules!
|
||||||
|
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import abc
|
||||||
|
import typing as t
|
||||||
|
|
||||||
|
from ansible_collections.community.crypto.plugins.module_utils._acme.errors import (
|
||||||
|
ModuleFailException,
|
||||||
|
)
|
||||||
|
from ansible_collections.community.crypto.plugins.module_utils._acme.utils import (
|
||||||
|
der_to_pem,
|
||||||
|
process_links,
|
||||||
|
)
|
||||||
|
from ansible_collections.community.crypto.plugins.module_utils._crypto.pem import (
|
||||||
|
split_pem_list,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
if t.TYPE_CHECKING:
|
||||||
|
from ansible_collections.community.crypto.plugins.module_utils._acme.acme import (
|
||||||
|
ACMEClient,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
_CertificateChain = t.TypeVar("_CertificateChain", bound="CertificateChain")
|
||||||
|
|
||||||
|
|
||||||
|
class CertificateChain:
    """
    Download and parse the certificate chain.
    https://tools.ietf.org/html/rfc8555#section-7.4.2
    """

    def __init__(self, url: str):
        # Location the chain was (or will be) downloaded from.
        self.url = url
        # Leaf certificate as PEM; None until parsed.
        self.cert: str | None = None
        # Intermediate certificates (PEM), in the order the CA delivered them.
        self.chain: list[str] = []
        # URLs of alternate chains advertised via "alternate" link headers.
        self.alternates: list[str] = []

    @classmethod
    def download(
        cls: t.Type[_CertificateChain], *, client: ACMEClient, url: str
    ) -> _CertificateChain:
        """Fetch ``url`` and parse the returned PEM certificate chain."""
        content, info = client.get_request(
            url,
            parse_json_result=False,
            headers={"Accept": "application/pem-certificate-chain"},
        )

        has_pem_payload = bool(content) and info["content-type"].startswith(
            "application/pem-certificate-chain"
        )
        if not has_pem_payload:
            raise ModuleFailException(
                f"Cannot download certificate chain from {url}, as content type is not application/pem-certificate-chain: {content!r} (headers: {info})"
            )

        result = cls(url)

        # First certificate of the PEM payload is the leaf, the rest the chain.
        pem_certs = split_pem_list(content.decode("utf-8"), keep_inbetween=True)
        if pem_certs:
            result.cert = pem_certs[0]
            result.chain = pem_certs[1:]

        process_links(
            info=info,
            callback=lambda link, relation: result._process_links(  # pylint: disable=protected-access
                client=client, link=link, relation=relation
            ),
        )

        if result.cert is None:
            raise ModuleFailException(
                f"Failed to parse certificate chain download from {url}: {content!r} (headers: {info})"
            )

        return result

    def _process_links(self, *, client: ACMEClient, link: str, relation: str) -> None:
        """Handle one HTTP Link header of the chain download response."""
        if relation == "alternate":
            self.alternates.append(link)
            return
        if relation == "up" and not self.chain:
            # Fall back to link-up headers when the reply contained no chain.
            up_content, up_info = client.get_request(link, parse_json_result=False)
            if up_info["status"] in (200, 201):
                self.chain.append(der_to_pem(up_content))

    def to_json(self) -> dict[str, bytes]:
        """Return cert, chain, and full chain as UTF-8 encoded bytes."""
        if self.cert is None:
            raise ValueError("Has no certificate")
        cert_bytes = self.cert.encode("utf8")
        chain_bytes = ("\n".join(self.chain)).encode("utf8")
        return {
            "cert": cert_bytes,
            "chain": chain_bytes,
            "full_chain": cert_bytes + chain_bytes,
        }
|
||||||
|
|
||||||
|
|
||||||
|
class Criterium:
    """
    One entry of a chain-selection criteria list, as supplied by the user.
    Missing values are represented as ``None``.
    """

    def __init__(self, *, criterium: dict[str, t.Any], index: int):
        # Position of this criterium in the user-supplied list (used in messages).
        self.index = index
        # Which certificates of a chain are tested against the criteria below.
        self.test_certificates: t.Literal["first", "last", "all"] = criterium["test_certificates"]
        self.subject: dict[str, t.Any] | None = criterium["subject"]
        self.issuer: dict[str, t.Any] | None = criterium["issuer"]
        self.subject_key_identifier: str | None = criterium["subject_key_identifier"]
        self.authority_key_identifier: str | None = criterium["authority_key_identifier"]
|
||||||
|
|
||||||
|
|
||||||
|
class ChainMatcher(metaclass=abc.ABCMeta):
    # Abstract predicate interface used to pick a certificate chain out of the
    # chains a CA offers; concrete implementations are defined elsewhere.
    @abc.abstractmethod
    def match(self, *, certificate: CertificateChain) -> bool:
        """
        Check whether a certificate chain (CertificateChain instance) matches.
        """
|
||||||
|
|
||||||
|
|
||||||
|
__all__ = ("CertificateChain", "Criterium", "ChainMatcher")
|
||||||
414
plugins/module_utils/_acme/challenges.py
Normal file
414
plugins/module_utils/_acme/challenges.py
Normal file
@@ -0,0 +1,414 @@
|
|||||||
|
# Copyright (c) 2016 Michael Gruener <michael.gruener@chaosmoon.net>
|
||||||
|
# Copyright (c) 2021 Felix Fontein <felix@fontein.de>
|
||||||
|
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||||
|
# SPDX-License-Identifier: GPL-3.0-or-later
|
||||||
|
|
||||||
|
# Note that this module util is **PRIVATE** to the collection. It can have breaking changes at any time.
|
||||||
|
# Do not use this from other collections or standalone plugins/modules!
|
||||||
|
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import base64
|
||||||
|
import hashlib
|
||||||
|
import ipaddress
|
||||||
|
import json
|
||||||
|
import re
|
||||||
|
import time
|
||||||
|
import typing as t
|
||||||
|
|
||||||
|
from ansible.module_utils.common.text.converters import to_bytes
|
||||||
|
from ansible_collections.community.crypto.plugins.module_utils._acme.errors import (
|
||||||
|
ACMEProtocolException,
|
||||||
|
ModuleFailException,
|
||||||
|
format_error_problem,
|
||||||
|
)
|
||||||
|
from ansible_collections.community.crypto.plugins.module_utils._acme.utils import (
|
||||||
|
nopad_b64,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
if t.TYPE_CHECKING:
|
||||||
|
from ansible.module_utils.basic import AnsibleModule
|
||||||
|
from ansible_collections.community.crypto.plugins.module_utils._acme.acme import (
|
||||||
|
ACMEClient,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
def create_key_authorization(*, client: ACMEClient, token: str) -> str:
    """
    Returns the key authorization for the given token.
    https://tools.ietf.org/html/rfc8555#section-8.1
    """
    # Sorted keys and compact separators give a canonical JSON form of the
    # account key, which is then hashed into the thumbprint.
    canonical_jwk = json.dumps(
        client.account_jwk, sort_keys=True, separators=(",", ":")
    )
    jwk_digest = hashlib.sha256(canonical_jwk.encode("utf8")).digest()
    return ".".join((token, nopad_b64(jwk_digest)))
|
||||||
|
|
||||||
|
|
||||||
|
def combine_identifier(*, identifier_type: str, identifier: str) -> str:
    """Join an identifier type and value into the combined ``type:value`` form."""
    return ":".join((identifier_type, identifier))
|
||||||
|
|
||||||
|
|
||||||
|
def normalize_combined_identifier(identifier: str) -> str:
    """
    Normalize a combined ``type:value`` identifier.

    The value part (DNS name or IP) is lower-cased; the type part is kept as-is.
    """
    id_type, id_value = split_identifier(identifier)
    # Normalize DNS names and IPs
    return combine_identifier(identifier_type=id_type, identifier=id_value.lower())
|
||||||
|
|
||||||
|
|
||||||
|
def split_identifier(identifier: str) -> tuple[str, str]:
    """
    Split a combined ``type:value`` identifier into its two parts.

    Only the first ``:`` separates type from value, so IPv6 values survive.

    :raises ModuleFailException: If ``identifier`` contains no ``:``.
    """
    id_type, separator, id_value = identifier.partition(":")
    if not separator:
        raise ModuleFailException(
            f'Identifier "{identifier}" is not of the form <type>:<identifier>'
        )
    return id_type, id_value
|
||||||
|
|
||||||
|
|
||||||
|
_Challenge = t.TypeVar("_Challenge", bound="Challenge")
|
||||||
|
|
||||||
|
|
||||||
|
class Challenge:
    """
    In-memory representation of one ACME challenge object
    (https://tools.ietf.org/html/rfc8555#section-8).
    """

    def __init__(self, *, data: dict[str, t.Any], url: str) -> None:
        # Raw challenge JSON as delivered by the CA.
        self.data = data

        self.type: str = data["type"]
        self.url = url
        self.status: str = data["status"]
        # Challenge token; may be absent in the CA's data.
        self.token: str | None = data.get("token")

    @classmethod
    def from_json(
        cls: t.Type[_Challenge],
        *,
        client: ACMEClient,
        data: dict[str, t.Any],
        url: str | None = None,
    ) -> _Challenge:
        """Create a Challenge from its JSON ``data``; URL defaults to ``data["url"]``."""
        return cls(data=data, url=url or data["url"])

    def call_validate(self, client: ACMEClient) -> None:
        """
        Ask the CA to start validating this challenge by POSTing the empty
        JSON object to the challenge URL.
        """
        challenge_response: dict[str, t.Any] = {}
        client.send_signed_request(
            self.url,
            challenge_response,
            error_msg="Failed to validate challenge",
            expected_status_codes=[200, 202],
        )

    def to_json(self) -> dict[str, t.Any]:
        """Return a copy of the raw challenge data."""
        return self.data.copy()

    def get_validation_data(
        self, *, client: ACMEClient, identifier_type: str, identifier: str
    ) -> dict[str, t.Any] | None:
        """
        Return the data the user must provision to fulfill this challenge,
        or ``None`` if the challenge type is unsupported or not applicable
        (e.g. dns-01 for a non-DNS identifier, or no token present).
        """
        if self.token is None:
            return None

        # NOTE(review): ACME tokens are base64url, so this substitution is
        # normally a no-op; it defends against unexpected characters from the
        # CA — confirm this matches the intended behavior.
        token = re.sub(r"[^A-Za-z0-9_\-]", "_", self.token)
        key_authorization = create_key_authorization(client=client, token=token)

        if self.type == "http-01":
            # https://tools.ietf.org/html/rfc8555#section-8.3
            return {
                "resource": f".well-known/acme-challenge/{token}",
                "resource_value": key_authorization,
            }

        if self.type == "dns-01":
            if identifier_type != "dns":
                return None
            # https://tools.ietf.org/html/rfc8555#section-8.4
            resource = "_acme-challenge"
            value = nopad_b64(hashlib.sha256(to_bytes(key_authorization)).digest())
            # For wildcard identifiers the '*.' prefix is stripped from the record name.
            record = f"{resource}.{identifier[2:] if identifier.startswith('*.') else identifier}"
            return {
                "resource": resource,
                "resource_value": value,
                "record": record,
            }

        if self.type == "tls-alpn-01":
            # https://www.rfc-editor.org/rfc/rfc8737.html#section-3
            if identifier_type == "ip":
                # IPv4/IPv6 address: use reverse mapping (RFC1034, RFC3596)
                resource = ipaddress.ip_address(identifier).reverse_pointer
                if not resource.endswith("."):
                    resource += "."
            else:
                resource = identifier
            # Note: standard (padded) base64 here, unlike nopad_b64 for dns-01.
            b_value = base64.b64encode(
                hashlib.sha256(to_bytes(key_authorization)).digest()
            )
            return {
                "resource": resource,
                "resource_original": combine_identifier(
                    identifier_type=identifier_type, identifier=identifier
                ),
                "resource_value": b_value,
            }

        # Unknown challenge type: ignore
        return None
|
||||||
|
|
||||||
|
|
||||||
|
_Authorization = t.TypeVar("_Authorization", bound="Authorization")
|
||||||
|
|
||||||
|
|
||||||
|
class Authorization:
    """
    In-memory representation of an ACME authorization object plus the
    operations needed on it (refresh, validate, deactivate).
    https://tools.ietf.org/html/rfc8555#section-7.1.4
    """

    def __init__(self, *, url: str) -> None:
        # URL of the authorization object; also stored as 'uri' in self.data.
        self.url = url

        # Raw JSON as last fetched from the CA; None until loaded by _setup().
        self.data: dict[str, t.Any] | None = None
        # Parsed challenge objects belonging to this authorization.
        self.challenges: list[Challenge] = []
        # Fields extracted from self.data by _setup(); None until loaded.
        self.status: str | None = None
        self.identifier_type: str | None = None
        self.identifier: str | None = None

    def _setup(self, *, client: ACMEClient, data: dict[str, t.Any]) -> None:
        """Populate this object from the authorization JSON ``data``."""
        data["uri"] = self.url
        self.data = data
        # While 'challenges' is a required field, apparently not every CA cares
        # (https://github.com/ansible-collections/community.crypto/issues/824)
        if data.get("challenges"):
            self.challenges = [
                Challenge.from_json(client=client, data=challenge)
                for challenge in data["challenges"]
            ]
        else:
            self.challenges = []
        self.status = data["status"]
        self.identifier = data["identifier"]["value"]
        self.identifier_type = data["identifier"]["type"]
        # Wildcard authorizations carry the base domain plus a 'wildcard' flag.
        if data.get("wildcard", False):
            self.identifier = f"*.{self.identifier}"

    @classmethod
    def from_json(
        cls: t.Type[_Authorization],
        *,
        client: ACMEClient,
        data: dict[str, t.Any],
        url: str,
    ) -> _Authorization:
        """Create an Authorization from already-fetched JSON ``data``."""
        result = cls(url=url)
        result._setup(client=client, data=data)
        return result

    @classmethod
    def from_url(
        cls: t.Type[_Authorization], *, client: ACMEClient, url: str
    ) -> _Authorization:
        """Create an Authorization by downloading it from ``url``."""
        result = cls(url=url)
        result.refresh(client=client)
        return result

    @classmethod
    def create(
        cls: t.Type[_Authorization],
        *,
        client: ACMEClient,
        identifier_type: str,
        identifier: str,
    ) -> _Authorization:
        """
        Create a new authorization for the given identifier (pre-authorization).
        Return the authorization object of the new authorization
        https://tools.ietf.org/html/draft-ietf-acme-acme-02#section-6.4

        :raises ACMEProtocolException: If the endpoint offers no ``newAuthz``.
        """
        new_authz = {
            "identifier": {
                "type": identifier_type,
                "value": identifier,
            },
        }
        # NOTE(review): membership is tested on client.directory.directory, but
        # the URL is read via client.directory[...] — presumably both access
        # the same mapping; confirm against the ACMEDirectory implementation.
        if "newAuthz" not in client.directory.directory:
            raise ACMEProtocolException(
                module=client.module,
                msg="ACME endpoint does not support pre-authorization",
            )
        url = client.directory["newAuthz"]

        result, info = client.send_signed_request(
            url,
            new_authz,
            error_msg="Failed to request challenges",
            expected_status_codes=[200, 201],
        )
        return cls.from_json(client=client, data=result, url=info["location"])

    @property
    def combined_identifier(self) -> str:
        """The identifier in combined ``type:value`` form."""
        if self.identifier_type is None or self.identifier is None:
            raise ValueError("Data not present")
        return combine_identifier(
            identifier_type=self.identifier_type, identifier=self.identifier
        )

    def to_json(self) -> dict[str, t.Any]:
        """Return a copy of the raw authorization data."""
        if self.data is None:
            raise ValueError("Data not present")
        return self.data.copy()

    def refresh(self, *, client: ACMEClient) -> bool:
        """Re-download the authorization; return whether its data changed."""
        result, dummy = client.get_request(self.url)
        changed = self.data != result
        self._setup(client=client, data=result)
        return changed

    def get_challenge_data(self, *, client: ACMEClient) -> dict[str, t.Any]:
        """
        Returns a dict with the data for all proposed (and supported) challenges
        of the given authorization, keyed by challenge type.
        """
        if self.identifier_type is None or self.identifier is None:
            raise ValueError("Data not present")
        data = {}
        for challenge in self.challenges:
            validation_data = challenge.get_validation_data(
                client=client,
                identifier_type=self.identifier_type,
                identifier=self.identifier,
            )
            # Unsupported challenge types yield None and are skipped.
            if validation_data is not None:
                data[challenge.type] = validation_data
        return data

    def raise_error(self, *, error_msg: str, module: AnsibleModule) -> t.NoReturn:
        """
        Aborts with a specific error for a challenge, collecting error details
        of all invalid challenges of this authorization first.
        """
        error_details = []
        # multiple challenges could have failed at this point, gather error
        # details for all of them before failing
        for challenge in self.challenges:
            if challenge.status == "invalid":
                msg = f"Challenge {challenge.type}"
                if "error" in challenge.data:
                    problem = format_error_problem(
                        challenge.data["error"],
                        subproblem_prefix=f"{challenge.type}.",
                    )
                    msg = f"{msg}: {problem}"
                error_details.append(msg)
        raise ACMEProtocolException(
            module=module,
            msg=f"Failed to validate challenge for {self.combined_identifier}: {error_msg}. {'; '.join(error_details)}",
            extras={
                "identifier": self.combined_identifier,
                "authorization": self.data,
            },
        )

    def find_challenge(self, *, challenge_type: str) -> Challenge | None:
        """Return the challenge of the given type, or ``None`` if absent."""
        for challenge in self.challenges:
            if challenge_type == challenge.type:
                return challenge
        return None

    def wait_for_validation(self, *, client: ACMEClient) -> bool:
        """
        Poll every two seconds until this authorization reaches a final state.
        Raises via raise_error() if it ends up "invalid"; returns whether the
        final status is "valid".
        """
        while True:
            self.refresh(client=client)
            if self.status in ["valid", "invalid", "revoked"]:
                break
            time.sleep(2)

        if self.status == "invalid":
            self.raise_error(error_msg='Status is "invalid"', module=client.module)

        return self.status == "valid"

    def call_validate(
        self, *, client: ACMEClient, challenge_type: str, wait: bool = True
    ) -> bool:
        """
        Validate the authorization provided in the auth dict. Returns True
        when the validation was successful and False when it was not.

        :raises ModuleFailException: If no challenge of ``challenge_type`` exists.
        """
        challenge = self.find_challenge(challenge_type=challenge_type)
        if challenge is None:
            raise ModuleFailException(
                f'Found no challenge of type "{challenge_type}" for identifier {self.combined_identifier}!'
            )

        challenge.call_validate(client)

        if not wait:
            return self.status == "valid"
        return self.wait_for_validation(client=client)

    def can_deactivate(self) -> bool:
        """
        Whether this authorization is in a state that allows deactivation.
        https://community.letsencrypt.org/t/authorization-deactivation/19860/2
        https://tools.ietf.org/html/rfc8555#section-7.5.2
        """
        return self.status in ("valid", "pending")

    def deactivate(self, *, client: ACMEClient) -> bool | None:
        """
        Deactivates this authorization. Returns None when it cannot be
        deactivated, True on success, and False on failure.
        https://community.letsencrypt.org/t/authorization-deactivation/19860/2
        https://tools.ietf.org/html/rfc8555#section-7.5.2
        """
        if not self.can_deactivate():
            return None
        authz_deactivate = {"status": "deactivated"}
        result, info = client.send_signed_request(
            self.url, authz_deactivate, fail_on_error=False
        )
        if 200 <= info["status"] < 300 and result.get("status") == "deactivated":
            self.status = "deactivated"
            return True
        return False

    @classmethod
    def deactivate_url(
        cls: t.Type[_Authorization], *, client: ACMEClient, url: str
    ) -> _Authorization:
        """
        Deactivates the authorization at ``url`` without fetching it first,
        and returns the resulting Authorization object.
        https://community.letsencrypt.org/t/authorization-deactivation/19860/2
        https://tools.ietf.org/html/rfc8555#section-7.5.2
        """
        authz = cls(url=url)
        authz_deactivate = {"status": "deactivated"}
        result, _info = client.send_signed_request(
            url, authz_deactivate, fail_on_error=True
        )
        authz._setup(client=client, data=result)
        return authz
|
||||||
|
|
||||||
|
|
||||||
|
def wait_for_validation(
    *, authzs: t.Iterable[Authorization], client: ACMEClient
) -> None:
    """
    Wait until a list of authz is valid. Fail if at least one of them is
    invalid or revoked.

    Pending authorizations are refreshed every two seconds; an authorization
    that ends up in a non-"valid" final state aborts via its raise_error().
    """
    pending = list(authzs)
    while pending:
        still_pending = []
        for authz in pending:
            authz.refresh(client=client)
            if authz.status not in ("valid", "invalid", "revoked"):
                still_pending.append(authz)
            elif authz.status != "valid":
                authz.raise_error(
                    error_msg='Status is not "valid"', module=client.module
                )
        if still_pending:
            time.sleep(2)
        pending = still_pending
|
||||||
|
|
||||||
|
|
||||||
|
__all__ = (
|
||||||
|
"create_key_authorization",
|
||||||
|
"combine_identifier",
|
||||||
|
"normalize_combined_identifier",
|
||||||
|
"split_identifier",
|
||||||
|
"Challenge",
|
||||||
|
"Authorization",
|
||||||
|
"wait_for_validation",
|
||||||
|
)
|
||||||
184
plugins/module_utils/_acme/errors.py
Normal file
184
plugins/module_utils/_acme/errors.py
Normal file
@@ -0,0 +1,184 @@
|
|||||||
|
# Copyright (c) 2016 Michael Gruener <michael.gruener@chaosmoon.net>
|
||||||
|
# Copyright (c) 2021 Felix Fontein <felix@fontein.de>
|
||||||
|
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||||
|
# SPDX-License-Identifier: GPL-3.0-or-later
|
||||||
|
|
||||||
|
# Note that this module util is **PRIVATE** to the collection. It can have breaking changes at any time.
|
||||||
|
# Do not use this from other collections or standalone plugins/modules!
|
||||||
|
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import typing as t
|
||||||
|
from http.client import responses as http_responses
|
||||||
|
|
||||||
|
from ansible.module_utils.common.text.converters import to_text
|
||||||
|
|
||||||
|
|
||||||
|
if t.TYPE_CHECKING:
|
||||||
|
from ansible.module_utils.basic import AnsibleModule
|
||||||
|
|
||||||
|
|
||||||
|
def format_http_status(status_code: int) -> str:
    """Return ``"<code> <reason>"``, or just the code if the reason is unknown."""
    reason = http_responses.get(status_code)
    return f"{status_code} {reason}" if reason else str(status_code)
|
||||||
|
|
||||||
|
|
||||||
|
def format_error_problem(
    problem: dict[str, t.Any], *, subproblem_prefix: str = ""
) -> str:
    """
    Render an RFC 7807 problem document as a human-readable string,
    recursively including nested subproblems.

    :param problem: Decoded problem document.
    :param subproblem_prefix: Prefix used when numbering nested subproblems.
    """
    # https://www.rfc-editor.org/rfc/rfc7807#section-3.1 defines the default type.
    error_type = problem.get("type", "about:blank")
    msg = (
        f'Error "{problem["title"]}" ({error_type})'
        if "title" in problem
        else f"Error {error_type}"
    )
    if "detail" in problem:
        msg = f'{msg}: "{problem["detail"]}"'
    subproblems = problem.get("subproblems")
    if subproblems is None:
        return msg
    lines = [f"{msg} Subproblems:"]
    for sub_index, subproblem in enumerate(subproblems):
        label = f"{subproblem_prefix}{sub_index}"
        rendered = format_error_problem(subproblem, subproblem_prefix=f"{label}.")
        lines.append(f"({label}) {rendered}")
    return "\n".join(lines)
|
||||||
|
|
||||||
|
|
||||||
|
class ModuleFailException(Exception):
    """
    If raised, module.fail_json() will be called with the given parameters after cleanup.
    """

    def __init__(self, msg: str, **args: t.Any) -> None:
        # NOTE(review): ``self`` is passed as an extra positional argument to
        # Exception.__init__; kept as-is to preserve the existing ``.args`` /
        # ``str()`` behavior, but it looks like an accident — confirm.
        super().__init__(self, msg)
        self.msg = msg
        # Extra keyword arguments forwarded to module.fail_json() by do_fail().
        self.module_fail_args = args

    def do_fail(self, *, module: AnsibleModule, **arguments) -> t.NoReturn:
        """Invoke ``module.fail_json()`` with this exception's message and extras."""
        module.fail_json(msg=self.msg, other=self.module_fail_args, **arguments)
|
||||||
|
|
||||||
|
|
||||||
|
class ACMEProtocolException(ModuleFailException):
    """
    Failure of an ACME HTTP request. Builds a detailed message from the
    HTTP response info and/or the RFC 7807 problem document it carried, and
    exposes the details (problem, subproblems, error_code, error_type, and
    everything in ``extras``) as attributes.
    """

    def __init__(
        self,
        *,
        module: AnsibleModule,
        msg: str | None = None,
        info: dict[str, t.Any] | None = None,
        response=None,
        content: bytes | None = None,
        content_json: dict[str, t.Any] | None = None,
        extras: dict[str, t.Any] | None = None,
    ):
        # Try to get hold of content, if response is given and content is not provided
        if content is None and content_json is None and response is not None:
            try:
                # Reading from a closed response raises TypeError (or returns
                # ''), so force the fallback to the body saved in info below.
                if response.closed:
                    raise TypeError
                content = response.read()
            except (AttributeError, TypeError):
                if info is not None:
                    content = info.pop("body", None)

        # Make sure that content_json is None or a dictionary
        if content_json is not None and not isinstance(content_json, dict):
            # A bytes content_json is treated as raw content instead.
            if content is None and isinstance(content_json, bytes):
                content = content_json
            content_json = None

        # Try to get hold of JSON decoded content, when content is given and JSON not provided
        if content_json is None and content is not None and module is not None:
            try:
                content_json = module.from_json(to_text(content))
            except Exception:
                # Content simply is not JSON; keep the raw bytes.
                pass

        extras = extras or {}
        error_code = None
        error_type = None

        if msg is None:
            msg = "ACME request failed"
        add_msg = ""

        if info is not None:
            url = info["url"]
            code = info["status"]
            extras["http_url"] = url
            extras["http_status"] = code
            error_code = code
            # A 4xx/5xx answer with a problem document gets detailed rendering.
            if (
                code is not None
                and code >= 400
                and content_json is not None
                and "type" in content_json
            ):
                error_type = content_json["type"]
                # Prefer the status from the problem document if it disagrees
                # with the HTTP status.
                if "status" in content_json and content_json["status"] != code:
                    code_msg = f"status {content_json['status']} (HTTP status: {format_http_status(code)})"
                else:
                    code_msg = f"status {format_http_status(code)}"
                # code == -1 signals a transport-level error with its own message.
                if code == -1 and info.get("msg"):
                    code_msg = f"error: {info['msg']}"
                subproblems = content_json.pop("subproblems", None)
                add_msg = f" {format_error_problem(content_json)}."
                extras["problem"] = content_json
                extras["subproblems"] = subproblems or []
                if subproblems is not None:
                    add_msg = f"{add_msg} Subproblems:"
                    for index, problem in enumerate(subproblems):
                        problem = format_error_problem(
                            problem, subproblem_prefix=f"{index}."
                        )
                        add_msg = f"{add_msg}\n({index}) {problem}."
            else:
                code_msg = f"HTTP status {format_http_status(code)}"
                if code == -1 and info.get("msg"):
                    code_msg = f"error: {info['msg']}"
                if content_json is not None:
                    add_msg = f" The JSON error result: {content_json}"
                elif content is not None:
                    add_msg = f" The raw error result: {to_text(content)}"
            msg = f"{msg} for {url} with {code_msg}"
        elif content_json is not None:
            add_msg = f" The JSON result: {content_json}"
        elif content is not None:
            add_msg = f" The raw result: {to_text(content)}"

        super().__init__(f"{msg}.{add_msg}", **extras)
        # Defaults; overridden by the extras loop below when present.
        self.problem: dict[str, t.Any] = {}
        self.subproblems: list[dict[str, t.Any]] = []
        self.error_code = error_code
        self.error_type = error_type
        for k, v in extras.items():
            setattr(self, k, v)
|
||||||
|
|
||||||
|
|
||||||
|
class BackendException(ModuleFailException):
    # Distinct subtype so callers can catch these failures specifically;
    # NOTE(review): exact usage is defined elsewhere in the collection.
    pass
|
||||||
|
|
||||||
|
|
||||||
|
class NetworkException(ModuleFailException):
    # Distinct subtype so callers can catch these failures specifically;
    # NOTE(review): exact usage is defined elsewhere in the collection.
    pass
|
||||||
|
|
||||||
|
|
||||||
|
class KeyParsingError(ModuleFailException):
    # Distinct subtype so callers can catch these failures specifically;
    # NOTE(review): exact usage is defined elsewhere in the collection.
    pass
|
||||||
|
|
||||||
|
|
||||||
|
__all__ = (
|
||||||
|
"format_http_status",
|
||||||
|
"format_error_problem",
|
||||||
|
"ModuleFailException",
|
||||||
|
"ACMEProtocolException",
|
||||||
|
"BackendException",
|
||||||
|
"NetworkException",
|
||||||
|
"KeyParsingError",
|
||||||
|
)
|
||||||
@@ -1,52 +1,61 @@
|
|||||||
# -*- coding: utf-8 -*-
|
|
||||||
|
|
||||||
# Copyright (c) 2013, Romeo Theriault <romeot () hawaii.edu>
|
# Copyright (c) 2013, Romeo Theriault <romeot () hawaii.edu>
|
||||||
# Copyright (c) 2016 Michael Gruener <michael.gruener@chaosmoon.net>
|
# Copyright (c) 2016 Michael Gruener <michael.gruener@chaosmoon.net>
|
||||||
# Copyright (c) 2021 Felix Fontein <felix@fontein.de>
|
# Copyright (c) 2021 Felix Fontein <felix@fontein.de>
|
||||||
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
|
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||||
# SPDX-License-Identifier: GPL-3.0-or-later
|
# SPDX-License-Identifier: GPL-3.0-or-later
|
||||||
|
|
||||||
from __future__ import absolute_import, division, print_function
|
# Note that this module util is **PRIVATE** to the collection. It can have breaking changes at any time.
|
||||||
__metaclass__ = type
|
# Do not use this from other collections or standalone plugins/modules!
|
||||||
|
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
import os
|
import os
|
||||||
import shutil
|
import shutil
|
||||||
import tempfile
|
import tempfile
|
||||||
import traceback
|
import traceback
|
||||||
|
import typing as t
|
||||||
|
|
||||||
from ansible.module_utils.common.text.converters import to_native
|
from ansible_collections.community.crypto.plugins.module_utils._acme.errors import (
|
||||||
|
ModuleFailException,
|
||||||
from ansible_collections.community.crypto.plugins.module_utils.acme.errors import ModuleFailException
|
)
|
||||||
|
|
||||||
|
|
||||||
def read_file(fn, mode='b'):
|
if t.TYPE_CHECKING:
|
||||||
|
from ansible.module_utils.basic import AnsibleModule
|
||||||
|
|
||||||
|
|
||||||
|
def read_file(fn: str | os.PathLike) -> bytes:
|
||||||
try:
|
try:
|
||||||
with open(fn, 'r' + mode) as f:
|
with open(fn, "rb") as f:
|
||||||
return f.read()
|
return f.read()
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
raise ModuleFailException('Error while reading file "{0}": {1}'.format(fn, e))
|
raise ModuleFailException(f'Error while reading file "{fn}": {e}') from e
|
||||||
|
|
||||||
|
|
||||||
# This function was adapted from an earlier version of https://github.com/ansible/ansible/blob/devel/lib/ansible/modules/uri.py
|
# This function was adapted from an earlier version of https://github.com/ansible/ansible/blob/devel/lib/ansible/modules/uri.py
|
||||||
def write_file(module, dest, content):
|
def write_file(
|
||||||
'''
|
*, module: AnsibleModule, dest: str | os.PathLike, content: bytes
|
||||||
|
) -> bool:
|
||||||
|
"""
|
||||||
Write content to destination file dest, only if the content
|
Write content to destination file dest, only if the content
|
||||||
has changed.
|
has changed.
|
||||||
'''
|
"""
|
||||||
changed = False
|
changed = False
|
||||||
# create a tempfile
|
# create a tempfile
|
||||||
fd, tmpsrc = tempfile.mkstemp(text=False)
|
fd, tmpsrc = tempfile.mkstemp(text=False)
|
||||||
f = os.fdopen(fd, 'wb')
|
f = os.fdopen(fd, "wb")
|
||||||
try:
|
try:
|
||||||
f.write(content)
|
f.write(content)
|
||||||
except Exception as err:
|
except Exception as err:
|
||||||
try:
|
try:
|
||||||
f.close()
|
f.close()
|
||||||
except Exception as dummy:
|
except Exception:
|
||||||
pass
|
pass
|
||||||
os.remove(tmpsrc)
|
os.remove(tmpsrc)
|
||||||
raise ModuleFailException("failed to create temporary content file: %s" % to_native(err), exception=traceback.format_exc())
|
raise ModuleFailException(
|
||||||
|
f"failed to create temporary content file: {err}",
|
||||||
|
exception=traceback.format_exc(),
|
||||||
|
) from err
|
||||||
f.close()
|
f.close()
|
||||||
checksum_src = None
|
checksum_src = None
|
||||||
checksum_dest = None
|
checksum_dest = None
|
||||||
@@ -54,34 +63,40 @@ def write_file(module, dest, content):
|
|||||||
if not os.path.exists(tmpsrc):
|
if not os.path.exists(tmpsrc):
|
||||||
try:
|
try:
|
||||||
os.remove(tmpsrc)
|
os.remove(tmpsrc)
|
||||||
except Exception as dummy:
|
except Exception:
|
||||||
pass
|
pass
|
||||||
raise ModuleFailException("Source %s does not exist" % (tmpsrc))
|
raise ModuleFailException(f"Source {tmpsrc} does not exist")
|
||||||
if not os.access(tmpsrc, os.R_OK):
|
if not os.access(tmpsrc, os.R_OK):
|
||||||
os.remove(tmpsrc)
|
os.remove(tmpsrc)
|
||||||
raise ModuleFailException("Source %s not readable" % (tmpsrc))
|
raise ModuleFailException(f"Source {tmpsrc} not readable")
|
||||||
checksum_src = module.sha1(tmpsrc)
|
checksum_src = module.sha1(tmpsrc)
|
||||||
# check if there is no dest file
|
# check if there is no dest file
|
||||||
if os.path.exists(dest):
|
if os.path.exists(dest):
|
||||||
# raise an error if copy has no permission on dest
|
# raise an error if copy has no permission on dest
|
||||||
if not os.access(dest, os.W_OK):
|
if not os.access(dest, os.W_OK):
|
||||||
os.remove(tmpsrc)
|
os.remove(tmpsrc)
|
||||||
raise ModuleFailException("Destination %s not writable" % (dest))
|
raise ModuleFailException(f"Destination {dest} not writable")
|
||||||
if not os.access(dest, os.R_OK):
|
if not os.access(dest, os.R_OK):
|
||||||
os.remove(tmpsrc)
|
os.remove(tmpsrc)
|
||||||
raise ModuleFailException("Destination %s not readable" % (dest))
|
raise ModuleFailException(f"Destination {dest} not readable")
|
||||||
checksum_dest = module.sha1(dest)
|
checksum_dest = module.sha1(dest)
|
||||||
else:
|
else:
|
||||||
dirname = os.path.dirname(dest) or '.'
|
dirname = os.path.dirname(dest) or "."
|
||||||
if not os.access(dirname, os.W_OK):
|
if not os.access(dirname, os.W_OK):
|
||||||
os.remove(tmpsrc)
|
os.remove(tmpsrc)
|
||||||
raise ModuleFailException("Destination dir %s not writable" % (dirname))
|
raise ModuleFailException(f"Destination dir {dirname} not writable")
|
||||||
if checksum_src != checksum_dest:
|
if checksum_src != checksum_dest:
|
||||||
try:
|
try:
|
||||||
shutil.copyfile(tmpsrc, dest)
|
shutil.copyfile(tmpsrc, dest)
|
||||||
changed = True
|
changed = True
|
||||||
except Exception as err:
|
except Exception as err:
|
||||||
os.remove(tmpsrc)
|
os.remove(tmpsrc)
|
||||||
raise ModuleFailException("failed to copy %s to %s: %s" % (tmpsrc, dest, to_native(err)), exception=traceback.format_exc())
|
raise ModuleFailException(
|
||||||
|
f"failed to copy {tmpsrc} to {dest}: {err}",
|
||||||
|
exception=traceback.format_exc(),
|
||||||
|
) from err
|
||||||
os.remove(tmpsrc)
|
os.remove(tmpsrc)
|
||||||
return changed
|
return changed
|
||||||
|
|
||||||
|
|
||||||
|
__all__ = ("read_file", "write_file")
|
||||||
231
plugins/module_utils/_acme/orders.py
Normal file
231
plugins/module_utils/_acme/orders.py
Normal file
@@ -0,0 +1,231 @@
|
|||||||
|
# Copyright (c) 2016 Michael Gruener <michael.gruener@chaosmoon.net>
|
||||||
|
# Copyright (c) 2021 Felix Fontein <felix@fontein.de>
|
||||||
|
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||||
|
# SPDX-License-Identifier: GPL-3.0-or-later
|
||||||
|
|
||||||
|
# Note that this module util is **PRIVATE** to the collection. It can have breaking changes at any time.
|
||||||
|
# Do not use this from other collections or standalone plugins/modules!
|
||||||
|
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import time
|
||||||
|
import typing as t
|
||||||
|
|
||||||
|
from ansible_collections.community.crypto.plugins.module_utils._acme.challenges import (
|
||||||
|
Authorization,
|
||||||
|
normalize_combined_identifier,
|
||||||
|
)
|
||||||
|
from ansible_collections.community.crypto.plugins.module_utils._acme.errors import (
|
||||||
|
ACMEProtocolException,
|
||||||
|
ModuleFailException,
|
||||||
|
)
|
||||||
|
from ansible_collections.community.crypto.plugins.module_utils._acme.utils import (
|
||||||
|
nopad_b64,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
if t.TYPE_CHECKING:
|
||||||
|
from ansible_collections.community.crypto.plugins.module_utils._acme.acme import (
|
||||||
|
ACMEClient,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
_Order = t.TypeVar("_Order", bound="Order")
|
||||||
|
|
||||||
|
|
||||||
|
class Order:
|
||||||
|
def __init__(self, *, url: str) -> None:
|
||||||
|
self.url = url
|
||||||
|
|
||||||
|
self.data: dict[str, t.Any] | None = None
|
||||||
|
|
||||||
|
self.status = None
|
||||||
|
self.identifiers: list[tuple[str, str]] = []
|
||||||
|
self.replaces_cert_id = None
|
||||||
|
self.finalize_uri = None
|
||||||
|
self.certificate_uri = None
|
||||||
|
self.authorization_uris: list[str] = []
|
||||||
|
self.authorizations: dict[str, Authorization] = {}
|
||||||
|
|
||||||
|
def _setup(self, *, client: ACMEClient, data: dict[str, t.Any]) -> None:
|
||||||
|
self.data = data
|
||||||
|
|
||||||
|
self.status = data["status"]
|
||||||
|
self.identifiers = []
|
||||||
|
for identifier in data["identifiers"]:
|
||||||
|
self.identifiers.append((identifier["type"], identifier["value"]))
|
||||||
|
self.replaces_cert_id = data.get("replaces")
|
||||||
|
self.finalize_uri = data.get("finalize")
|
||||||
|
self.certificate_uri = data.get("certificate")
|
||||||
|
self.authorization_uris = data["authorizations"]
|
||||||
|
self.authorizations = {}
|
||||||
|
|
||||||
|
@classmethod
|
||||||
|
def from_json(
|
||||||
|
cls: t.Type[_Order], *, client: ACMEClient, data: dict[str, t.Any], url: str
|
||||||
|
) -> _Order:
|
||||||
|
result = cls(url=url)
|
||||||
|
result._setup(client=client, data=data)
|
||||||
|
return result
|
||||||
|
|
||||||
|
@classmethod
|
||||||
|
def from_url(cls: t.Type[_Order], *, client: ACMEClient, url: str) -> _Order:
|
||||||
|
result = cls(url=url)
|
||||||
|
result.refresh(client=client)
|
||||||
|
return result
|
||||||
|
|
||||||
|
@classmethod
|
||||||
|
def create(
|
||||||
|
cls: t.Type[_Order],
|
||||||
|
*,
|
||||||
|
client: ACMEClient,
|
||||||
|
identifiers: list[tuple[str, str]],
|
||||||
|
replaces_cert_id: str | None = None,
|
||||||
|
profile: str | None = None,
|
||||||
|
) -> _Order:
|
||||||
|
"""
|
||||||
|
Start a new certificate order (ACME v2 protocol).
|
||||||
|
https://tools.ietf.org/html/rfc8555#section-7.4
|
||||||
|
"""
|
||||||
|
acme_identifiers = []
|
||||||
|
for identifier_type, identifier in identifiers:
|
||||||
|
acme_identifiers.append(
|
||||||
|
{
|
||||||
|
"type": identifier_type,
|
||||||
|
"value": identifier,
|
||||||
|
}
|
||||||
|
)
|
||||||
|
new_order: dict[str, t.Any] = {"identifiers": acme_identifiers}
|
||||||
|
if replaces_cert_id is not None:
|
||||||
|
new_order["replaces"] = replaces_cert_id
|
||||||
|
if profile is not None:
|
||||||
|
new_order["profile"] = profile
|
||||||
|
result, info = client.send_signed_request(
|
||||||
|
client.directory["newOrder"],
|
||||||
|
new_order,
|
||||||
|
error_msg="Failed to start new order",
|
||||||
|
expected_status_codes=[201],
|
||||||
|
)
|
||||||
|
return cls.from_json(client=client, data=result, url=info["location"])
|
||||||
|
|
||||||
|
@classmethod
|
||||||
|
def create_with_error_handling(
|
||||||
|
cls: t.Type[_Order],
|
||||||
|
*,
|
||||||
|
client: ACMEClient,
|
||||||
|
identifiers: list[tuple[str, str]],
|
||||||
|
error_strategy: t.Literal[
|
||||||
|
"auto", "fail", "always", "retry_without_replaces_cert_id"
|
||||||
|
] = "auto",
|
||||||
|
error_max_retries: int = 3,
|
||||||
|
replaces_cert_id: str | None = None,
|
||||||
|
profile: str | None = None,
|
||||||
|
message_callback: t.Callable[[str], None] | None = None,
|
||||||
|
) -> _Order:
|
||||||
|
"""
|
||||||
|
error_strategy can be one of the following strings:
|
||||||
|
|
||||||
|
* ``fail``: simply fail. (Same behavior as ``Order.create()``.)
|
||||||
|
* ``retry_without_replaces_cert_id``: if ``replaces_cert_id`` is not ``None``, set it to ``None`` and retry.
|
||||||
|
The only exception is an error of type ``urn:ietf:params:acme:error:alreadyReplaced``, that indicates that
|
||||||
|
the certificate was already replaced.
|
||||||
|
* ``auto``: try to be clever. Right now this is identical to ``retry_without_replaces_cert_id``, but that can
|
||||||
|
change at any time in the future.
|
||||||
|
* ``always``: always retry until ``error_max_retries`` has been reached.
|
||||||
|
"""
|
||||||
|
tries = 0
|
||||||
|
while True:
|
||||||
|
tries += 1
|
||||||
|
try:
|
||||||
|
return cls.create(
|
||||||
|
client=client,
|
||||||
|
identifiers=identifiers,
|
||||||
|
replaces_cert_id=replaces_cert_id,
|
||||||
|
profile=profile,
|
||||||
|
)
|
||||||
|
except ACMEProtocolException as exc:
|
||||||
|
if tries <= error_max_retries + 1 and error_strategy != "fail":
|
||||||
|
if error_strategy == "always":
|
||||||
|
continue
|
||||||
|
|
||||||
|
if (
|
||||||
|
error_strategy in ("auto", "retry_without_replaces_cert_id")
|
||||||
|
and replaces_cert_id is not None
|
||||||
|
and not (
|
||||||
|
exc.error_code == 409
|
||||||
|
and exc.error_type
|
||||||
|
== "urn:ietf:params:acme:error:alreadyReplaced"
|
||||||
|
)
|
||||||
|
):
|
||||||
|
if message_callback:
|
||||||
|
message_callback(
|
||||||
|
f"Stop passing `replaces={replaces_cert_id}` due to error {exc.error_code} {exc.error_type} when creating ACME order"
|
||||||
|
)
|
||||||
|
replaces_cert_id = None
|
||||||
|
continue
|
||||||
|
|
||||||
|
raise
|
||||||
|
|
||||||
|
def refresh(self, *, client: ACMEClient) -> bool:
|
||||||
|
result, dummy = client.get_request(self.url)
|
||||||
|
changed = self.data != result
|
||||||
|
self._setup(client=client, data=result)
|
||||||
|
return changed
|
||||||
|
|
||||||
|
def load_authorizations(self, *, client: ACMEClient) -> None:
|
||||||
|
for auth_uri in self.authorization_uris:
|
||||||
|
authz = Authorization.from_url(client=client, url=auth_uri)
|
||||||
|
self.authorizations[
|
||||||
|
normalize_combined_identifier(authz.combined_identifier)
|
||||||
|
] = authz
|
||||||
|
|
||||||
|
def wait_for_finalization(self, *, client: ACMEClient) -> None:
|
||||||
|
while True:
|
||||||
|
self.refresh(client=client)
|
||||||
|
if self.status in ["valid", "invalid", "pending", "ready"]:
|
||||||
|
break
|
||||||
|
time.sleep(2)
|
||||||
|
|
||||||
|
if self.status != "valid":
|
||||||
|
raise ACMEProtocolException(
|
||||||
|
module=client.module,
|
||||||
|
msg=f'Failed to wait for order to complete; got status "{self.status}"',
|
||||||
|
content_json=self.data,
|
||||||
|
)
|
||||||
|
|
||||||
|
def finalize(
|
||||||
|
self, *, client: ACMEClient, csr_der: bytes, wait: bool = True
|
||||||
|
) -> None:
|
||||||
|
"""
|
||||||
|
Create a new certificate based on the csr.
|
||||||
|
Return the certificate object as dict
|
||||||
|
https://tools.ietf.org/html/rfc8555#section-7.4
|
||||||
|
"""
|
||||||
|
if self.finalize_uri is None:
|
||||||
|
raise ModuleFailException("finalize_uri must be set")
|
||||||
|
new_cert = {
|
||||||
|
"csr": nopad_b64(csr_der),
|
||||||
|
}
|
||||||
|
result, info = client.send_signed_request(
|
||||||
|
self.finalize_uri,
|
||||||
|
new_cert,
|
||||||
|
error_msg="Failed to finalizing order",
|
||||||
|
expected_status_codes=[200],
|
||||||
|
)
|
||||||
|
# It is not clear from the RFC whether the finalize call returns the order object or not.
|
||||||
|
# Instead of using the result, we call self.refresh(client) below.
|
||||||
|
|
||||||
|
if wait:
|
||||||
|
self.wait_for_finalization(client=client)
|
||||||
|
else:
|
||||||
|
self.refresh(client=client)
|
||||||
|
if self.status not in ["procesing", "valid", "invalid"]:
|
||||||
|
raise ACMEProtocolException(
|
||||||
|
module=client.module,
|
||||||
|
msg=f'Failed to finalize order; got status "{self.status}"',
|
||||||
|
info=info,
|
||||||
|
content_json=result,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
__all__ = ("Order",)
|
||||||
173
plugins/module_utils/_acme/utils.py
Normal file
173
plugins/module_utils/_acme/utils.py
Normal file
@@ -0,0 +1,173 @@
|
|||||||
|
# Copyright (c) 2016 Michael Gruener <michael.gruener@chaosmoon.net>
|
||||||
|
# Copyright (c) 2021 Felix Fontein <felix@fontein.de>
|
||||||
|
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||||
|
# SPDX-License-Identifier: GPL-3.0-or-later
|
||||||
|
|
||||||
|
# Note that this module util is **PRIVATE** to the collection. It can have breaking changes at any time.
|
||||||
|
# Do not use this from other collections or standalone plugins/modules!
|
||||||
|
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import base64
|
||||||
|
import datetime
|
||||||
|
import os
|
||||||
|
import re
|
||||||
|
import textwrap
|
||||||
|
import traceback
|
||||||
|
import typing as t
|
||||||
|
from urllib.parse import unquote
|
||||||
|
|
||||||
|
from ansible_collections.community.crypto.plugins.module_utils._acme.errors import (
|
||||||
|
ModuleFailException,
|
||||||
|
)
|
||||||
|
from ansible_collections.community.crypto.plugins.module_utils._crypto.math import (
|
||||||
|
convert_int_to_bytes,
|
||||||
|
)
|
||||||
|
from ansible_collections.community.crypto.plugins.module_utils._time import (
|
||||||
|
get_now_datetime,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
if t.TYPE_CHECKING:
|
||||||
|
from ansible_collections.community.crypto.plugins.module_utils._acme.backends import (
|
||||||
|
CertificateInformation,
|
||||||
|
CryptoBackend,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
def nopad_b64(data: bytes) -> str:
|
||||||
|
return base64.urlsafe_b64encode(data).decode("utf8").replace("=", "")
|
||||||
|
|
||||||
|
|
||||||
|
def der_to_pem(der_cert: bytes) -> str:
|
||||||
|
"""
|
||||||
|
Convert the DER format certificate in der_cert to a PEM format certificate and return it.
|
||||||
|
"""
|
||||||
|
content = "\n".join(textwrap.wrap(base64.b64encode(der_cert).decode("utf8"), 64))
|
||||||
|
return f"-----BEGIN CERTIFICATE-----\n{content}\n-----END CERTIFICATE-----\n"
|
||||||
|
|
||||||
|
|
||||||
|
def pem_to_der(
|
||||||
|
*, pem_filename: str | os.PathLike | None = None, pem_content: str | None = None
|
||||||
|
) -> bytes:
|
||||||
|
"""
|
||||||
|
Load PEM file, or use PEM file's content, and convert to DER.
|
||||||
|
|
||||||
|
If PEM contains multiple entities, the first entity will be used.
|
||||||
|
"""
|
||||||
|
certificate_lines = []
|
||||||
|
if pem_content is not None:
|
||||||
|
lines = pem_content.splitlines()
|
||||||
|
elif pem_filename is not None:
|
||||||
|
try:
|
||||||
|
with open(pem_filename, "r", encoding="utf-8") as f:
|
||||||
|
lines = list(f)
|
||||||
|
except Exception as err:
|
||||||
|
raise ModuleFailException(
|
||||||
|
f"cannot load PEM file {pem_filename}: {err}",
|
||||||
|
exception=traceback.format_exc(),
|
||||||
|
) from err
|
||||||
|
else:
|
||||||
|
raise ModuleFailException(
|
||||||
|
"One of pem_filename and pem_content must be provided"
|
||||||
|
)
|
||||||
|
header_line_count = 0
|
||||||
|
for line in lines:
|
||||||
|
if line.startswith("-----"):
|
||||||
|
header_line_count += 1
|
||||||
|
if header_line_count == 2:
|
||||||
|
# If certificate file contains other certs appended
|
||||||
|
# (like intermediate certificates), ignore these.
|
||||||
|
break
|
||||||
|
continue
|
||||||
|
certificate_lines.append(line.strip())
|
||||||
|
return base64.b64decode("".join(certificate_lines))
|
||||||
|
|
||||||
|
|
||||||
|
def process_links(
|
||||||
|
*, info: dict[str, t.Any], callback: t.Callable[[str, str], None]
|
||||||
|
) -> None:
|
||||||
|
"""
|
||||||
|
Process link header, calls callback for every link header with the URL and relation as options.
|
||||||
|
|
||||||
|
https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Link
|
||||||
|
"""
|
||||||
|
if "link" in info:
|
||||||
|
link = info["link"]
|
||||||
|
for url, relation in re.findall(r'<([^>]+)>;\s*rel="(\w+)"', link):
|
||||||
|
callback(unquote(url), relation)
|
||||||
|
|
||||||
|
|
||||||
|
def parse_retry_after(
|
||||||
|
value: str,
|
||||||
|
*,
|
||||||
|
relative_with_timezone: bool = True,
|
||||||
|
now: datetime.datetime | None = None,
|
||||||
|
) -> datetime.datetime:
|
||||||
|
"""
|
||||||
|
Parse the value of a Retry-After header and return a timestamp.
|
||||||
|
|
||||||
|
https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Retry-After
|
||||||
|
"""
|
||||||
|
# First try a number of seconds
|
||||||
|
try:
|
||||||
|
delta = datetime.timedelta(seconds=int(value))
|
||||||
|
if now is None:
|
||||||
|
now = get_now_datetime(with_timezone=relative_with_timezone)
|
||||||
|
return now + delta
|
||||||
|
except ValueError:
|
||||||
|
pass
|
||||||
|
|
||||||
|
try:
|
||||||
|
return datetime.datetime.strptime(value, "%a, %d %b %Y %H:%M:%S GMT")
|
||||||
|
except ValueError:
|
||||||
|
pass
|
||||||
|
|
||||||
|
raise ValueError(f"Cannot parse Retry-After header value {repr(value)}")
|
||||||
|
|
||||||
|
|
||||||
|
def compute_cert_id(
|
||||||
|
*,
|
||||||
|
backend: CryptoBackend,
|
||||||
|
cert_info: CertificateInformation | None = None,
|
||||||
|
cert_filename: str | os.PathLike | None = None,
|
||||||
|
cert_content: str | bytes | None = None,
|
||||||
|
none_if_required_information_is_missing: bool = False,
|
||||||
|
) -> str | None:
|
||||||
|
# Obtain certificate info if not provided
|
||||||
|
if cert_info is None:
|
||||||
|
cert_info = backend.get_cert_information(
|
||||||
|
cert_filename=cert_filename, cert_content=cert_content
|
||||||
|
)
|
||||||
|
|
||||||
|
# Convert Authority Key Identifier to string
|
||||||
|
if cert_info.authority_key_identifier is None:
|
||||||
|
if none_if_required_information_is_missing:
|
||||||
|
return None
|
||||||
|
raise ModuleFailException(
|
||||||
|
"Certificate has no Authority Key Identifier extension"
|
||||||
|
)
|
||||||
|
aki = (
|
||||||
|
(base64.urlsafe_b64encode(cert_info.authority_key_identifier))
|
||||||
|
.decode("ascii")
|
||||||
|
.replace("=", "")
|
||||||
|
)
|
||||||
|
|
||||||
|
# Convert serial number to string
|
||||||
|
serial_bytes = convert_int_to_bytes(cert_info.serial_number)
|
||||||
|
if ord(serial_bytes[:1]) >= 128:
|
||||||
|
serial_bytes = b"\x00" + serial_bytes
|
||||||
|
serial = (base64.urlsafe_b64encode(serial_bytes)).decode("ascii").replace("=", "")
|
||||||
|
|
||||||
|
# Compose cert ID
|
||||||
|
return f"{aki}.{serial}"
|
||||||
|
|
||||||
|
|
||||||
|
__all__ = (
|
||||||
|
"nopad_b64",
|
||||||
|
"der_to_pem",
|
||||||
|
"pem_to_der",
|
||||||
|
"process_links",
|
||||||
|
"parse_retry_after",
|
||||||
|
"compute_cert_id",
|
||||||
|
)
|
||||||
124
plugins/module_utils/_argspec.py
Normal file
124
plugins/module_utils/_argspec.py
Normal file
@@ -0,0 +1,124 @@
|
|||||||
|
# Copyright (c) 2020, Felix Fontein <felix@fontein.de>
|
||||||
|
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||||
|
# SPDX-License-Identifier: GPL-3.0-or-later
|
||||||
|
|
||||||
|
# Note that this module util is **PRIVATE** to the collection. It can have breaking changes at any time.
|
||||||
|
# Do not use this from other collections or standalone plugins/modules!
|
||||||
|
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import typing as t
|
||||||
|
|
||||||
|
from ansible.module_utils.basic import AnsibleModule
|
||||||
|
|
||||||
|
|
||||||
|
_T = t.TypeVar("_T")
|
||||||
|
|
||||||
|
|
||||||
|
def _ensure_list(value: list[_T] | tuple[_T] | None) -> list[_T]:
|
||||||
|
if value is None:
|
||||||
|
return []
|
||||||
|
return list(value)
|
||||||
|
|
||||||
|
|
||||||
|
class ArgumentSpec:
|
||||||
|
def __init__(
|
||||||
|
self,
|
||||||
|
argument_spec: dict[str, t.Any] | None = None,
|
||||||
|
*,
|
||||||
|
mutually_exclusive: list[list[str] | tuple[str, ...]] | None = None,
|
||||||
|
required_together: list[list[str] | tuple[str, ...]] | None = None,
|
||||||
|
required_one_of: list[list[str] | tuple[str, ...]] | None = None,
|
||||||
|
required_if: (
|
||||||
|
list[
|
||||||
|
tuple[str, t.Any, list[str] | tuple[str, ...]]
|
||||||
|
| tuple[str, t.Any, list[str] | tuple[str, ...], bool]
|
||||||
|
]
|
||||||
|
| None
|
||||||
|
) = None,
|
||||||
|
required_by: dict[str, tuple[str, ...] | list[str]] | None = None,
|
||||||
|
) -> None:
|
||||||
|
self.argument_spec = argument_spec or {}
|
||||||
|
self.mutually_exclusive = _ensure_list(mutually_exclusive)
|
||||||
|
self.required_together = _ensure_list(required_together)
|
||||||
|
self.required_one_of = _ensure_list(required_one_of)
|
||||||
|
self.required_if = _ensure_list(required_if)
|
||||||
|
self.required_by = required_by or {}
|
||||||
|
|
||||||
|
def update_argspec(self, **kwargs) -> t.Self:
|
||||||
|
self.argument_spec.update(kwargs)
|
||||||
|
return self
|
||||||
|
|
||||||
|
def update(
|
||||||
|
self,
|
||||||
|
*,
|
||||||
|
mutually_exclusive: list[list[str] | tuple[str, ...]] | None = None,
|
||||||
|
required_together: list[list[str] | tuple[str, ...]] | None = None,
|
||||||
|
required_one_of: list[list[str] | tuple[str, ...]] | None = None,
|
||||||
|
required_if: (
|
||||||
|
list[
|
||||||
|
tuple[str, t.Any, list[str] | tuple[str, ...]]
|
||||||
|
| tuple[str, t.Any, list[str] | tuple[str, ...], bool]
|
||||||
|
]
|
||||||
|
| None
|
||||||
|
) = None,
|
||||||
|
required_by: dict[str, tuple[str, ...] | list[str]] | None = None,
|
||||||
|
):
|
||||||
|
if mutually_exclusive:
|
||||||
|
self.mutually_exclusive.extend(mutually_exclusive)
|
||||||
|
if required_together:
|
||||||
|
self.required_together.extend(required_together)
|
||||||
|
if required_one_of:
|
||||||
|
self.required_one_of.extend(required_one_of)
|
||||||
|
if required_if:
|
||||||
|
self.required_if.extend(required_if)
|
||||||
|
if required_by:
|
||||||
|
for k, v in required_by.items():
|
||||||
|
if k in self.required_by:
|
||||||
|
v = list(self.required_by[k]) + list(v)
|
||||||
|
self.required_by[k] = v
|
||||||
|
return self
|
||||||
|
|
||||||
|
def merge(self, other: t.Self) -> t.Self:
|
||||||
|
self.update_argspec(**other.argument_spec)
|
||||||
|
self.update(
|
||||||
|
mutually_exclusive=other.mutually_exclusive,
|
||||||
|
required_together=other.required_together,
|
||||||
|
required_one_of=other.required_one_of,
|
||||||
|
required_if=other.required_if,
|
||||||
|
required_by=other.required_by,
|
||||||
|
)
|
||||||
|
return self
|
||||||
|
|
||||||
|
def create_ansible_module_helper(
|
||||||
|
self, clazz: type[_T], args: tuple, **kwargs: t.Any
|
||||||
|
) -> _T:
|
||||||
|
for forbidden_name in (
|
||||||
|
"argument_spec",
|
||||||
|
"mutually_exclusive",
|
||||||
|
"required_together",
|
||||||
|
"required_one_of",
|
||||||
|
"required_if",
|
||||||
|
"required_by",
|
||||||
|
):
|
||||||
|
if forbidden_name in kwargs:
|
||||||
|
raise ValueError(
|
||||||
|
f"You must not provide a {forbidden_name} keyword parameter to create_ansible_module_helper()"
|
||||||
|
)
|
||||||
|
instance = clazz( # type: ignore
|
||||||
|
*args,
|
||||||
|
argument_spec=self.argument_spec,
|
||||||
|
mutually_exclusive=self.mutually_exclusive,
|
||||||
|
required_together=self.required_together,
|
||||||
|
required_one_of=self.required_one_of,
|
||||||
|
required_if=self.required_if,
|
||||||
|
required_by=self.required_by,
|
||||||
|
**kwargs,
|
||||||
|
)
|
||||||
|
return instance
|
||||||
|
|
||||||
|
def create_ansible_module(self, **kwargs: t.Any) -> AnsibleModule:
|
||||||
|
return self.create_ansible_module_helper(AnsibleModule, (), **kwargs)
|
||||||
|
|
||||||
|
|
||||||
|
__all__ = ("ArgumentSpec",)
|
||||||
176
plugins/module_utils/_crypto/_asn1.py
Normal file
176
plugins/module_utils/_crypto/_asn1.py
Normal file
@@ -0,0 +1,176 @@
|
|||||||
|
# Copyright (c) 2020, Jordan Borean <jborean93@gmail.com>
|
||||||
|
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||||
|
# SPDX-License-Identifier: GPL-3.0-or-later
|
||||||
|
|
||||||
|
# Note that this module util is **PRIVATE** to the collection. It can have breaking changes at any time.
|
||||||
|
# Do not use this from other collections or standalone plugins/modules!
|
||||||
|
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import enum
|
||||||
|
import re
|
||||||
|
|
||||||
|
from ansible.module_utils.common.text.converters import to_bytes
|
||||||
|
|
||||||
|
|
||||||
|
# An ASN.1 serialized as a string in the OpenSSL format:
|
||||||
|
# [modifier,]type[:value]
|
||||||
|
#
|
||||||
|
# 'modifier':
|
||||||
|
# The modifier can be 'IMPLICIT:<tag_number><tag_class>,' or 'EXPLICIT:<tag_number><tag_class>' where IMPLICIT
|
||||||
|
# changes the tag of the universal value to encode and EXPLICIT prefixes its tag to the existing universal value.
|
||||||
|
# The tag_number must be set while the tag_class can be 'U', 'A', 'P', or 'C" for 'Universal', 'Application',
|
||||||
|
# 'Private', or 'Context Specific' with C being the default.
|
||||||
|
#
|
||||||
|
# 'type':
|
||||||
|
# The underlying ASN.1 type of the value specified. Currently only the following have been implemented:
|
||||||
|
# UTF8: The value must be a UTF-8 encoded string.
|
||||||
|
#
|
||||||
|
# 'value':
|
||||||
|
# The value to encode, the format of this value depends on the <type> specified.
|
||||||
|
ASN1_STRING_REGEX = re.compile(
|
||||||
|
r"^((?P<tag_type>IMPLICIT|EXPLICIT):(?P<tag_number>\d+)(?P<tag_class>U|A|P|C)?,)?"
|
||||||
|
r"(?P<value_type>[\w\d]+):(?P<value>.*)"
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
class TagClass(enum.Enum):
|
||||||
|
universal = 0
|
||||||
|
application = 1
|
||||||
|
context_specific = 2
|
||||||
|
private = 3
|
||||||
|
|
||||||
|
|
||||||
|
# Universal tag numbers that can be encoded.
|
||||||
|
class TagNumber(enum.Enum):
|
||||||
|
utf8_string = 12
|
||||||
|
|
||||||
|
|
||||||
|
def _pack_octet_integer(value: int) -> bytes:
|
||||||
|
"""Packs an integer value into 1 or multiple octets."""
|
||||||
|
# NOTE: This is *NOT* the same as packing an ASN.1 INTEGER like value.
|
||||||
|
octets = bytearray()
|
||||||
|
|
||||||
|
# Continue to shift the number by 7 bits and pack into an octet until the
|
||||||
|
# value is fully packed.
|
||||||
|
while value:
|
||||||
|
octet_value = value & 0b01111111
|
||||||
|
|
||||||
|
# First round (last octet) must have the MSB set.
|
||||||
|
if len(octets):
|
||||||
|
octet_value |= 0b10000000
|
||||||
|
|
||||||
|
octets.append(octet_value)
|
||||||
|
value >>= 7
|
||||||
|
|
||||||
|
# Reverse to ensure the higher order octets are first.
|
||||||
|
octets.reverse()
|
||||||
|
return bytes(octets)
|
||||||
|
|
||||||
|
|
||||||
|
def serialize_asn1_string_as_der(value: str) -> bytes:
|
||||||
|
"""Deserializes an ASN.1 string to a DER encoded byte string."""
|
||||||
|
asn1_match = ASN1_STRING_REGEX.match(value)
|
||||||
|
if not asn1_match:
|
||||||
|
raise ValueError(
|
||||||
|
"The ASN.1 serialized string must be in the format [modifier,]type[:value]"
|
||||||
|
)
|
||||||
|
|
||||||
|
tag_type = asn1_match.group("tag_type")
|
||||||
|
tag_number = asn1_match.group("tag_number")
|
||||||
|
tag_class = asn1_match.group("tag_class") or "C"
|
||||||
|
value_type = asn1_match.group("value_type")
|
||||||
|
asn1_value = asn1_match.group("value")
|
||||||
|
|
||||||
|
if value_type != "UTF8":
|
||||||
|
raise ValueError(
|
||||||
|
f'The ASN.1 serialized string is not a known type "{value_type}", only UTF8 types are supported'
|
||||||
|
)
|
||||||
|
|
||||||
|
b_value = to_bytes(asn1_value, encoding="utf-8", errors="surrogate_or_strict")
|
||||||
|
|
||||||
|
# We should only do a universal type tag if not IMPLICITLY tagged or the tag class is not universal.
|
||||||
|
if not tag_type or (tag_type == "EXPLICIT" and tag_class != "U"):
|
||||||
|
b_value = pack_asn1(
|
||||||
|
tag_class=TagClass.universal,
|
||||||
|
constructed=False,
|
||||||
|
tag_number=TagNumber.utf8_string,
|
||||||
|
b_data=b_value,
|
||||||
|
)
|
||||||
|
|
||||||
|
if tag_type:
|
||||||
|
tag_class_enum = {
|
||||||
|
"U": TagClass.universal,
|
||||||
|
"A": TagClass.application,
|
||||||
|
"P": TagClass.private,
|
||||||
|
"C": TagClass.context_specific,
|
||||||
|
}[tag_class]
|
||||||
|
|
||||||
|
# When adding support for more types this should be looked into further. For now it works with UTF8Strings.
|
||||||
|
constructed = tag_type == "EXPLICIT" and tag_class_enum != TagClass.universal
|
||||||
|
b_value = pack_asn1(
|
||||||
|
tag_class=tag_class_enum,
|
||||||
|
constructed=constructed,
|
||||||
|
tag_number=int(tag_number),
|
||||||
|
b_data=b_value,
|
||||||
|
)
|
||||||
|
|
||||||
|
return b_value
|
||||||
|
|
||||||
|
|
||||||
|
def pack_asn1(
    *,
    tag_class: TagClass,
    constructed: bool,
    tag_number: TagNumber | int,
    b_data: bytes,
) -> bytes:
    """Pack the value into an ASN.1 (BER/DER) TLV structure.

    The structure for an ASN.1 element is

    | Identifier Octet(s) | Length Octet(s) | Data Octet(s) |
    """
    number = tag_number.value if isinstance(tag_number, TagNumber) else tag_number

    # Identifier octet: bits 8-7 are the class, bit 6 flags constructed
    # encoding, bits 5-1 hold a small tag number.
    leading = (tag_class.value << 6) | ((1 if constructed else 0) << 5)

    out = bytearray()
    if number < 31:
        # Tag number fits directly into the low five bits.
        out.append(leading | number)
    else:
        # All five low bits set signals that the tag number follows in
        # additional octet(s).
        out.append(leading | 31)
        out.extend(_pack_octet_integer(number))

    data_len = len(b_data)
    if data_len < 128:
        # Short form: a single octet with the MSB clear.
        out.append(data_len)
    else:
        # Long form: encode the length big-endian across multiple octets.
        encoded = bytearray()
        remaining = data_len
        while remaining:
            encoded.append(remaining & 0b11111111)
            remaining >>= 8
        encoded.reverse()  # Higher octets first.

        # The first length octet has the MSB set plus the count of the
        # following length octets.
        out.append(len(encoded) | 0b10000000)
        out.extend(encoded)

    return bytes(out) + b_data
|
||||||
|
|
||||||
|
|
||||||
|
__all__ = ("TagClass", "TagNumber", "serialize_asn1_string_as_der", "pack_asn1")
|
||||||
@@ -4,6 +4,9 @@
|
|||||||
# dynamically by Ansible, still belong to the author of the module, and may assign
|
# dynamically by Ansible, still belong to the author of the module, and may assign
|
||||||
# their own license to the complete work.
|
# their own license to the complete work.
|
||||||
|
|
||||||
|
# Note that this module util is **PRIVATE** to the collection. It can have breaking changes at any time.
|
||||||
|
# Do not use this from other collections or standalone plugins/modules!
|
||||||
|
|
||||||
# This excerpt is dual licensed under the terms of the Apache License, Version
|
# This excerpt is dual licensed under the terms of the Apache License, Version
|
||||||
# 2.0, and the BSD License. See the LICENSE file at
|
# 2.0, and the BSD License. See the LICENSE file at
|
||||||
# https://github.com/pyca/cryptography/blob/master/LICENSE for complete details.
|
# https://github.com/pyca/cryptography/blob/master/LICENSE for complete details.
|
||||||
@@ -26,15 +29,15 @@
|
|||||||
# pyca/cryptography@3057f91ea9a05fb593825006d87a391286a4d828
|
# pyca/cryptography@3057f91ea9a05fb593825006d87a391286a4d828
|
||||||
# pyca/cryptography@d607dd7e5bc5c08854ec0c9baff70ba4a35be36f
|
# pyca/cryptography@d607dd7e5bc5c08854ec0c9baff70ba4a35be36f
|
||||||
|
|
||||||
from __future__ import absolute_import, division, print_function
|
from __future__ import annotations
|
||||||
__metaclass__ = type
|
|
||||||
|
|
||||||
|
|
||||||
# WARNING: this function no longer works with cryptography 35.0.0 and newer!
|
# WARNING: this function no longer works with cryptography 35.0.0 and newer!
|
||||||
# It must **ONLY** be used in compatibility code for older
|
# It must **ONLY** be used in compatibility code for older
|
||||||
# cryptography versions!
|
# cryptography versions!
|
||||||
|
|
||||||
def obj2txt(openssl_lib, openssl_ffi, obj):
|
|
||||||
|
def obj2txt(openssl_lib, openssl_ffi, obj) -> str:
|
||||||
# Set to 80 on the recommendation of
|
# Set to 80 on the recommendation of
|
||||||
# https://www.openssl.org/docs/crypto/OBJ_nid2ln.html#return_values
|
# https://www.openssl.org/docs/crypto/OBJ_nid2ln.html#return_values
|
||||||
#
|
#
|
||||||
@@ -55,3 +58,6 @@ def obj2txt(openssl_lib, openssl_ffi, obj):
|
|||||||
buf = openssl_ffi.new("char[]", buf_len)
|
buf = openssl_ffi.new("char[]", buf_len)
|
||||||
res = openssl_lib.OBJ_obj2txt(buf, buf_len, obj, 1)
|
res = openssl_lib.OBJ_obj2txt(buf, buf_len, obj, 1)
|
||||||
return openssl_ffi.buffer(buf, res)[:].decode()
|
return openssl_ffi.buffer(buf, res)[:].decode()
|
||||||
|
|
||||||
|
|
||||||
|
__all__ = ("obj2txt",)
|
||||||
38
plugins/module_utils/_crypto/_objects.py
Normal file
38
plugins/module_utils/_crypto/_objects.py
Normal file
@@ -0,0 +1,38 @@
|
|||||||
|
# Copyright (c) 2019, Felix Fontein <felix@fontein.de>
|
||||||
|
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||||
|
# SPDX-License-Identifier: GPL-3.0-or-later
|
||||||
|
|
||||||
|
# Note that this module util is **PRIVATE** to the collection. It can have breaking changes at any time.
|
||||||
|
# Do not use this from other collections or standalone plugins/modules!
|
||||||
|
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
from ansible_collections.community.crypto.plugins.module_utils._crypto._objects_data import (
|
||||||
|
OID_MAP,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
# Reverse lookup tables derived from OID_MAP at import time:
#   OID_LOOKUP            -- any known name -> dotted OID string
#   NORMALIZE_NAMES       -- any known name -> canonical (first-listed) name
#   NORMALIZE_NAMES_SHORT -- any known name -> shortest (last-listed) name
OID_LOOKUP: dict[str, str] = {}
NORMALIZE_NAMES: dict[str, str] = {}
NORMALIZE_NAMES_SHORT: dict[str, str] = {}

for dotted, names in OID_MAP.items():
    canonical = names[0]
    short = names[-1]
    for name in names:
        # A name may only repeat if it resolves to the same OID.
        if name in NORMALIZE_NAMES and OID_LOOKUP[name] != dotted:
            raise AssertionError(
                f'Name collision during setup: "{name}" for OIDs {dotted} and {OID_LOOKUP[name]}'
            )
        NORMALIZE_NAMES[name] = canonical
        NORMALIZE_NAMES_SHORT[name] = short
        OID_LOOKUP[name] = dotted

# Additional aliases that are not listed in OID_MAP itself.
for alias, original in [("userID", "userId")]:
    if alias in NORMALIZE_NAMES:
        raise AssertionError(
            f'Name collision during adding aliases: "{alias}" (alias for "{original}") is already mapped to OID {OID_LOOKUP[alias]}'
        )
    NORMALIZE_NAMES[alias] = original
    NORMALIZE_NAMES_SHORT[alias] = NORMALIZE_NAMES_SHORT[original]
    OID_LOOKUP[alias] = OID_LOOKUP[original]
|
||||||
|
|
||||||
|
|
||||||
|
__all__ = ("OID_LOOKUP", "NORMALIZE_NAMES", "NORMALIZE_NAMES_SHORT")
|
||||||
1180
plugins/module_utils/_crypto/_objects_data.py
Normal file
1180
plugins/module_utils/_crypto/_objects_data.py
Normal file
File diff suppressed because it is too large
Load Diff
29
plugins/module_utils/_crypto/basic.py
Normal file
29
plugins/module_utils/_crypto/basic.py
Normal file
@@ -0,0 +1,29 @@
|
|||||||
|
# Copyright (c) 2016, Yanis Guenane <yanis+ansible@guenane.org>
|
||||||
|
# Copyright (c) 2020, Felix Fontein <felix@fontein.de>
|
||||||
|
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||||
|
# SPDX-License-Identifier: GPL-3.0-or-later
|
||||||
|
|
||||||
|
# Note that this module util is **PRIVATE** to the collection. It can have breaking changes at any time.
|
||||||
|
# Do not use this from other collections or standalone plugins/modules!
|
||||||
|
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
|
||||||
|
try:
|
||||||
|
import cryptography # noqa: F401, pylint: disable=unused-import
|
||||||
|
|
||||||
|
HAS_CRYPTOGRAPHY = True
|
||||||
|
except ImportError:
|
||||||
|
# Error handled in the calling module.
|
||||||
|
HAS_CRYPTOGRAPHY = False
|
||||||
|
|
||||||
|
|
||||||
|
class OpenSSLObjectError(Exception):
    """Base exception for errors raised by this collection's crypto helpers."""


class OpenSSLBadPassphraseError(OpenSSLObjectError):
    """Raised for passphrase-related failures (per its use by callers — TODO confirm)."""
|
||||||
|
|
||||||
|
|
||||||
|
__all__ = ("HAS_CRYPTOGRAPHY", "OpenSSLObjectError", "OpenSSLBadPassphraseError")
|
||||||
210
plugins/module_utils/_crypto/cryptography_crl.py
Normal file
210
plugins/module_utils/_crypto/cryptography_crl.py
Normal file
@@ -0,0 +1,210 @@
|
|||||||
|
# Copyright (c) 2019, Felix Fontein <felix@fontein.de>
|
||||||
|
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||||
|
# SPDX-License-Identifier: GPL-3.0-or-later
|
||||||
|
|
||||||
|
# Note that this module util is **PRIVATE** to the collection. It can have breaking changes at any time.
|
||||||
|
# Do not use this from other collections or standalone plugins/modules!
|
||||||
|
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import typing as t
|
||||||
|
|
||||||
|
from ansible_collections.community.crypto.plugins.module_utils._version import (
|
||||||
|
LooseVersion as _LooseVersion,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
try:
|
||||||
|
import cryptography
|
||||||
|
from cryptography import x509
|
||||||
|
except ImportError:
|
||||||
|
# Error handled in the calling module.
|
||||||
|
pass
|
||||||
|
|
||||||
|
from ansible_collections.community.crypto.plugins.module_utils._crypto._obj2txt import (
|
||||||
|
obj2txt,
|
||||||
|
)
|
||||||
|
from ansible_collections.community.crypto.plugins.module_utils._crypto.basic import (
|
||||||
|
HAS_CRYPTOGRAPHY,
|
||||||
|
)
|
||||||
|
from ansible_collections.community.crypto.plugins.module_utils._crypto.cryptography_support import (
|
||||||
|
CRYPTOGRAPHY_TIMEZONE,
|
||||||
|
cryptography_decode_name,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
if t.TYPE_CHECKING:
|
||||||
|
import datetime
|
||||||
|
|
||||||
|
|
||||||
|
# TODO: once cryptography has a _utc variant of InvalidityDate.invalidity_date, set this
# to True and adjust get_invalidity_date() accordingly.
# (https://github.com/pyca/cryptography/issues/10818)
CRYPTOGRAPHY_TIMEZONE_INVALIDITY_DATE = False
if HAS_CRYPTOGRAPHY:
    # invalidity_date_utc is available from cryptography 43.0.0 onwards.
    CRYPTOGRAPHY_TIMEZONE_INVALIDITY_DATE = _LooseVersion(
        cryptography.__version__
    ) >= _LooseVersion("43.0.0")

# strftime pattern used for serialized CRL timestamps.
TIMESTAMP_FORMAT = "%Y%m%d%H%M%SZ"


if HAS_CRYPTOGRAPHY:
    # Mapping between this collection's reason strings and cryptography's
    # ReasonFlags values, plus the inverse direction.
    REVOCATION_REASON_MAP = {
        "unspecified": x509.ReasonFlags.unspecified,
        "key_compromise": x509.ReasonFlags.key_compromise,
        "ca_compromise": x509.ReasonFlags.ca_compromise,
        "affiliation_changed": x509.ReasonFlags.affiliation_changed,
        "superseded": x509.ReasonFlags.superseded,
        "cessation_of_operation": x509.ReasonFlags.cessation_of_operation,
        "certificate_hold": x509.ReasonFlags.certificate_hold,
        "privilege_withdrawn": x509.ReasonFlags.privilege_withdrawn,
        "aa_compromise": x509.ReasonFlags.aa_compromise,
        "remove_from_crl": x509.ReasonFlags.remove_from_crl,
    }
    REVOCATION_REASON_MAP_INVERSE = {
        flag: name for name, flag in REVOCATION_REASON_MAP.items()
    }
else:
    REVOCATION_REASON_MAP = {}
    REVOCATION_REASON_MAP_INVERSE = {}
|
||||||
|
|
||||||
|
|
||||||
|
def cryptography_decode_revoked_certificate(
    cert: x509.RevokedCertificate,
) -> dict[str, t.Any]:
    """Decode one RevokedCertificate entry of a CRL into a plain dict.

    Extension-derived keys keep their defaults (``None`` / ``False``) when the
    corresponding extension is absent.
    """
    decoded: dict[str, t.Any] = {
        "serial_number": cert.serial_number,
        "revocation_date": get_revocation_date(cert),
        "issuer": None,
        "issuer_critical": False,
        "reason": None,
        "reason_critical": False,
        "invalidity_date": None,
        "invalidity_date_critical": False,
    }

    try:
        issuer_ext = cert.extensions.get_extension_for_class(x509.CertificateIssuer)
    except x509.ExtensionNotFound:
        pass
    else:
        decoded["issuer"] = list(issuer_ext.value)
        decoded["issuer_critical"] = issuer_ext.critical

    try:
        reason_ext = cert.extensions.get_extension_for_class(x509.CRLReason)
    except x509.ExtensionNotFound:
        pass
    else:
        decoded["reason"] = reason_ext.value.reason
        decoded["reason_critical"] = reason_ext.critical

    try:
        invalidity_ext = cert.extensions.get_extension_for_class(x509.InvalidityDate)
    except x509.ExtensionNotFound:
        pass
    else:
        decoded["invalidity_date"] = get_invalidity_date(invalidity_ext.value)
        decoded["invalidity_date_critical"] = invalidity_ext.critical

    return decoded
|
||||||
|
|
||||||
|
|
||||||
|
def cryptography_dump_revoked(
    entry: dict[str, t.Any],
    *,
    idn_rewrite: t.Literal["ignore", "idna", "unicode"] = "ignore",
) -> dict[str, t.Any]:
    """Render a decoded revoked-certificate entry into serializable values.

    Datetimes become TIMESTAMP_FORMAT strings, issuer names are passed through
    ``cryptography_decode_name``, and reason flags map back to their string names.
    """
    issuer = entry["issuer"]
    if issuer is not None:
        issuer = [
            cryptography_decode_name(name, idn_rewrite=idn_rewrite)
            for name in issuer
        ]

    reason = entry["reason"]
    if reason is not None:
        reason = REVOCATION_REASON_MAP_INVERSE.get(reason)

    invalidity_date = entry["invalidity_date"]
    if invalidity_date is not None:
        invalidity_date = invalidity_date.strftime(TIMESTAMP_FORMAT)

    return {
        "serial_number": entry["serial_number"],
        "revocation_date": entry["revocation_date"].strftime(TIMESTAMP_FORMAT),
        "issuer": issuer,
        "issuer_critical": entry["issuer_critical"],
        "reason": reason,
        "reason_critical": entry["reason_critical"],
        "invalidity_date": invalidity_date,
        "invalidity_date_critical": entry["invalidity_date_critical"],
    }
|
||||||
|
|
||||||
|
|
||||||
|
def cryptography_get_signature_algorithm_oid_from_crl(
    crl: x509.CertificateRevocationList,
) -> x509.oid.ObjectIdentifier:
    """Return the OID of the algorithm that signed the given CRL."""
    if hasattr(crl, "signature_algorithm_oid"):
        return crl.signature_algorithm_oid
    # Older cryptography versions do not have signature_algorithm_oid yet;
    # read the OID text straight from the OpenSSL backend objects instead.
    dotted = obj2txt(
        crl._backend._lib,  # type: ignore[attr-defined] # pylint: disable=protected-access
        crl._backend._ffi,  # type: ignore[attr-defined] # pylint: disable=protected-access
        crl._x509_crl.sig_alg.algorithm,  # type: ignore[attr-defined] # pylint: disable=protected-access
    )
    return x509.oid.ObjectIdentifier(dotted)
|
||||||
|
|
||||||
|
|
||||||
|
def get_next_update(obj: x509.CertificateRevocationList) -> datetime.datetime | None:
    """Return the CRL's nextUpdate field, timezone-aware when the installed
    cryptography supports it (per the module-level CRYPTOGRAPHY_TIMEZONE flag)."""
    return obj.next_update_utc if CRYPTOGRAPHY_TIMEZONE else obj.next_update
|
||||||
|
|
||||||
|
|
||||||
|
def get_last_update(obj: x509.CertificateRevocationList) -> datetime.datetime:
    """Return the CRL's lastUpdate field, timezone-aware when the installed
    cryptography supports it (per the module-level CRYPTOGRAPHY_TIMEZONE flag)."""
    return obj.last_update_utc if CRYPTOGRAPHY_TIMEZONE else obj.last_update
|
||||||
|
|
||||||
|
|
||||||
|
def get_revocation_date(obj: x509.RevokedCertificate) -> datetime.datetime:
    """Return the entry's revocation date, timezone-aware when the installed
    cryptography supports it (per the module-level CRYPTOGRAPHY_TIMEZONE flag)."""
    return obj.revocation_date_utc if CRYPTOGRAPHY_TIMEZONE else obj.revocation_date
|
||||||
|
|
||||||
|
|
||||||
|
def get_invalidity_date(obj: x509.InvalidityDate) -> datetime.datetime:
    """Return the extension's invalidity date; uses the _utc accessor when
    CRYPTOGRAPHY_TIMEZONE_INVALIDITY_DATE says the installed cryptography has it."""
    if CRYPTOGRAPHY_TIMEZONE_INVALIDITY_DATE:
        return obj.invalidity_date_utc
    return obj.invalidity_date
|
||||||
|
|
||||||
|
|
||||||
|
def set_next_update(
    builder: x509.CertificateRevocationListBuilder, *, value: datetime.datetime
) -> x509.CertificateRevocationListBuilder:
    """Apply *value* as the builder's nextUpdate and return the updated builder."""
    updated = builder.next_update(value)
    return updated
|
||||||
|
|
||||||
|
|
||||||
|
def set_last_update(
    builder: x509.CertificateRevocationListBuilder, *, value: datetime.datetime
) -> x509.CertificateRevocationListBuilder:
    """Apply *value* as the builder's lastUpdate and return the updated builder."""
    updated = builder.last_update(value)
    return updated
|
||||||
|
|
||||||
|
|
||||||
|
def set_revocation_date(
    builder: x509.RevokedCertificateBuilder, *, value: datetime.datetime
) -> x509.RevokedCertificateBuilder:
    """Apply *value* as the entry's revocation date and return the updated builder."""
    updated = builder.revocation_date(value)
    return updated
|
||||||
|
|
||||||
|
|
||||||
|
__all__ = (
|
||||||
|
"REVOCATION_REASON_MAP",
|
||||||
|
"REVOCATION_REASON_MAP_INVERSE",
|
||||||
|
"cryptography_decode_revoked_certificate",
|
||||||
|
"cryptography_dump_revoked",
|
||||||
|
"cryptography_get_signature_algorithm_oid_from_crl",
|
||||||
|
"get_next_update",
|
||||||
|
"get_last_update",
|
||||||
|
"get_revocation_date",
|
||||||
|
"get_invalidity_date",
|
||||||
|
"set_next_update",
|
||||||
|
"set_last_update",
|
||||||
|
"set_revocation_date",
|
||||||
|
)
|
||||||
1134
plugins/module_utils/_crypto/cryptography_support.py
Normal file
1134
plugins/module_utils/_crypto/cryptography_support.py
Normal file
File diff suppressed because it is too large
Load Diff
170
plugins/module_utils/_crypto/math.py
Normal file
170
plugins/module_utils/_crypto/math.py
Normal file
@@ -0,0 +1,170 @@
|
|||||||
|
# Copyright (c) 2019, Felix Fontein <felix@fontein.de>
|
||||||
|
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||||
|
# SPDX-License-Identifier: GPL-3.0-or-later
|
||||||
|
|
||||||
|
# Note that this module util is **PRIVATE** to the collection. It can have breaking changes at any time.
|
||||||
|
# Do not use this from other collections or standalone plugins/modules!
|
||||||
|
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
|
||||||
|
def binary_exp_mod(f: int, e: int, *, m: int) -> int:
    """Compute ``f ** e`` modulo ``m`` via fast modular exponentiation.

    Delegates to the built-in three-argument ``pow()``, which implements binary
    exponentiation in C — same O(log e) multiplication count as the previous
    hand-rolled loop, but faster and simpler. Unlike the old loop, the result
    is always fully reduced modulo ``m`` (notably ``e == 0`` with ``m == 1``
    now correctly yields 0 instead of 1).
    """
    return pow(f, e, m)
|
||||||
|
|
||||||
|
|
||||||
|
def simple_gcd(a: int, b: int) -> int:
    """Compute the greatest common divisor of the two inputs.

    Plain Euclidean algorithm. The result's sign follows Python's ``%``
    semantics for the inputs, so this is deliberately not swapped for
    ``math.gcd`` (which always returns a non-negative value).
    """
    while b:
        a, b = b, a % b
    return a
|
||||||
|
|
||||||
|
|
||||||
|
def quick_is_not_prime(n: int) -> bool:
    """Cheaply try to disprove the primality of ``n``.

    Returns ``True`` only when ``n`` is definitely not prime. A ``False``
    result means "no quick disproof found" — it does **not** certify primality.
    """
    if n <= 2:
        # Everything below 2 is not prime; 2 itself is prime.
        return n < 2
    # The constant in the next line is the product of all primes < 200; a
    # non-trivial GCD with it exposes any prime factor below 200.
    prime_product = 7799922041683461553249199106329813876687996789903550945093032474868511536164700810
    gcd = simple_gcd(n, prime_product)
    if gcd == 1:
        # No small prime factor detected.
        # TODO: maybe do some iterations of Miller-Rabin to increase confidence
        # (https://en.wikipedia.org/wiki/Miller%E2%80%93Rabin_primality_test)
        return False
    if n >= 200 or gcd != n:
        # n shares a factor < 200 without being a small prime itself.
        return True
    # n < 200 and gcd == n: n is composed entirely of primes < 200, so it is
    # prime exactly when it appears in this explicit list.
    small_primes = {
        2, 3, 5, 7, 11, 13, 17, 19, 23, 29, 31, 37, 41, 43, 47, 53, 59, 61,
        67, 71, 73, 79, 83, 89, 97, 101, 103, 107, 109, 113, 127, 131, 137,
        139, 149, 151, 157, 163, 167, 173, 179, 181, 191, 193, 197, 199,
    }
    return n not in small_primes
|
||||||
|
|
||||||
|
|
||||||
|
def count_bytes(no: int) -> int:
    """
    Given an integer, compute the number of bytes necessary to store its absolute value.
    """
    # int.bit_length() of 0 is 0, so the zero case needs no special handling.
    return (abs(no).bit_length() + 7) // 8
|
||||||
|
|
||||||
|
|
||||||
|
def count_bits(no: int) -> int:
    """
    Given an integer, compute the number of bits necessary to store its absolute value.
    """
    # int.bit_length() of 0 is 0, so the zero case needs no special handling.
    return abs(no).bit_length()
|
||||||
|
|
||||||
|
|
||||||
|
def convert_int_to_bytes(no: int, *, count: int | None = None) -> bytes:
|
||||||
|
"""
|
||||||
|
Convert the absolute value of an integer to a byte string in network byte order.
|
||||||
|
|
||||||
|
If ``count`` is provided, it must be sufficiently large so that the integer's
|
||||||
|
absolute value can be represented with these number of bytes. The resulting byte
|
||||||
|
string will have length exactly ``count``.
|
||||||
|
|
||||||
|
The value zero will be converted to an empty byte string if ``count`` is provided.
|
||||||
|
"""
|
||||||
|
no = abs(no)
|
||||||
|
if count is None:
|
||||||
|
count = count_bytes(no)
|
||||||
|
return no.to_bytes(count, byteorder="big")
|
||||||
|
|
||||||
|
|
||||||
|
def convert_int_to_hex(no: int, *, digits: int | None = None) -> str:
    """
    Convert the absolute value of an integer to a string of lowercase hexadecimal digits.

    If ``digits`` is provided, the string is left-padded with ``0`` so that the
    returned value has length ``digits``; if ``digits`` is not sufficient, the
    string will be longer (it is never truncated).
    """
    value = format(abs(no), "x")
    if digits is not None:
        # str.zfill pads on the left with zeros and never truncates.
        value = value.zfill(digits)
    return value
|
||||||
|
|
||||||
|
|
||||||
|
def convert_bytes_to_int(data: bytes) -> int:
    """
    Interpret a byte string as an unsigned integer in network (big-endian) byte order.

    An empty byte string yields 0.
    """
    return int.from_bytes(data, "big")
|
||||||
|
|
||||||
|
|
||||||
|
__all__ = (
|
||||||
|
"binary_exp_mod",
|
||||||
|
"simple_gcd",
|
||||||
|
"quick_is_not_prime",
|
||||||
|
"count_bytes",
|
||||||
|
"count_bits",
|
||||||
|
"convert_int_to_bytes",
|
||||||
|
"convert_int_to_hex",
|
||||||
|
"convert_bytes_to_int",
|
||||||
|
)
|
||||||
417
plugins/module_utils/_crypto/module_backends/certificate.py
Normal file
417
plugins/module_utils/_crypto/module_backends/certificate.py
Normal file
@@ -0,0 +1,417 @@
|
|||||||
|
# Copyright (c) 2016-2017, Yanis Guenane <yanis+ansible@guenane.org>
|
||||||
|
# Copyright (c) 2017, Markus Teufelberger <mteufelberger+ansible@mgit.at>
|
||||||
|
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||||
|
# SPDX-License-Identifier: GPL-3.0-or-later
|
||||||
|
|
||||||
|
# Note that this module util is **PRIVATE** to the collection. It can have breaking changes at any time.
|
||||||
|
# Do not use this from other collections or standalone plugins/modules!
|
||||||
|
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import abc
|
||||||
|
import typing as t
|
||||||
|
|
||||||
|
from ansible_collections.community.crypto.plugins.module_utils._argspec import (
|
||||||
|
ArgumentSpec,
|
||||||
|
)
|
||||||
|
from ansible_collections.community.crypto.plugins.module_utils._crypto.basic import (
|
||||||
|
OpenSSLBadPassphraseError,
|
||||||
|
OpenSSLObjectError,
|
||||||
|
)
|
||||||
|
from ansible_collections.community.crypto.plugins.module_utils._crypto.cryptography_support import (
|
||||||
|
cryptography_compare_public_keys,
|
||||||
|
get_not_valid_after,
|
||||||
|
get_not_valid_before,
|
||||||
|
)
|
||||||
|
from ansible_collections.community.crypto.plugins.module_utils._crypto.module_backends.certificate_info import (
|
||||||
|
get_certificate_info,
|
||||||
|
)
|
||||||
|
from ansible_collections.community.crypto.plugins.module_utils._crypto.support import (
|
||||||
|
load_certificate,
|
||||||
|
load_certificate_privatekey,
|
||||||
|
load_certificate_request,
|
||||||
|
)
|
||||||
|
from ansible_collections.community.crypto.plugins.module_utils._cryptography_dep import (
|
||||||
|
COLLECTION_MINIMUM_CRYPTOGRAPHY_VERSION,
|
||||||
|
assert_required_cryptography_version,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
if t.TYPE_CHECKING:
|
||||||
|
import datetime
|
||||||
|
|
||||||
|
from ansible.module_utils.basic import AnsibleModule
|
||||||
|
from ansible_collections.community.crypto.plugins.module_utils._crypto.cryptography_support import (
|
||||||
|
CertificatePrivateKeyTypes,
|
||||||
|
)
|
||||||
|
from cryptography.hazmat.primitives.asymmetric.types import (
|
||||||
|
CertificateIssuerPrivateKeyTypes,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
MINIMAL_CRYPTOGRAPHY_VERSION = COLLECTION_MINIMUM_CRYPTOGRAPHY_VERSION
|
||||||
|
|
||||||
|
try:
|
||||||
|
import cryptography
|
||||||
|
from cryptography import x509
|
||||||
|
except ImportError:
|
||||||
|
pass
|
||||||
|
|
||||||
|
|
||||||
|
class CertificateError(OpenSSLObjectError):
    """Error raised by the certificate module backends in this file."""
|
||||||
|
|
||||||
|
|
||||||
|
class CertificateBackend(metaclass=abc.ABCMeta):
|
||||||
|
def __init__(self, *, module: AnsibleModule) -> None:
|
||||||
|
self.module = module
|
||||||
|
|
||||||
|
self.force: bool = module.params["force"]
|
||||||
|
self.ignore_timestamps: bool = module.params["ignore_timestamps"]
|
||||||
|
self.privatekey_path: str | None = module.params["privatekey_path"]
|
||||||
|
privatekey_content: str | None = module.params["privatekey_content"]
|
||||||
|
if privatekey_content is not None:
|
||||||
|
self.privatekey_content: bytes | None = privatekey_content.encode("utf-8")
|
||||||
|
else:
|
||||||
|
self.privatekey_content = None
|
||||||
|
self.privatekey_passphrase: str | None = module.params["privatekey_passphrase"]
|
||||||
|
self.csr_path: str | None = module.params["csr_path"]
|
||||||
|
csr_content = module.params["csr_content"]
|
||||||
|
if csr_content is not None:
|
||||||
|
self.csr_content: bytes | None = csr_content.encode("utf-8")
|
||||||
|
else:
|
||||||
|
self.csr_content = None
|
||||||
|
|
||||||
|
# The following are default values which make sure check() works as
|
||||||
|
# before if providers do not explicitly change these properties.
|
||||||
|
self.create_subject_key_identifier: str = "never_create"
|
||||||
|
self.create_authority_key_identifier: bool = False
|
||||||
|
|
||||||
|
self.privatekey: CertificatePrivateKeyTypes | None = None
|
||||||
|
self.csr: x509.CertificateSigningRequest | None = None
|
||||||
|
self.cert: x509.Certificate | None = None
|
||||||
|
self.existing_certificate: x509.Certificate | None = None
|
||||||
|
self.existing_certificate_bytes: bytes | None = None
|
||||||
|
|
||||||
|
self.check_csr_subject: bool = True
|
||||||
|
self.check_csr_extensions: bool = True
|
||||||
|
|
||||||
|
self.diff_before = self._get_info(None)
|
||||||
|
self.diff_after = self._get_info(None)
|
||||||
|
|
||||||
|
def _get_info(self, data: bytes | None) -> dict[str, t.Any]:
|
||||||
|
if data is None:
|
||||||
|
return {}
|
||||||
|
try:
|
||||||
|
result = get_certificate_info(
|
||||||
|
module=self.module, content=data, prefer_one_fingerprint=True
|
||||||
|
)
|
||||||
|
result["can_parse_certificate"] = True
|
||||||
|
return result
|
||||||
|
except Exception:
|
||||||
|
return {"can_parse_certificate": False}
|
||||||
|
|
||||||
|
@abc.abstractmethod
|
||||||
|
def generate_certificate(self) -> None:
|
||||||
|
"""(Re-)Generate certificate."""
|
||||||
|
|
||||||
|
@abc.abstractmethod
|
||||||
|
def get_certificate_data(self) -> bytes:
|
||||||
|
"""Return bytes for self.cert."""
|
||||||
|
|
||||||
|
def set_existing(self, certificate_bytes: bytes | None) -> None:
|
||||||
|
"""Set existing certificate bytes. None indicates that the key does not exist."""
|
||||||
|
self.existing_certificate_bytes = certificate_bytes
|
||||||
|
self.diff_after = self.diff_before = self._get_info(
|
||||||
|
self.existing_certificate_bytes
|
||||||
|
)
|
||||||
|
|
||||||
|
def has_existing(self) -> bool:
|
||||||
|
"""Query whether an existing certificate is/has been there."""
|
||||||
|
return self.existing_certificate_bytes is not None
|
||||||
|
|
||||||
|
def _ensure_private_key_loaded(self) -> None:
|
||||||
|
"""Load the provided private key into self.privatekey."""
|
||||||
|
if self.privatekey is not None:
|
||||||
|
return
|
||||||
|
if self.privatekey_path is None and self.privatekey_content is None:
|
||||||
|
return
|
||||||
|
try:
|
||||||
|
self.privatekey = load_certificate_privatekey(
|
||||||
|
path=self.privatekey_path,
|
||||||
|
content=self.privatekey_content,
|
||||||
|
passphrase=self.privatekey_passphrase,
|
||||||
|
)
|
||||||
|
except OpenSSLBadPassphraseError as exc:
|
||||||
|
raise CertificateError(exc) from exc
|
||||||
|
|
||||||
|
def _ensure_csr_loaded(self) -> None:
|
||||||
|
"""Load the CSR into self.csr."""
|
||||||
|
if self.csr is not None:
|
||||||
|
return
|
||||||
|
if self.csr_path is None and self.csr_content is None:
|
||||||
|
return
|
||||||
|
self.csr = load_certificate_request(
|
||||||
|
path=self.csr_path,
|
||||||
|
content=self.csr_content,
|
||||||
|
)
|
||||||
|
|
||||||
|
def _ensure_existing_certificate_loaded(self) -> None:
|
||||||
|
"""Load the existing certificate into self.existing_certificate."""
|
||||||
|
if self.existing_certificate is not None:
|
||||||
|
return
|
||||||
|
if self.existing_certificate_bytes is None:
|
||||||
|
return
|
||||||
|
self.existing_certificate = load_certificate(
|
||||||
|
path=None,
|
||||||
|
content=self.existing_certificate_bytes,
|
||||||
|
)
|
||||||
|
|
||||||
|
def _check_privatekey(self) -> bool:
|
||||||
|
"""Check whether provided parameters match, assuming self.existing_certificate and self.privatekey have been populated."""
|
||||||
|
if self.existing_certificate is None:
|
||||||
|
raise AssertionError(
|
||||||
|
"Contract violation: existing_certificate has not been populated"
|
||||||
|
)
|
||||||
|
if self.privatekey is None:
|
||||||
|
raise AssertionError(
|
||||||
|
"Contract violation: privatekey has not been populated"
|
||||||
|
)
|
||||||
|
return cryptography_compare_public_keys(
|
||||||
|
self.existing_certificate.public_key(), self.privatekey.public_key()
|
||||||
|
)
|
||||||
|
|
||||||
|
def _check_csr(self) -> bool:
|
||||||
|
"""Check whether provided parameters match, assuming self.existing_certificate and self.csr have been populated."""
|
||||||
|
if self.existing_certificate is None:
|
||||||
|
raise AssertionError(
|
||||||
|
"Contract violation: existing_certificate has not been populated"
|
||||||
|
)
|
||||||
|
if self.csr is None:
|
||||||
|
raise AssertionError("Contract violation: csr has not been populated")
|
||||||
|
# Verify that CSR is signed by certificate's private key
|
||||||
|
if not self.csr.is_signature_valid:
|
||||||
|
return False
|
||||||
|
if not cryptography_compare_public_keys(
|
||||||
|
self.csr.public_key(), self.existing_certificate.public_key()
|
||||||
|
):
|
||||||
|
return False
|
||||||
|
# Check subject
|
||||||
|
if (
|
||||||
|
self.check_csr_subject
|
||||||
|
and self.csr.subject != self.existing_certificate.subject
|
||||||
|
):
|
||||||
|
return False
|
||||||
|
# Check extensions
|
||||||
|
if not self.check_csr_extensions:
|
||||||
|
return True
|
||||||
|
cert_exts = list(self.existing_certificate.extensions)
|
||||||
|
csr_exts = list(self.csr.extensions)
|
||||||
|
if self.create_subject_key_identifier != "never_create":
|
||||||
|
# Filter out SubjectKeyIdentifier extension before comparison
|
||||||
|
cert_exts = list(
|
||||||
|
filter(
|
||||||
|
lambda x: not isinstance(x.value, x509.SubjectKeyIdentifier),
|
||||||
|
cert_exts,
|
||||||
|
)
|
||||||
|
)
|
||||||
|
csr_exts = list(
|
||||||
|
filter(
|
||||||
|
lambda x: not isinstance(x.value, x509.SubjectKeyIdentifier),
|
||||||
|
csr_exts,
|
||||||
|
)
|
||||||
|
)
|
||||||
|
if self.create_authority_key_identifier:
|
||||||
|
# Filter out AuthorityKeyIdentifier extension before comparison
|
||||||
|
cert_exts = list(
|
||||||
|
filter(
|
||||||
|
lambda x: not isinstance(x.value, x509.AuthorityKeyIdentifier),
|
||||||
|
cert_exts,
|
||||||
|
)
|
||||||
|
)
|
||||||
|
csr_exts = list(
|
||||||
|
filter(
|
||||||
|
lambda x: not isinstance(x.value, x509.AuthorityKeyIdentifier),
|
||||||
|
csr_exts,
|
||||||
|
)
|
||||||
|
)
|
||||||
|
if len(cert_exts) != len(csr_exts):
|
||||||
|
return False
|
||||||
|
for cert_ext in cert_exts:
|
||||||
|
try:
|
||||||
|
csr_ext = self.csr.extensions.get_extension_for_oid(cert_ext.oid)
|
||||||
|
if cert_ext != csr_ext:
|
||||||
|
return False
|
||||||
|
except cryptography.x509.ExtensionNotFound:
|
||||||
|
return False
|
||||||
|
return True
|
||||||
|
|
||||||
|
def _check_subject_key_identifier(self) -> bool:
|
||||||
|
"""Check whether Subject Key Identifier matches, assuming self.existing_certificate and self.csr have been populated."""
|
||||||
|
if self.existing_certificate is None:
|
||||||
|
raise AssertionError(
|
||||||
|
"Contract violation: existing_certificate has not been populated"
|
||||||
|
)
|
||||||
|
if self.csr is None:
|
||||||
|
raise AssertionError("Contract violation: csr has not been populated")
|
||||||
|
# Get hold of certificate's SKI
|
||||||
|
try:
|
||||||
|
ext = self.existing_certificate.extensions.get_extension_for_class(
|
||||||
|
x509.SubjectKeyIdentifier
|
||||||
|
)
|
||||||
|
except cryptography.x509.ExtensionNotFound:
|
||||||
|
return False
|
||||||
|
# Get hold of CSR's SKI for 'create_if_not_provided'
|
||||||
|
csr_ext = None
|
||||||
|
if self.create_subject_key_identifier == "create_if_not_provided":
|
||||||
|
try:
|
||||||
|
csr_ext = self.csr.extensions.get_extension_for_class(
|
||||||
|
x509.SubjectKeyIdentifier
|
||||||
|
)
|
||||||
|
except cryptography.x509.ExtensionNotFound:
|
||||||
|
pass
|
||||||
|
if csr_ext is None:
|
||||||
|
# If CSR had no SKI, or we chose to ignore it ('always_create'), compare with created SKI
|
||||||
|
if (
|
||||||
|
ext.value.digest
|
||||||
|
!= x509.SubjectKeyIdentifier.from_public_key(
|
||||||
|
self.existing_certificate.public_key()
|
||||||
|
).digest
|
||||||
|
):
|
||||||
|
return False
|
||||||
|
else:
|
||||||
|
# If CSR had SKI and we did not ignore it ('create_if_not_provided'), compare SKIs
|
||||||
|
if ext.value.digest != csr_ext.value.digest:
|
||||||
|
return False
|
||||||
|
return True
|
||||||
|
|
||||||
|
def needs_regeneration(
|
||||||
|
self,
|
||||||
|
*,
|
||||||
|
not_before: datetime.datetime | None = None,
|
||||||
|
not_after: datetime.datetime | None = None,
|
||||||
|
) -> bool:
|
||||||
|
"""Check whether a regeneration is necessary."""
|
||||||
|
if self.force or self.existing_certificate_bytes is None:
|
||||||
|
return True
|
||||||
|
|
||||||
|
try:
|
||||||
|
self._ensure_existing_certificate_loaded()
|
||||||
|
except Exception:
|
||||||
|
return True
|
||||||
|
assert self.existing_certificate is not None
|
||||||
|
|
||||||
|
# Check whether private key matches
|
||||||
|
self._ensure_private_key_loaded()
|
||||||
|
if self.privatekey is not None and not self._check_privatekey():
|
||||||
|
return True
|
||||||
|
|
||||||
|
# Check whether CSR matches
|
||||||
|
self._ensure_csr_loaded()
|
||||||
|
if self.csr is not None and not self._check_csr():
|
||||||
|
return True
|
||||||
|
|
||||||
|
# Check SubjectKeyIdentifier
|
||||||
|
if (
|
||||||
|
self.create_subject_key_identifier != "never_create"
|
||||||
|
and not self._check_subject_key_identifier()
|
||||||
|
):
|
||||||
|
return True
|
||||||
|
|
||||||
|
# Check not before
|
||||||
|
if not_before is not None and not self.ignore_timestamps:
|
||||||
|
if get_not_valid_before(self.existing_certificate) != not_before:
|
||||||
|
return True
|
||||||
|
|
||||||
|
# Check not after
|
||||||
|
if not_after is not None and not self.ignore_timestamps:
|
||||||
|
if get_not_valid_after(self.existing_certificate) != not_after:
|
||||||
|
return True
|
||||||
|
return False
|
||||||
|
|
||||||
|
def dump(self, *, include_certificate: bool) -> dict[str, t.Any]:
|
||||||
|
"""Serialize the object into a dictionary."""
|
||||||
|
result: dict[str, t.Any] = {
|
||||||
|
"privatekey": self.privatekey_path,
|
||||||
|
"csr": self.csr_path,
|
||||||
|
}
|
||||||
|
# Get hold of certificate bytes
|
||||||
|
certificate_bytes = self.existing_certificate_bytes
|
||||||
|
if self.cert is not None:
|
||||||
|
certificate_bytes = self.get_certificate_data()
|
||||||
|
self.diff_after = self._get_info(certificate_bytes)
|
||||||
|
if include_certificate:
|
||||||
|
# Store result
|
||||||
|
result["certificate"] = (
|
||||||
|
certificate_bytes.decode("utf-8") if certificate_bytes else None
|
||||||
|
)
|
||||||
|
|
||||||
|
result["diff"] = {
|
||||||
|
"before": self.diff_before,
|
||||||
|
"after": self.diff_after,
|
||||||
|
}
|
||||||
|
return result
|
||||||
|
|
||||||
|
|
||||||
|
class CertificateProvider(metaclass=abc.ABCMeta):
|
||||||
|
@abc.abstractmethod
|
||||||
|
def validate_module_args(self, module: AnsibleModule) -> None:
|
||||||
|
"""Check module arguments"""
|
||||||
|
|
||||||
|
@abc.abstractmethod
|
||||||
|
def create_backend(self, module: AnsibleModule) -> CertificateBackend:
|
||||||
|
"""Create an implementation for a backend.
|
||||||
|
|
||||||
|
Return value must be instance of CertificateBackend.
|
||||||
|
"""
|
||||||
|
|
||||||
|
|
||||||
|
def select_backend(
|
||||||
|
*, module: AnsibleModule, provider: CertificateProvider
|
||||||
|
) -> CertificateBackend:
|
||||||
|
provider.validate_module_args(module)
|
||||||
|
|
||||||
|
assert_required_cryptography_version(
|
||||||
|
module, minimum_cryptography_version=MINIMAL_CRYPTOGRAPHY_VERSION
|
||||||
|
)
|
||||||
|
|
||||||
|
return provider.create_backend(module)
|
||||||
|
|
||||||
|
|
||||||
|
def get_certificate_argument_spec() -> ArgumentSpec:
|
||||||
|
return ArgumentSpec(
|
||||||
|
argument_spec={
|
||||||
|
"provider": {
|
||||||
|
"type": "str",
|
||||||
|
"choices": [],
|
||||||
|
}, # choices will be filled by add_XXX_provider_to_argument_spec() in certificate_xxx.py
|
||||||
|
"force": {
|
||||||
|
"type": "bool",
|
||||||
|
"default": False,
|
||||||
|
},
|
||||||
|
"csr_path": {"type": "path"},
|
||||||
|
"csr_content": {"type": "str"},
|
||||||
|
"ignore_timestamps": {"type": "bool", "default": True},
|
||||||
|
"select_crypto_backend": {
|
||||||
|
"type": "str",
|
||||||
|
"default": "auto",
|
||||||
|
"choices": ["auto", "cryptography"],
|
||||||
|
},
|
||||||
|
# General properties of a certificate
|
||||||
|
"privatekey_path": {"type": "path"},
|
||||||
|
"privatekey_content": {"type": "str", "no_log": True},
|
||||||
|
"privatekey_passphrase": {"type": "str", "no_log": True},
|
||||||
|
},
|
||||||
|
mutually_exclusive=[
|
||||||
|
["csr_path", "csr_content"],
|
||||||
|
["privatekey_path", "privatekey_content"],
|
||||||
|
],
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
__all__ = (
|
||||||
|
"CertificateError",
|
||||||
|
"CertificateBackend",
|
||||||
|
"CertificateProvider",
|
||||||
|
"get_certificate_argument_spec",
|
||||||
|
)
|
||||||
145
plugins/module_utils/_crypto/module_backends/certificate_acme.py
Normal file
145
plugins/module_utils/_crypto/module_backends/certificate_acme.py
Normal file
@@ -0,0 +1,145 @@
|
|||||||
|
# Copyright (c) 2016-2017, Yanis Guenane <yanis+ansible@guenane.org>
|
||||||
|
# Copyright (c) 2017, Markus Teufelberger <mteufelberger+ansible@mgit.at>
|
||||||
|
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||||
|
# SPDX-License-Identifier: GPL-3.0-or-later
|
||||||
|
|
||||||
|
# Note that this module util is **PRIVATE** to the collection. It can have breaking changes at any time.
|
||||||
|
# Do not use this from other collections or standalone plugins/modules!
|
||||||
|
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import os
|
||||||
|
import tempfile
|
||||||
|
import traceback
|
||||||
|
import typing as t
|
||||||
|
|
||||||
|
from ansible.module_utils.common.text.converters import to_bytes
|
||||||
|
from ansible_collections.community.crypto.plugins.module_utils._crypto.module_backends.certificate import (
|
||||||
|
CertificateBackend,
|
||||||
|
CertificateError,
|
||||||
|
CertificateProvider,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
if t.TYPE_CHECKING:
|
||||||
|
from ansible.module_utils.basic import AnsibleModule
|
||||||
|
from ansible_collections.community.crypto.plugins.module_utils._argspec import (
|
||||||
|
ArgumentSpec,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
class AcmeCertificateBackend(CertificateBackend):
|
||||||
|
def __init__(self, *, module: AnsibleModule) -> None:
|
||||||
|
super().__init__(module=module)
|
||||||
|
self.accountkey_path: str = module.params["acme_accountkey_path"]
|
||||||
|
self.challenge_path: str = module.params["acme_challenge_path"]
|
||||||
|
self.use_chain: bool = module.params["acme_chain"]
|
||||||
|
self.acme_directory: str = module.params["acme_directory"]
|
||||||
|
self.cert_bytes: bytes | None = None
|
||||||
|
|
||||||
|
if self.csr_content is None:
|
||||||
|
if self.csr_path is None:
|
||||||
|
raise CertificateError(
|
||||||
|
"csr_path or csr_content is required for ownca provider"
|
||||||
|
)
|
||||||
|
if not os.path.exists(self.csr_path):
|
||||||
|
raise CertificateError(
|
||||||
|
f"The certificate signing request file {self.csr_path} does not exist"
|
||||||
|
)
|
||||||
|
|
||||||
|
if not os.path.exists(self.accountkey_path):
|
||||||
|
raise CertificateError(
|
||||||
|
f"The account key {self.accountkey_path} does not exist"
|
||||||
|
)
|
||||||
|
|
||||||
|
if not os.path.exists(self.challenge_path):
|
||||||
|
raise CertificateError(
|
||||||
|
f"The challenge path {self.challenge_path} does not exist"
|
||||||
|
)
|
||||||
|
|
||||||
|
self.acme_tiny_path = self.module.get_bin_path("acme-tiny", required=True)
|
||||||
|
|
||||||
|
def generate_certificate(self) -> None:
|
||||||
|
"""(Re-)Generate certificate."""
|
||||||
|
|
||||||
|
command = [self.acme_tiny_path]
|
||||||
|
if self.use_chain:
|
||||||
|
command.append("--chain")
|
||||||
|
command.extend(["--account-key", self.accountkey_path])
|
||||||
|
if self.csr_content is not None:
|
||||||
|
# We need to temporarily write the CSR to disk
|
||||||
|
fd, tmpsrc = tempfile.mkstemp()
|
||||||
|
self.module.add_cleanup_file(tmpsrc) # Ansible will delete the file on exit
|
||||||
|
f = os.fdopen(fd, "wb")
|
||||||
|
try:
|
||||||
|
f.write(self.csr_content)
|
||||||
|
except Exception as err:
|
||||||
|
try:
|
||||||
|
f.close()
|
||||||
|
except Exception:
|
||||||
|
pass
|
||||||
|
self.module.fail_json(
|
||||||
|
msg=f"failed to create temporary CSR file: {err}",
|
||||||
|
exception=traceback.format_exc(),
|
||||||
|
)
|
||||||
|
f.close()
|
||||||
|
command.extend(["--csr", tmpsrc])
|
||||||
|
else:
|
||||||
|
command.extend(["--csr", self.csr_path])
|
||||||
|
command.extend(["--acme-dir", self.challenge_path])
|
||||||
|
command.extend(["--directory-url", self.acme_directory])
|
||||||
|
|
||||||
|
try:
|
||||||
|
self.cert_bytes = to_bytes(
|
||||||
|
self.module.run_command(command, check_rc=True)[1]
|
||||||
|
)
|
||||||
|
except OSError as exc:
|
||||||
|
raise CertificateError(exc) from exc
|
||||||
|
|
||||||
|
def get_certificate_data(self) -> bytes:
|
||||||
|
"""Return bytes for self.cert."""
|
||||||
|
if self.cert_bytes is None:
|
||||||
|
raise AssertionError("Contract violation: cert_bytes is None")
|
||||||
|
return self.cert_bytes
|
||||||
|
|
||||||
|
def dump(self, *, include_certificate: bool) -> dict[str, t.Any]:
|
||||||
|
result = super().dump(include_certificate=include_certificate)
|
||||||
|
result["accountkey"] = self.accountkey_path
|
||||||
|
return result
|
||||||
|
|
||||||
|
|
||||||
|
class AcmeCertificateProvider(CertificateProvider):
|
||||||
|
def validate_module_args(self, module: AnsibleModule) -> None:
|
||||||
|
if module.params["acme_accountkey_path"] is None:
|
||||||
|
module.fail_json(
|
||||||
|
msg="The acme_accountkey_path option must be specified for the acme provider."
|
||||||
|
)
|
||||||
|
if module.params["acme_challenge_path"] is None:
|
||||||
|
module.fail_json(
|
||||||
|
msg="The acme_challenge_path option must be specified for the acme provider."
|
||||||
|
)
|
||||||
|
|
||||||
|
def create_backend(self, module: AnsibleModule) -> AcmeCertificateBackend:
|
||||||
|
return AcmeCertificateBackend(module=module)
|
||||||
|
|
||||||
|
|
||||||
|
def add_acme_provider_to_argument_spec(argument_spec: ArgumentSpec) -> None:
|
||||||
|
argument_spec.argument_spec["provider"]["choices"].append("acme")
|
||||||
|
argument_spec.argument_spec.update(
|
||||||
|
{
|
||||||
|
"acme_accountkey_path": {"type": "path"},
|
||||||
|
"acme_challenge_path": {"type": "path"},
|
||||||
|
"acme_chain": {"type": "bool", "default": False},
|
||||||
|
"acme_directory": {
|
||||||
|
"type": "str",
|
||||||
|
"default": "https://acme-v02.api.letsencrypt.org/directory",
|
||||||
|
},
|
||||||
|
}
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
__all__ = (
|
||||||
|
"AcmeCertificateBackend",
|
||||||
|
"AcmeCertificateProvider",
|
||||||
|
"add_acme_provider_to_argument_spec",
|
||||||
|
)
|
||||||
@@ -0,0 +1,297 @@
|
|||||||
|
# Copyright (c) 2016-2017, Yanis Guenane <yanis+ansible@guenane.org>
|
||||||
|
# Copyright (c) 2017, Markus Teufelberger <mteufelberger+ansible@mgit.at>
|
||||||
|
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||||
|
# SPDX-License-Identifier: GPL-3.0-or-later
|
||||||
|
|
||||||
|
# Note that this module util is **PRIVATE** to the collection. It can have breaking changes at any time.
|
||||||
|
# Do not use this from other collections or standalone plugins/modules!
|
||||||
|
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import datetime
|
||||||
|
import os
|
||||||
|
import typing as t
|
||||||
|
|
||||||
|
from ansible.module_utils.common.text.converters import to_bytes, to_text
|
||||||
|
from ansible_collections.community.crypto.plugins.module_utils._crypto.cryptography_support import (
|
||||||
|
CRYPTOGRAPHY_TIMEZONE,
|
||||||
|
get_not_valid_after,
|
||||||
|
)
|
||||||
|
from ansible_collections.community.crypto.plugins.module_utils._crypto.module_backends.certificate import (
|
||||||
|
CertificateBackend,
|
||||||
|
CertificateError,
|
||||||
|
CertificateProvider,
|
||||||
|
)
|
||||||
|
from ansible_collections.community.crypto.plugins.module_utils._crypto.support import (
|
||||||
|
load_certificate,
|
||||||
|
)
|
||||||
|
from ansible_collections.community.crypto.plugins.module_utils._ecs.api import (
|
||||||
|
ECSClient,
|
||||||
|
RestOperationException,
|
||||||
|
SessionConfigurationException,
|
||||||
|
)
|
||||||
|
from ansible_collections.community.crypto.plugins.module_utils._time import (
|
||||||
|
get_now_datetime,
|
||||||
|
get_relative_time_option,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
if t.TYPE_CHECKING:
|
||||||
|
from ansible.module_utils.basic import AnsibleModule
|
||||||
|
from ansible_collections.community.crypto.plugins.module_utils._argspec import (
|
||||||
|
ArgumentSpec,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
try:
|
||||||
|
from cryptography.x509.oid import NameOID
|
||||||
|
except ImportError:
|
||||||
|
pass
|
||||||
|
|
||||||
|
|
||||||
|
class EntrustCertificateBackend(CertificateBackend):
|
||||||
|
def __init__(self, *, module: AnsibleModule) -> None:
|
||||||
|
super().__init__(module=module)
|
||||||
|
self.trackingId = None
|
||||||
|
self.notAfter = get_relative_time_option(
|
||||||
|
module.params["entrust_not_after"],
|
||||||
|
input_name="entrust_not_after",
|
||||||
|
with_timezone=CRYPTOGRAPHY_TIMEZONE,
|
||||||
|
)
|
||||||
|
self.cert_bytes: bytes | None = None
|
||||||
|
|
||||||
|
if self.csr_content is None:
|
||||||
|
if self.csr_path is None:
|
||||||
|
raise CertificateError(
|
||||||
|
"csr_path or csr_content is required for entrust provider"
|
||||||
|
)
|
||||||
|
if not os.path.exists(self.csr_path):
|
||||||
|
raise CertificateError(
|
||||||
|
f"The certificate signing request file {self.csr_path} does not exist"
|
||||||
|
)
|
||||||
|
|
||||||
|
self._ensure_csr_loaded()
|
||||||
|
if self.csr is None:
|
||||||
|
raise CertificateError("CSR not provided")
|
||||||
|
|
||||||
|
# ECS API defaults to using the validated organization tied to the account.
|
||||||
|
# We want to always force behavior of trying to use the organization provided in the CSR.
|
||||||
|
# To that end we need to parse out the organization from the CSR.
|
||||||
|
self.csr_org = None
|
||||||
|
csr_subject_orgs = self.csr.subject.get_attributes_for_oid(
|
||||||
|
NameOID.ORGANIZATION_NAME
|
||||||
|
)
|
||||||
|
if len(csr_subject_orgs) == 1:
|
||||||
|
self.csr_org = csr_subject_orgs[0].value
|
||||||
|
elif len(csr_subject_orgs) > 1:
|
||||||
|
self.module.fail_json(
|
||||||
|
msg=(
|
||||||
|
"Entrust provider does not currently support multiple validated organizations. Multiple organizations found in "
|
||||||
|
f"Subject DN: '{self.csr.subject}'. "
|
||||||
|
)
|
||||||
|
)
|
||||||
|
# If no organization in the CSR, explicitly tell ECS that it should be blank in issued cert, not defaulted to
|
||||||
|
# organization tied to the account.
|
||||||
|
if self.csr_org is None:
|
||||||
|
self.csr_org = ""
|
||||||
|
|
||||||
|
try:
|
||||||
|
self.ecs_client = ECSClient(
|
||||||
|
entrust_api_user=self.module.params["entrust_api_user"],
|
||||||
|
entrust_api_key=self.module.params["entrust_api_key"],
|
||||||
|
entrust_api_cert=self.module.params["entrust_api_client_cert_path"],
|
||||||
|
entrust_api_cert_key=self.module.params[
|
||||||
|
"entrust_api_client_cert_key_path"
|
||||||
|
],
|
||||||
|
entrust_api_specification_path=self.module.params[
|
||||||
|
"entrust_api_specification_path"
|
||||||
|
],
|
||||||
|
)
|
||||||
|
except SessionConfigurationException as e:
|
||||||
|
module.fail_json(msg=f"Failed to initialize Entrust Provider: {e}")
|
||||||
|
|
||||||
|
def generate_certificate(self) -> None:
|
||||||
|
"""(Re-)Generate certificate."""
|
||||||
|
body = {}
|
||||||
|
|
||||||
|
# Read the CSR that was generated for us
|
||||||
|
if self.csr_content is not None:
|
||||||
|
# csr_content contains bytes
|
||||||
|
body["csr"] = to_text(self.csr_content)
|
||||||
|
else:
|
||||||
|
assert self.csr_path is not None
|
||||||
|
with open(self.csr_path, "r", encoding="utf-8") as csr_file:
|
||||||
|
body["csr"] = csr_file.read()
|
||||||
|
|
||||||
|
body["certType"] = self.module.params["entrust_cert_type"]
|
||||||
|
|
||||||
|
# Handle expiration (30 days if not specified)
|
||||||
|
expiry = self.notAfter
|
||||||
|
if not expiry:
|
||||||
|
gmt_now = get_now_datetime(with_timezone=CRYPTOGRAPHY_TIMEZONE)
|
||||||
|
expiry = gmt_now + datetime.timedelta(days=365)
|
||||||
|
|
||||||
|
expiry_iso3339 = expiry.strftime("%Y-%m-%dT%H:%M:%S.00Z")
|
||||||
|
body["certExpiryDate"] = expiry_iso3339
|
||||||
|
body["org"] = self.csr_org
|
||||||
|
body["tracking"] = {
|
||||||
|
"requesterName": self.module.params["entrust_requester_name"],
|
||||||
|
"requesterEmail": self.module.params["entrust_requester_email"],
|
||||||
|
"requesterPhone": self.module.params["entrust_requester_phone"],
|
||||||
|
}
|
||||||
|
|
||||||
|
try:
|
||||||
|
result = self.ecs_client.NewCertRequest( # type: ignore[attr-defined] # pylint: disable=no-member
|
||||||
|
Body=body
|
||||||
|
)
|
||||||
|
self.trackingId = result.get("trackingId")
|
||||||
|
except RestOperationException as e:
|
||||||
|
self.module.fail_json(
|
||||||
|
msg=f"Failed to request new certificate from Entrust Certificate Services (ECS): {e.message}"
|
||||||
|
)
|
||||||
|
|
||||||
|
self.cert_bytes = to_bytes(result.get("endEntityCert"))
|
||||||
|
self.cert = load_certificate(
|
||||||
|
path=None,
|
||||||
|
content=self.cert_bytes,
|
||||||
|
)
|
||||||
|
|
||||||
|
def get_certificate_data(self) -> bytes:
|
||||||
|
"""Return bytes for self.cert."""
|
||||||
|
if self.cert_bytes is None:
|
||||||
|
raise AssertionError("Contract violation: cert_bytes not set")
|
||||||
|
return self.cert_bytes
|
||||||
|
|
||||||
|
def needs_regeneration(
|
||||||
|
self,
|
||||||
|
*,
|
||||||
|
not_before: datetime.datetime | None = None,
|
||||||
|
not_after: datetime.datetime | None = None,
|
||||||
|
) -> bool:
|
||||||
|
parent_check = super().needs_regeneration()
|
||||||
|
|
||||||
|
try:
|
||||||
|
cert_details = self._get_cert_details()
|
||||||
|
except RestOperationException as e:
|
||||||
|
self.module.fail_json(
|
||||||
|
msg=f"Failed to get status of existing certificate from Entrust Certificate Services (ECS): {e.message}."
|
||||||
|
)
|
||||||
|
|
||||||
|
# Always issue a new certificate if the certificate is expired, suspended or revoked
|
||||||
|
status = cert_details.get("status", False)
|
||||||
|
if status in ("EXPIRED", "SUSPENDED", "REVOKED"):
|
||||||
|
return True
|
||||||
|
|
||||||
|
# If the requested cert type was specified and it is for a different certificate type than the initial certificate, a new one is needed
|
||||||
|
if (
|
||||||
|
self.module.params["entrust_cert_type"]
|
||||||
|
and cert_details.get("certType")
|
||||||
|
and self.module.params["entrust_cert_type"] != cert_details.get("certType")
|
||||||
|
):
|
||||||
|
return True
|
||||||
|
|
||||||
|
return parent_check
|
||||||
|
|
||||||
|
def _get_cert_details(self) -> dict[str, t.Any]:
|
||||||
|
cert_details: dict[str, t.Any] = {}
|
||||||
|
try:
|
||||||
|
self._ensure_existing_certificate_loaded()
|
||||||
|
except Exception:
|
||||||
|
return cert_details
|
||||||
|
if self.existing_certificate:
|
||||||
|
serial_number = f"{self.existing_certificate.serial_number:X}"
|
||||||
|
expiry = get_not_valid_after(self.existing_certificate)
|
||||||
|
|
||||||
|
# get some information about the expiry of this certificate
|
||||||
|
expiry_iso3339 = expiry.strftime("%Y-%m-%dT%H:%M:%S.00Z")
|
||||||
|
cert_details["expiresAfter"] = expiry_iso3339
|
||||||
|
|
||||||
|
# If a trackingId is not already defined (from the result of a generate)
|
||||||
|
# use the serial number to identify the tracking Id
|
||||||
|
if self.trackingId is None and serial_number is not None:
|
||||||
|
cert_results = self.ecs_client.GetCertificates( # type: ignore[attr-defined] # pylint: disable=no-member
|
||||||
|
serialNumber=serial_number
|
||||||
|
).get(
|
||||||
|
"certificates", {}
|
||||||
|
)
|
||||||
|
|
||||||
|
# Finding 0 or more than 1 result is a very unlikely use case, it simply means we cannot perform additional checks
|
||||||
|
# on the 'state' as returned by Entrust Certificate Services (ECS). The general certificate validity is
|
||||||
|
# still checked as it is in the rest of the module.
|
||||||
|
if len(cert_results) == 1:
|
||||||
|
self.trackingId = cert_results[0].get("trackingId")
|
||||||
|
|
||||||
|
if self.trackingId is not None:
|
||||||
|
cert_details.update(
|
||||||
|
self.ecs_client.GetCertificate( # pylint: disable=no-member
|
||||||
|
trackingId=self.trackingId
|
||||||
|
)
|
||||||
|
)
|
||||||
|
|
||||||
|
return cert_details
|
||||||
|
|
||||||
|
|
||||||
|
class EntrustCertificateProvider(CertificateProvider):
|
||||||
|
def validate_module_args(self, module: AnsibleModule) -> None:
|
||||||
|
pass
|
||||||
|
|
||||||
|
def create_backend(self, module: AnsibleModule) -> EntrustCertificateBackend:
|
||||||
|
return EntrustCertificateBackend(module=module)
|
||||||
|
|
||||||
|
|
||||||
|
def add_entrust_provider_to_argument_spec(argument_spec: ArgumentSpec) -> None:
|
||||||
|
argument_spec.argument_spec["provider"]["choices"].append("entrust")
|
||||||
|
argument_spec.argument_spec.update(
|
||||||
|
{
|
||||||
|
"entrust_cert_type": {
|
||||||
|
"type": "str",
|
||||||
|
"default": "STANDARD_SSL",
|
||||||
|
"choices": [
|
||||||
|
"STANDARD_SSL",
|
||||||
|
"ADVANTAGE_SSL",
|
||||||
|
"UC_SSL",
|
||||||
|
"EV_SSL",
|
||||||
|
"WILDCARD_SSL",
|
||||||
|
"PRIVATE_SSL",
|
||||||
|
"PD_SSL",
|
||||||
|
"CDS_ENT_LITE",
|
||||||
|
"CDS_ENT_PRO",
|
||||||
|
"SMIME_ENT",
|
||||||
|
],
|
||||||
|
},
|
||||||
|
"entrust_requester_email": {"type": "str"},
|
||||||
|
"entrust_requester_name": {"type": "str"},
|
||||||
|
"entrust_requester_phone": {"type": "str"},
|
||||||
|
"entrust_api_user": {"type": "str"},
|
||||||
|
"entrust_api_key": {"type": "str", "no_log": True},
|
||||||
|
"entrust_api_client_cert_path": {"type": "path"},
|
||||||
|
"entrust_api_client_cert_key_path": {"type": "path", "no_log": True},
|
||||||
|
"entrust_api_specification_path": {
|
||||||
|
"type": "path",
|
||||||
|
"default": "https://cloud.entrust.net/EntrustCloud/documentation/cms-api-2.1.0.yaml",
|
||||||
|
},
|
||||||
|
"entrust_not_after": {"type": "str", "default": "+365d"},
|
||||||
|
}
|
||||||
|
)
|
||||||
|
argument_spec.required_if.append(
|
||||||
|
(
|
||||||
|
"provider",
|
||||||
|
"entrust",
|
||||||
|
[
|
||||||
|
"entrust_requester_email",
|
||||||
|
"entrust_requester_name",
|
||||||
|
"entrust_requester_phone",
|
||||||
|
"entrust_api_user",
|
||||||
|
"entrust_api_key",
|
||||||
|
"entrust_api_client_cert_path",
|
||||||
|
"entrust_api_client_cert_key_path",
|
||||||
|
],
|
||||||
|
)
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
__all__ = (
|
||||||
|
"EntrustCertificateBackend",
|
||||||
|
"EntrustCertificateProvider",
|
||||||
|
"add_entrust_provider_to_argument_spec",
|
||||||
|
)
|
||||||
487
plugins/module_utils/_crypto/module_backends/certificate_info.py
Normal file
487
plugins/module_utils/_crypto/module_backends/certificate_info.py
Normal file
@@ -0,0 +1,487 @@
|
|||||||
|
# Copyright (c) 2016-2017, Yanis Guenane <yanis+ansible@guenane.org>
|
||||||
|
# Copyright (c) 2017, Markus Teufelberger <mteufelberger+ansible@mgit.at>
|
||||||
|
# Copyright (c) 2020, Felix Fontein <felix@fontein.de>
|
||||||
|
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||||
|
# SPDX-License-Identifier: GPL-3.0-or-later
|
||||||
|
|
||||||
|
# Note that this module util is **PRIVATE** to the collection. It can have breaking changes at any time.
|
||||||
|
# Do not use this from other collections or standalone plugins/modules!
|
||||||
|
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import abc
|
||||||
|
import binascii
|
||||||
|
import typing as t
|
||||||
|
|
||||||
|
from ansible.module_utils.common.text.converters import to_text
|
||||||
|
from ansible_collections.community.crypto.plugins.module_utils._crypto.cryptography_support import (
|
||||||
|
CRYPTOGRAPHY_TIMEZONE,
|
||||||
|
cryptography_decode_name,
|
||||||
|
cryptography_get_extensions_from_cert,
|
||||||
|
cryptography_oid_to_name,
|
||||||
|
get_not_valid_after,
|
||||||
|
get_not_valid_before,
|
||||||
|
)
|
||||||
|
from ansible_collections.community.crypto.plugins.module_utils._crypto.module_backends.publickey_info import (
|
||||||
|
get_publickey_info,
|
||||||
|
)
|
||||||
|
from ansible_collections.community.crypto.plugins.module_utils._crypto.support import (
|
||||||
|
get_fingerprint_of_bytes,
|
||||||
|
load_certificate,
|
||||||
|
)
|
||||||
|
from ansible_collections.community.crypto.plugins.module_utils._cryptography_dep import (
|
||||||
|
COLLECTION_MINIMUM_CRYPTOGRAPHY_VERSION,
|
||||||
|
assert_required_cryptography_version,
|
||||||
|
)
|
||||||
|
from ansible_collections.community.crypto.plugins.module_utils._time import (
|
||||||
|
get_now_datetime,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
if t.TYPE_CHECKING:
|
||||||
|
import datetime
|
||||||
|
|
||||||
|
from ansible.module_utils.basic import AnsibleModule
|
||||||
|
from ansible_collections.community.crypto.plugins.module_utils._argspec import (
|
||||||
|
ArgumentSpec,
|
||||||
|
)
|
||||||
|
from ansible_collections.community.crypto.plugins.plugin_utils._action_module import (
|
||||||
|
AnsibleActionModule,
|
||||||
|
)
|
||||||
|
from ansible_collections.community.crypto.plugins.plugin_utils._filter_module import (
|
||||||
|
FilterModuleMock,
|
||||||
|
)
|
||||||
|
from cryptography.hazmat.primitives.asymmetric.types import PublicKeyTypes
|
||||||
|
|
||||||
|
GeneralAnsibleModule = t.Union[AnsibleModule, AnsibleActionModule, FilterModuleMock]
|
||||||
|
|
||||||
|
|
||||||
|
MINIMAL_CRYPTOGRAPHY_VERSION = COLLECTION_MINIMUM_CRYPTOGRAPHY_VERSION
|
||||||
|
|
||||||
|
try:
|
||||||
|
import cryptography
|
||||||
|
from cryptography import x509
|
||||||
|
from cryptography.hazmat.primitives import serialization
|
||||||
|
except ImportError:
|
||||||
|
pass
|
||||||
|
|
||||||
|
|
||||||
|
TIMESTAMP_FORMAT = "%Y%m%d%H%M%SZ"
|
||||||
|
|
||||||
|
|
||||||
|
class CertificateInfoRetrieval(metaclass=abc.ABCMeta):
|
||||||
|
cert: x509.Certificate
|
||||||
|
|
||||||
|
def __init__(self, *, module: GeneralAnsibleModule, content: bytes) -> None:
|
||||||
|
# content must be a bytes string
|
||||||
|
self.module = module
|
||||||
|
self.content = content
|
||||||
|
|
||||||
|
@abc.abstractmethod
|
||||||
|
def _get_der_bytes(self) -> bytes:
|
||||||
|
pass
|
||||||
|
|
||||||
|
@abc.abstractmethod
|
||||||
|
def _get_signature_algorithm(self) -> str:
|
||||||
|
pass
|
||||||
|
|
||||||
|
@abc.abstractmethod
|
||||||
|
def _get_subject_ordered(self) -> list[list[str]]:
|
||||||
|
pass
|
||||||
|
|
||||||
|
@abc.abstractmethod
|
||||||
|
def _get_issuer_ordered(self) -> list[list[str]]:
|
||||||
|
pass
|
||||||
|
|
||||||
|
@abc.abstractmethod
|
||||||
|
def _get_version(self) -> int | str:
|
||||||
|
pass
|
||||||
|
|
||||||
|
@abc.abstractmethod
|
||||||
|
def _get_key_usage(self) -> tuple[list[str] | None, bool]:
|
||||||
|
pass
|
||||||
|
|
||||||
|
@abc.abstractmethod
|
||||||
|
def _get_extended_key_usage(self) -> tuple[list[str] | None, bool]:
|
||||||
|
pass
|
||||||
|
|
||||||
|
@abc.abstractmethod
|
||||||
|
def _get_basic_constraints(self) -> tuple[list[str] | None, bool]:
|
||||||
|
pass
|
||||||
|
|
||||||
|
@abc.abstractmethod
|
||||||
|
def _get_ocsp_must_staple(self) -> tuple[bool | None, bool]:
|
||||||
|
pass
|
||||||
|
|
||||||
|
@abc.abstractmethod
|
||||||
|
def _get_subject_alt_name(self) -> tuple[list[str] | None, bool]:
|
||||||
|
pass
|
||||||
|
|
||||||
|
@abc.abstractmethod
|
||||||
|
def get_not_before(self) -> datetime.datetime:
|
||||||
|
pass
|
||||||
|
|
||||||
|
@abc.abstractmethod
|
||||||
|
def get_not_after(self) -> datetime.datetime:
|
||||||
|
pass
|
||||||
|
|
||||||
|
@abc.abstractmethod
|
||||||
|
def _get_public_key_pem(self) -> bytes:
|
||||||
|
pass
|
||||||
|
|
||||||
|
@abc.abstractmethod
|
||||||
|
def _get_public_key_object(self) -> PublicKeyTypes:
|
||||||
|
pass
|
||||||
|
|
||||||
|
@abc.abstractmethod
|
||||||
|
def _get_subject_key_identifier(self) -> bytes | None:
|
||||||
|
pass
|
||||||
|
|
||||||
|
@abc.abstractmethod
|
||||||
|
def _get_authority_key_identifier(
|
||||||
|
self,
|
||||||
|
) -> tuple[bytes | None, list[str] | None, int | None]:
|
||||||
|
pass
|
||||||
|
|
||||||
|
@abc.abstractmethod
|
||||||
|
def _get_serial_number(self) -> int:
|
||||||
|
pass
|
||||||
|
|
||||||
|
@abc.abstractmethod
|
||||||
|
def _get_all_extensions(self) -> dict[str, dict[str, bool | str]]:
|
||||||
|
pass
|
||||||
|
|
||||||
|
@abc.abstractmethod
|
||||||
|
def _get_ocsp_uri(self) -> str | None:
|
||||||
|
pass
|
||||||
|
|
||||||
|
@abc.abstractmethod
|
||||||
|
def _get_issuer_uri(self) -> str | None:
|
||||||
|
pass
|
||||||
|
|
||||||
|
def get_info(
|
||||||
|
self, *, prefer_one_fingerprint: bool = False, der_support_enabled: bool = False
|
||||||
|
) -> dict[str, t.Any]:
|
||||||
|
result: dict[str, t.Any] = {}
|
||||||
|
self.cert = load_certificate(
|
||||||
|
content=self.content,
|
||||||
|
der_support_enabled=der_support_enabled,
|
||||||
|
)
|
||||||
|
|
||||||
|
result["signature_algorithm"] = self._get_signature_algorithm()
|
||||||
|
subject = self._get_subject_ordered()
|
||||||
|
issuer = self._get_issuer_ordered()
|
||||||
|
result["subject"] = {}
|
||||||
|
for k, v in subject:
|
||||||
|
result["subject"][k] = v
|
||||||
|
result["subject_ordered"] = subject
|
||||||
|
result["issuer"] = {}
|
||||||
|
for k, v in issuer:
|
||||||
|
result["issuer"][k] = v
|
||||||
|
result["issuer_ordered"] = issuer
|
||||||
|
result["version"] = self._get_version()
|
||||||
|
result["key_usage"], result["key_usage_critical"] = self._get_key_usage()
|
||||||
|
result["extended_key_usage"], result["extended_key_usage_critical"] = (
|
||||||
|
self._get_extended_key_usage()
|
||||||
|
)
|
||||||
|
result["basic_constraints"], result["basic_constraints_critical"] = (
|
||||||
|
self._get_basic_constraints()
|
||||||
|
)
|
||||||
|
result["ocsp_must_staple"], result["ocsp_must_staple_critical"] = (
|
||||||
|
self._get_ocsp_must_staple()
|
||||||
|
)
|
||||||
|
result["subject_alt_name"], result["subject_alt_name_critical"] = (
|
||||||
|
self._get_subject_alt_name()
|
||||||
|
)
|
||||||
|
|
||||||
|
not_before = self.get_not_before()
|
||||||
|
not_after = self.get_not_after()
|
||||||
|
result["not_before"] = not_before.strftime(TIMESTAMP_FORMAT)
|
||||||
|
result["not_after"] = not_after.strftime(TIMESTAMP_FORMAT)
|
||||||
|
result["expired"] = not_after < get_now_datetime(
|
||||||
|
with_timezone=CRYPTOGRAPHY_TIMEZONE
|
||||||
|
)
|
||||||
|
|
||||||
|
result["public_key"] = to_text(self._get_public_key_pem())
|
||||||
|
|
||||||
|
public_key_info = get_publickey_info(
|
||||||
|
module=self.module,
|
||||||
|
key=self._get_public_key_object(),
|
||||||
|
prefer_one_fingerprint=prefer_one_fingerprint,
|
||||||
|
)
|
||||||
|
result.update(
|
||||||
|
{
|
||||||
|
"public_key_type": public_key_info["type"],
|
||||||
|
"public_key_data": public_key_info["public_data"],
|
||||||
|
"public_key_fingerprints": public_key_info["fingerprints"],
|
||||||
|
}
|
||||||
|
)
|
||||||
|
|
||||||
|
result["fingerprints"] = get_fingerprint_of_bytes(
|
||||||
|
self._get_der_bytes(), prefer_one=prefer_one_fingerprint
|
||||||
|
)
|
||||||
|
|
||||||
|
ski_bytes = self._get_subject_key_identifier()
|
||||||
|
if ski_bytes is not None:
|
||||||
|
ski = binascii.hexlify(ski_bytes).decode("ascii")
|
||||||
|
ski = ":".join([ski[i : i + 2] for i in range(0, len(ski), 2)])
|
||||||
|
else:
|
||||||
|
ski = None
|
||||||
|
result["subject_key_identifier"] = ski
|
||||||
|
|
||||||
|
aki_bytes, aci, acsn = self._get_authority_key_identifier()
|
||||||
|
if aki_bytes is not None:
|
||||||
|
aki = binascii.hexlify(aki_bytes).decode("ascii")
|
||||||
|
aki = ":".join([aki[i : i + 2] for i in range(0, len(aki), 2)])
|
||||||
|
else:
|
||||||
|
aki = None
|
||||||
|
result["authority_key_identifier"] = aki
|
||||||
|
result["authority_cert_issuer"] = aci
|
||||||
|
result["authority_cert_serial_number"] = acsn
|
||||||
|
|
||||||
|
result["serial_number"] = self._get_serial_number()
|
||||||
|
result["extensions_by_oid"] = self._get_all_extensions()
|
||||||
|
result["ocsp_uri"] = self._get_ocsp_uri()
|
||||||
|
result["issuer_uri"] = self._get_issuer_uri()
|
||||||
|
|
||||||
|
return result
|
||||||
|
|
||||||
|
|
||||||
|
class CertificateInfoRetrievalCryptography(CertificateInfoRetrieval):
|
||||||
|
"""Validate the supplied cert, using the cryptography backend"""
|
||||||
|
|
||||||
|
def __init__(self, *, module: GeneralAnsibleModule, content: bytes) -> None:
|
||||||
|
super().__init__(module=module, content=content)
|
||||||
|
self.name_encoding = module.params.get("name_encoding", "ignore")
|
||||||
|
|
||||||
|
def _get_der_bytes(self) -> bytes:
|
||||||
|
return self.cert.public_bytes(serialization.Encoding.DER)
|
||||||
|
|
||||||
|
def _get_signature_algorithm(self) -> str:
|
||||||
|
return cryptography_oid_to_name(self.cert.signature_algorithm_oid)
|
||||||
|
|
||||||
|
def _get_subject_ordered(self) -> list[list[str]]:
|
||||||
|
result: list[list[str]] = []
|
||||||
|
for attribute in self.cert.subject:
|
||||||
|
result.append(
|
||||||
|
[cryptography_oid_to_name(attribute.oid), to_text(attribute.value)]
|
||||||
|
)
|
||||||
|
return result
|
||||||
|
|
||||||
|
def _get_issuer_ordered(self) -> list[list[str]]:
|
||||||
|
result = []
|
||||||
|
for attribute in self.cert.issuer:
|
||||||
|
result.append(
|
||||||
|
[cryptography_oid_to_name(attribute.oid), to_text(attribute.value)]
|
||||||
|
)
|
||||||
|
return result
|
||||||
|
|
||||||
|
def _get_version(self) -> int | str:
|
||||||
|
if self.cert.version == x509.Version.v1:
|
||||||
|
return 1
|
||||||
|
if self.cert.version == x509.Version.v3:
|
||||||
|
return 3
|
||||||
|
return "unknown"
|
||||||
|
|
||||||
|
def _get_key_usage(self) -> tuple[list[str] | None, bool]:
|
||||||
|
try:
|
||||||
|
current_key_ext = self.cert.extensions.get_extension_for_class(
|
||||||
|
x509.KeyUsage
|
||||||
|
)
|
||||||
|
current_key_usage = current_key_ext.value
|
||||||
|
key_usage = {
|
||||||
|
"digital_signature": current_key_usage.digital_signature,
|
||||||
|
"content_commitment": current_key_usage.content_commitment,
|
||||||
|
"key_encipherment": current_key_usage.key_encipherment,
|
||||||
|
"data_encipherment": current_key_usage.data_encipherment,
|
||||||
|
"key_agreement": current_key_usage.key_agreement,
|
||||||
|
"key_cert_sign": current_key_usage.key_cert_sign,
|
||||||
|
"crl_sign": current_key_usage.crl_sign,
|
||||||
|
"encipher_only": False,
|
||||||
|
"decipher_only": False,
|
||||||
|
}
|
||||||
|
if key_usage["key_agreement"]:
|
||||||
|
key_usage.update(
|
||||||
|
{
|
||||||
|
"encipher_only": current_key_usage.encipher_only,
|
||||||
|
"decipher_only": current_key_usage.decipher_only,
|
||||||
|
}
|
||||||
|
)
|
||||||
|
|
||||||
|
key_usage_names = {
|
||||||
|
"digital_signature": "Digital Signature",
|
||||||
|
"content_commitment": "Non Repudiation",
|
||||||
|
"key_encipherment": "Key Encipherment",
|
||||||
|
"data_encipherment": "Data Encipherment",
|
||||||
|
"key_agreement": "Key Agreement",
|
||||||
|
"key_cert_sign": "Certificate Sign",
|
||||||
|
"crl_sign": "CRL Sign",
|
||||||
|
"encipher_only": "Encipher Only",
|
||||||
|
"decipher_only": "Decipher Only",
|
||||||
|
}
|
||||||
|
return (
|
||||||
|
sorted(
|
||||||
|
[
|
||||||
|
key_usage_names[name]
|
||||||
|
for name, value in key_usage.items()
|
||||||
|
if value
|
||||||
|
]
|
||||||
|
),
|
||||||
|
current_key_ext.critical,
|
||||||
|
)
|
||||||
|
except cryptography.x509.ExtensionNotFound:
|
||||||
|
return None, False
|
||||||
|
|
||||||
|
def _get_extended_key_usage(self) -> tuple[list[str] | None, bool]:
|
||||||
|
try:
|
||||||
|
ext_keyusage_ext = self.cert.extensions.get_extension_for_class(
|
||||||
|
x509.ExtendedKeyUsage
|
||||||
|
)
|
||||||
|
return (
|
||||||
|
sorted(
|
||||||
|
[cryptography_oid_to_name(eku) for eku in ext_keyusage_ext.value]
|
||||||
|
),
|
||||||
|
ext_keyusage_ext.critical,
|
||||||
|
)
|
||||||
|
except cryptography.x509.ExtensionNotFound:
|
||||||
|
return None, False
|
||||||
|
|
||||||
|
def _get_basic_constraints(self) -> tuple[list[str] | None, bool]:
|
||||||
|
try:
|
||||||
|
ext_keyusage_ext = self.cert.extensions.get_extension_for_class(
|
||||||
|
x509.BasicConstraints
|
||||||
|
)
|
||||||
|
result = []
|
||||||
|
result.append(f"CA:{'TRUE' if ext_keyusage_ext.value.ca else 'FALSE'}")
|
||||||
|
if ext_keyusage_ext.value.path_length is not None:
|
||||||
|
result.append(f"pathlen:{ext_keyusage_ext.value.path_length}")
|
||||||
|
return sorted(result), ext_keyusage_ext.critical
|
||||||
|
except cryptography.x509.ExtensionNotFound:
|
||||||
|
return None, False
|
||||||
|
|
||||||
|
def _get_ocsp_must_staple(self) -> tuple[bool | None, bool]:
|
||||||
|
try:
|
||||||
|
tlsfeature_ext = self.cert.extensions.get_extension_for_class(
|
||||||
|
x509.TLSFeature
|
||||||
|
)
|
||||||
|
value = (
|
||||||
|
cryptography.x509.TLSFeatureType.status_request in tlsfeature_ext.value
|
||||||
|
)
|
||||||
|
return value, tlsfeature_ext.critical
|
||||||
|
except cryptography.x509.ExtensionNotFound:
|
||||||
|
return None, False
|
||||||
|
|
||||||
|
def _get_subject_alt_name(self) -> tuple[list[str] | None, bool]:
|
||||||
|
try:
|
||||||
|
san_ext = self.cert.extensions.get_extension_for_class(
|
||||||
|
x509.SubjectAlternativeName
|
||||||
|
)
|
||||||
|
result = [
|
||||||
|
cryptography_decode_name(san, idn_rewrite=self.name_encoding)
|
||||||
|
for san in san_ext.value
|
||||||
|
]
|
||||||
|
return result, san_ext.critical
|
||||||
|
except cryptography.x509.ExtensionNotFound:
|
||||||
|
return None, False
|
||||||
|
|
||||||
|
def get_not_before(self) -> datetime.datetime:
|
||||||
|
return get_not_valid_before(self.cert)
|
||||||
|
|
||||||
|
def get_not_after(self) -> datetime.datetime:
|
||||||
|
return get_not_valid_after(self.cert)
|
||||||
|
|
||||||
|
def _get_public_key_pem(self) -> bytes:
|
||||||
|
return self.cert.public_key().public_bytes(
|
||||||
|
serialization.Encoding.PEM,
|
||||||
|
serialization.PublicFormat.SubjectPublicKeyInfo,
|
||||||
|
)
|
||||||
|
|
||||||
|
def _get_public_key_object(self) -> PublicKeyTypes:
|
||||||
|
return self.cert.public_key()
|
||||||
|
|
||||||
|
def _get_subject_key_identifier(self) -> bytes | None:
|
||||||
|
try:
|
||||||
|
ext = self.cert.extensions.get_extension_for_class(
|
||||||
|
x509.SubjectKeyIdentifier
|
||||||
|
)
|
||||||
|
return ext.value.digest
|
||||||
|
except cryptography.x509.ExtensionNotFound:
|
||||||
|
return None
|
||||||
|
|
||||||
|
def _get_authority_key_identifier(
|
||||||
|
self,
|
||||||
|
) -> tuple[bytes | None, list[str] | None, int | None]:
|
||||||
|
try:
|
||||||
|
ext = self.cert.extensions.get_extension_for_class(
|
||||||
|
x509.AuthorityKeyIdentifier
|
||||||
|
)
|
||||||
|
issuer = None
|
||||||
|
if ext.value.authority_cert_issuer is not None:
|
||||||
|
issuer = [
|
||||||
|
cryptography_decode_name(san, idn_rewrite=self.name_encoding)
|
||||||
|
for san in ext.value.authority_cert_issuer
|
||||||
|
]
|
||||||
|
return (
|
||||||
|
ext.value.key_identifier,
|
||||||
|
issuer,
|
||||||
|
ext.value.authority_cert_serial_number,
|
||||||
|
)
|
||||||
|
except cryptography.x509.ExtensionNotFound:
|
||||||
|
return None, None, None
|
||||||
|
|
||||||
|
def _get_serial_number(self) -> int:
|
||||||
|
return self.cert.serial_number
|
||||||
|
|
||||||
|
def _get_all_extensions(self) -> dict[str, dict[str, bool | str]]:
|
||||||
|
return cryptography_get_extensions_from_cert(self.cert)
|
||||||
|
|
||||||
|
def _get_ocsp_uri(self) -> str | None:
|
||||||
|
try:
|
||||||
|
ext = self.cert.extensions.get_extension_for_class(
|
||||||
|
x509.AuthorityInformationAccess
|
||||||
|
)
|
||||||
|
for desc in ext.value:
|
||||||
|
if desc.access_method == x509.oid.AuthorityInformationAccessOID.OCSP:
|
||||||
|
if isinstance(desc.access_location, x509.UniformResourceIdentifier):
|
||||||
|
return desc.access_location.value
|
||||||
|
except x509.ExtensionNotFound:
|
||||||
|
pass
|
||||||
|
return None
|
||||||
|
|
||||||
|
def _get_issuer_uri(self) -> str | None:
|
||||||
|
try:
|
||||||
|
ext = self.cert.extensions.get_extension_for_class(
|
||||||
|
x509.AuthorityInformationAccess
|
||||||
|
)
|
||||||
|
for desc in ext.value:
|
||||||
|
if (
|
||||||
|
desc.access_method
|
||||||
|
== x509.oid.AuthorityInformationAccessOID.CA_ISSUERS
|
||||||
|
):
|
||||||
|
if isinstance(desc.access_location, x509.UniformResourceIdentifier):
|
||||||
|
return desc.access_location.value
|
||||||
|
except x509.ExtensionNotFound:
|
||||||
|
pass
|
||||||
|
return None
|
||||||
|
|
||||||
|
|
||||||
|
def get_certificate_info(
|
||||||
|
*,
|
||||||
|
module: GeneralAnsibleModule,
|
||||||
|
content: bytes,
|
||||||
|
prefer_one_fingerprint: bool = False,
|
||||||
|
) -> dict[str, t.Any]:
|
||||||
|
info = CertificateInfoRetrievalCryptography(module=module, content=content)
|
||||||
|
return info.get_info(prefer_one_fingerprint=prefer_one_fingerprint)
|
||||||
|
|
||||||
|
|
||||||
|
def select_backend(
|
||||||
|
*, module: GeneralAnsibleModule, content: bytes
|
||||||
|
) -> CertificateInfoRetrieval:
|
||||||
|
assert_required_cryptography_version(
|
||||||
|
module, minimum_cryptography_version=MINIMAL_CRYPTOGRAPHY_VERSION
|
||||||
|
)
|
||||||
|
return CertificateInfoRetrievalCryptography(module=module, content=content)
|
||||||
|
|
||||||
|
|
||||||
|
__all__ = ("CertificateInfoRetrieval", "get_certificate_info", "select_backend")
|
||||||
@@ -0,0 +1,374 @@
|
|||||||
|
# Copyright (c) 2016-2017, Yanis Guenane <yanis+ansible@guenane.org>
|
||||||
|
# Copyright (c) 2017, Markus Teufelberger <mteufelberger+ansible@mgit.at>
|
||||||
|
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||||
|
# SPDX-License-Identifier: GPL-3.0-or-later
|
||||||
|
|
||||||
|
# Note that this module util is **PRIVATE** to the collection. It can have breaking changes at any time.
|
||||||
|
# Do not use this from other collections or standalone plugins/modules!
|
||||||
|
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import os
|
||||||
|
import typing as t
|
||||||
|
from random import randrange
|
||||||
|
|
||||||
|
from ansible_collections.community.crypto.plugins.module_utils._crypto.basic import (
|
||||||
|
OpenSSLBadPassphraseError,
|
||||||
|
)
|
||||||
|
from ansible_collections.community.crypto.plugins.module_utils._crypto.cryptography_support import (
|
||||||
|
CRYPTOGRAPHY_TIMEZONE,
|
||||||
|
cryptography_compare_public_keys,
|
||||||
|
cryptography_key_needs_digest_for_signing,
|
||||||
|
cryptography_verify_certificate_signature,
|
||||||
|
get_not_valid_after,
|
||||||
|
get_not_valid_before,
|
||||||
|
is_potential_certificate_issuer_public_key,
|
||||||
|
set_not_valid_after,
|
||||||
|
set_not_valid_before,
|
||||||
|
)
|
||||||
|
from ansible_collections.community.crypto.plugins.module_utils._crypto.module_backends.certificate import (
|
||||||
|
CertificateBackend,
|
||||||
|
CertificateError,
|
||||||
|
CertificateProvider,
|
||||||
|
)
|
||||||
|
from ansible_collections.community.crypto.plugins.module_utils._crypto.support import (
|
||||||
|
load_certificate,
|
||||||
|
load_certificate_issuer_privatekey,
|
||||||
|
select_message_digest,
|
||||||
|
)
|
||||||
|
from ansible_collections.community.crypto.plugins.module_utils._time import (
|
||||||
|
get_relative_time_option,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
if t.TYPE_CHECKING:
|
||||||
|
import datetime
|
||||||
|
|
||||||
|
from ansible.module_utils.basic import AnsibleModule
|
||||||
|
from ansible_collections.community.crypto.plugins.module_utils._argspec import (
|
||||||
|
ArgumentSpec,
|
||||||
|
)
|
||||||
|
from cryptography.hazmat.primitives.asymmetric.types import (
|
||||||
|
CertificateIssuerPrivateKeyTypes,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
try:
|
||||||
|
import cryptography
|
||||||
|
from cryptography import x509
|
||||||
|
from cryptography.hazmat.primitives.serialization import Encoding
|
||||||
|
except ImportError:
|
||||||
|
pass
|
||||||
|
|
||||||
|
|
||||||
|
class OwnCACertificateBackendCryptography(CertificateBackend):
|
||||||
|
def __init__(self, *, module: AnsibleModule) -> None:
|
||||||
|
super().__init__(module=module)
|
||||||
|
|
||||||
|
self.create_subject_key_identifier: t.Literal[
|
||||||
|
"create_if_not_provided", "always_create", "never_create"
|
||||||
|
] = module.params["ownca_create_subject_key_identifier"]
|
||||||
|
self.create_authority_key_identifier: bool = module.params[
|
||||||
|
"ownca_create_authority_key_identifier"
|
||||||
|
]
|
||||||
|
self.notBefore = get_relative_time_option(
|
||||||
|
module.params["ownca_not_before"],
|
||||||
|
input_name="ownca_not_before",
|
||||||
|
with_timezone=CRYPTOGRAPHY_TIMEZONE,
|
||||||
|
)
|
||||||
|
self.notAfter = get_relative_time_option(
|
||||||
|
module.params["ownca_not_after"],
|
||||||
|
input_name="ownca_not_after",
|
||||||
|
with_timezone=CRYPTOGRAPHY_TIMEZONE,
|
||||||
|
)
|
||||||
|
self.digest = select_message_digest(module.params["ownca_digest"])
|
||||||
|
self.serial_number = x509.random_serial_number()
|
||||||
|
self.ca_cert_path: str | None = module.params["ownca_path"]
|
||||||
|
ca_cert_content: str | None = module.params["ownca_content"]
|
||||||
|
if ca_cert_content is not None:
|
||||||
|
self.ca_cert_content: bytes | None = ca_cert_content.encode("utf-8")
|
||||||
|
else:
|
||||||
|
self.ca_cert_content = None
|
||||||
|
self.ca_privatekey_path: str | None = module.params["ownca_privatekey_path"]
|
||||||
|
ca_privatekey_content: str | None = module.params["ownca_privatekey_content"]
|
||||||
|
if ca_privatekey_content is not None:
|
||||||
|
self.ca_privatekey_content: bytes | None = ca_privatekey_content.encode(
|
||||||
|
"utf-8"
|
||||||
|
)
|
||||||
|
else:
|
||||||
|
self.ca_privatekey_content = None
|
||||||
|
self.ca_privatekey_passphrase: str | None = module.params[
|
||||||
|
"ownca_privatekey_passphrase"
|
||||||
|
]
|
||||||
|
|
||||||
|
if self.csr_content is None:
|
||||||
|
if self.csr_path is None:
|
||||||
|
raise CertificateError(
|
||||||
|
"csr_path or csr_content is required for ownca provider"
|
||||||
|
)
|
||||||
|
if not os.path.exists(self.csr_path):
|
||||||
|
raise CertificateError(
|
||||||
|
f"The certificate signing request file {self.csr_path} does not exist"
|
||||||
|
)
|
||||||
|
if self.ca_cert_path is not None and not os.path.exists(self.ca_cert_path):
|
||||||
|
raise CertificateError(
|
||||||
|
f"The CA certificate file {self.ca_cert_path} does not exist"
|
||||||
|
)
|
||||||
|
if self.ca_privatekey_path is not None and not os.path.exists(
|
||||||
|
self.ca_privatekey_path
|
||||||
|
):
|
||||||
|
raise CertificateError(
|
||||||
|
f"The CA private key file {self.ca_privatekey_path} does not exist"
|
||||||
|
)
|
||||||
|
|
||||||
|
self._ensure_csr_loaded()
|
||||||
|
self.ca_cert = load_certificate(
|
||||||
|
path=self.ca_cert_path,
|
||||||
|
content=self.ca_cert_content,
|
||||||
|
)
|
||||||
|
if not is_potential_certificate_issuer_public_key(self.ca_cert.public_key()):
|
||||||
|
raise CertificateError(
|
||||||
|
"CA certificate's public key cannot be used to sign certificates"
|
||||||
|
)
|
||||||
|
try:
|
||||||
|
self.ca_private_key = load_certificate_issuer_privatekey(
|
||||||
|
path=self.ca_privatekey_path,
|
||||||
|
content=self.ca_privatekey_content,
|
||||||
|
passphrase=self.ca_privatekey_passphrase,
|
||||||
|
)
|
||||||
|
except OpenSSLBadPassphraseError as exc:
|
||||||
|
module.fail_json(msg=str(exc))
|
||||||
|
|
||||||
|
if not cryptography_compare_public_keys(
|
||||||
|
self.ca_cert.public_key(), self.ca_private_key.public_key()
|
||||||
|
):
|
||||||
|
raise CertificateError(
|
||||||
|
"The CA private key does not belong to the CA certificate"
|
||||||
|
)
|
||||||
|
|
||||||
|
if cryptography_key_needs_digest_for_signing(self.ca_private_key):
|
||||||
|
if self.digest is None:
|
||||||
|
raise CertificateError(
|
||||||
|
f"The digest {module.params['ownca_digest']} is not supported with the cryptography backend"
|
||||||
|
)
|
||||||
|
else:
|
||||||
|
self.digest = None
|
||||||
|
|
||||||
|
def generate_certificate(self) -> None:
|
||||||
|
"""(Re-)Generate certificate."""
|
||||||
|
if self.csr is None:
|
||||||
|
raise AssertionError("Contract violation: csr has not been populated")
|
||||||
|
cert_builder = x509.CertificateBuilder()
|
||||||
|
cert_builder = cert_builder.subject_name(self.csr.subject)
|
||||||
|
cert_builder = cert_builder.issuer_name(self.ca_cert.subject)
|
||||||
|
cert_builder = cert_builder.serial_number(self.serial_number)
|
||||||
|
cert_builder = set_not_valid_before(cert_builder, self.notBefore)
|
||||||
|
cert_builder = set_not_valid_after(cert_builder, self.notAfter)
|
||||||
|
cert_builder = cert_builder.public_key(self.csr.public_key())
|
||||||
|
has_ski = False
|
||||||
|
for extension in self.csr.extensions:
|
||||||
|
if isinstance(extension.value, x509.SubjectKeyIdentifier):
|
||||||
|
if self.create_subject_key_identifier == "always_create":
|
||||||
|
continue
|
||||||
|
has_ski = True
|
||||||
|
if self.create_authority_key_identifier and isinstance(
|
||||||
|
extension.value, x509.AuthorityKeyIdentifier
|
||||||
|
):
|
||||||
|
continue
|
||||||
|
cert_builder = cert_builder.add_extension(
|
||||||
|
extension.value, critical=extension.critical
|
||||||
|
)
|
||||||
|
if not has_ski and self.create_subject_key_identifier != "never_create":
|
||||||
|
cert_builder = cert_builder.add_extension(
|
||||||
|
x509.SubjectKeyIdentifier.from_public_key(self.csr.public_key()),
|
||||||
|
critical=False,
|
||||||
|
)
|
||||||
|
if self.create_authority_key_identifier:
|
||||||
|
try:
|
||||||
|
ext = self.ca_cert.extensions.get_extension_for_class(
|
||||||
|
x509.SubjectKeyIdentifier
|
||||||
|
)
|
||||||
|
cert_builder = cert_builder.add_extension(
|
||||||
|
(
|
||||||
|
x509.AuthorityKeyIdentifier.from_issuer_subject_key_identifier(
|
||||||
|
ext.value
|
||||||
|
)
|
||||||
|
),
|
||||||
|
critical=False,
|
||||||
|
)
|
||||||
|
except cryptography.x509.ExtensionNotFound:
|
||||||
|
public_key = self.ca_cert.public_key()
|
||||||
|
assert is_potential_certificate_issuer_public_key(public_key)
|
||||||
|
cert_builder = cert_builder.add_extension(
|
||||||
|
x509.AuthorityKeyIdentifier.from_issuer_public_key(public_key),
|
||||||
|
critical=False,
|
||||||
|
)
|
||||||
|
|
||||||
|
certificate = cert_builder.sign(
|
||||||
|
private_key=self.ca_private_key,
|
||||||
|
algorithm=self.digest,
|
||||||
|
)
|
||||||
|
|
||||||
|
self.cert = certificate
|
||||||
|
|
||||||
|
def get_certificate_data(self) -> bytes:
|
||||||
|
"""Return bytes for self.cert."""
|
||||||
|
if self.cert is None:
|
||||||
|
raise AssertionError("Contract violation: cert has not been populated")
|
||||||
|
return self.cert.public_bytes(Encoding.PEM)
|
||||||
|
|
||||||
|
def needs_regeneration(
|
||||||
|
self,
|
||||||
|
*,
|
||||||
|
not_before: datetime.datetime | None = None,
|
||||||
|
not_after: datetime.datetime | None = None,
|
||||||
|
) -> bool:
|
||||||
|
if super().needs_regeneration(
|
||||||
|
not_before=self.notBefore, not_after=self.notAfter
|
||||||
|
):
|
||||||
|
return True
|
||||||
|
|
||||||
|
self._ensure_existing_certificate_loaded()
|
||||||
|
assert self.existing_certificate is not None
|
||||||
|
|
||||||
|
# Check whether certificate is signed by CA certificate
|
||||||
|
if not cryptography_verify_certificate_signature(
|
||||||
|
certificate=self.existing_certificate,
|
||||||
|
signer_public_key=self.ca_cert.public_key(),
|
||||||
|
):
|
||||||
|
return True
|
||||||
|
|
||||||
|
# Check subject
|
||||||
|
if self.ca_cert.subject != self.existing_certificate.issuer:
|
||||||
|
return True
|
||||||
|
|
||||||
|
# Check AuthorityKeyIdentifier
|
||||||
|
if self.create_authority_key_identifier:
|
||||||
|
try:
|
||||||
|
ext_ski = self.ca_cert.extensions.get_extension_for_class(
|
||||||
|
x509.SubjectKeyIdentifier
|
||||||
|
)
|
||||||
|
expected_ext = (
|
||||||
|
x509.AuthorityKeyIdentifier.from_issuer_subject_key_identifier(
|
||||||
|
ext_ski.value
|
||||||
|
)
|
||||||
|
)
|
||||||
|
except cryptography.x509.ExtensionNotFound:
|
||||||
|
public_key = self.ca_cert.public_key()
|
||||||
|
assert is_potential_certificate_issuer_public_key(public_key)
|
||||||
|
expected_ext = x509.AuthorityKeyIdentifier.from_issuer_public_key(
|
||||||
|
public_key
|
||||||
|
)
|
||||||
|
|
||||||
|
try:
|
||||||
|
ext_aki = self.existing_certificate.extensions.get_extension_for_class(
|
||||||
|
x509.AuthorityKeyIdentifier
|
||||||
|
)
|
||||||
|
if ext_aki.value != expected_ext:
|
||||||
|
return True
|
||||||
|
except cryptography.x509.ExtensionNotFound:
|
||||||
|
return True
|
||||||
|
|
||||||
|
return False
|
||||||
|
|
||||||
|
def dump(self, *, include_certificate: bool) -> dict[str, t.Any]:
|
||||||
|
result = super().dump(include_certificate=include_certificate)
|
||||||
|
result.update(
|
||||||
|
{
|
||||||
|
"ca_cert": self.ca_cert_path,
|
||||||
|
"ca_privatekey": self.ca_privatekey_path,
|
||||||
|
}
|
||||||
|
)
|
||||||
|
|
||||||
|
if self.module.check_mode:
|
||||||
|
result.update(
|
||||||
|
{
|
||||||
|
"notBefore": self.notBefore.strftime("%Y%m%d%H%M%SZ"),
|
||||||
|
"notAfter": self.notAfter.strftime("%Y%m%d%H%M%SZ"),
|
||||||
|
"serial_number": self.serial_number,
|
||||||
|
}
|
||||||
|
)
|
||||||
|
else:
|
||||||
|
if self.cert is None:
|
||||||
|
self.cert = self.existing_certificate
|
||||||
|
assert self.cert is not None
|
||||||
|
result.update(
|
||||||
|
{
|
||||||
|
"notBefore": get_not_valid_before(self.cert).strftime(
|
||||||
|
"%Y%m%d%H%M%SZ"
|
||||||
|
),
|
||||||
|
"notAfter": get_not_valid_after(self.cert).strftime(
|
||||||
|
"%Y%m%d%H%M%SZ"
|
||||||
|
),
|
||||||
|
"serial_number": self.cert.serial_number,
|
||||||
|
}
|
||||||
|
)
|
||||||
|
|
||||||
|
return result
|
||||||
|
|
||||||
|
|
||||||
|
def generate_serial_number() -> int:
|
||||||
|
"""Generate a serial number for a certificate"""
|
||||||
|
while True:
|
||||||
|
result = randrange(0, 1 << 160)
|
||||||
|
if result >= 1000:
|
||||||
|
return result
|
||||||
|
|
||||||
|
|
||||||
|
class OwnCACertificateProvider(CertificateProvider):
|
||||||
|
def validate_module_args(self, module: AnsibleModule) -> None:
|
||||||
|
if (
|
||||||
|
module.params["ownca_path"] is None
|
||||||
|
and module.params["ownca_content"] is None
|
||||||
|
):
|
||||||
|
module.fail_json(
|
||||||
|
msg="One of ownca_path and ownca_content must be specified for the ownca provider."
|
||||||
|
)
|
||||||
|
if (
|
||||||
|
module.params["ownca_privatekey_path"] is None
|
||||||
|
and module.params["ownca_privatekey_content"] is None
|
||||||
|
):
|
||||||
|
module.fail_json(
|
||||||
|
msg="One of ownca_privatekey_path and ownca_privatekey_content must be specified for the ownca provider."
|
||||||
|
)
|
||||||
|
|
||||||
|
def create_backend(
|
||||||
|
self, module: AnsibleModule
|
||||||
|
) -> OwnCACertificateBackendCryptography:
|
||||||
|
return OwnCACertificateBackendCryptography(module=module)
|
||||||
|
|
||||||
|
|
||||||
|
def add_ownca_provider_to_argument_spec(argument_spec: ArgumentSpec) -> None:
|
||||||
|
argument_spec.argument_spec["provider"]["choices"].append("ownca")
|
||||||
|
argument_spec.argument_spec.update(
|
||||||
|
{
|
||||||
|
"ownca_path": {"type": "path"},
|
||||||
|
"ownca_content": {"type": "str"},
|
||||||
|
"ownca_privatekey_path": {"type": "path"},
|
||||||
|
"ownca_privatekey_content": {"type": "str", "no_log": True},
|
||||||
|
"ownca_privatekey_passphrase": {"type": "str", "no_log": True},
|
||||||
|
"ownca_digest": {"type": "str", "default": "sha256"},
|
||||||
|
"ownca_version": {"type": "int", "default": 3, "choices": [3]}, # not used
|
||||||
|
"ownca_not_before": {"type": "str", "default": "+0s"},
|
||||||
|
"ownca_not_after": {"type": "str", "default": "+3650d"},
|
||||||
|
"ownca_create_subject_key_identifier": {
|
||||||
|
"type": "str",
|
||||||
|
"default": "create_if_not_provided",
|
||||||
|
"choices": ["create_if_not_provided", "always_create", "never_create"],
|
||||||
|
},
|
||||||
|
"ownca_create_authority_key_identifier": {"type": "bool", "default": True},
|
||||||
|
}
|
||||||
|
)
|
||||||
|
argument_spec.mutually_exclusive.extend(
|
||||||
|
[
|
||||||
|
["ownca_path", "ownca_content"],
|
||||||
|
["ownca_privatekey_path", "ownca_privatekey_content"],
|
||||||
|
]
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
__all__ = (
|
||||||
|
"OwnCACertificateBackendCryptography",
|
||||||
|
"OwnCACertificateProvider",
|
||||||
|
"add_ownca_provider_to_argument_spec",
|
||||||
|
)
|
||||||
@@ -0,0 +1,274 @@
|
|||||||
|
# Copyright (c) 2016-2017, Yanis Guenane <yanis+ansible@guenane.org>
|
||||||
|
# Copyright (c) 2017, Markus Teufelberger <mteufelberger+ansible@mgit.at>
|
||||||
|
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||||
|
# SPDX-License-Identifier: GPL-3.0-or-later
|
||||||
|
|
||||||
|
# Note that this module util is **PRIVATE** to the collection. It can have breaking changes at any time.
|
||||||
|
# Do not use this from other collections or standalone plugins/modules!
|
||||||
|
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import os
|
||||||
|
import typing as t
|
||||||
|
from random import randrange
|
||||||
|
|
||||||
|
from ansible_collections.community.crypto.plugins.module_utils._crypto.cryptography_support import (
|
||||||
|
CRYPTOGRAPHY_TIMEZONE,
|
||||||
|
cryptography_key_needs_digest_for_signing,
|
||||||
|
cryptography_verify_certificate_signature,
|
||||||
|
get_not_valid_after,
|
||||||
|
get_not_valid_before,
|
||||||
|
is_potential_certificate_issuer_private_key,
|
||||||
|
set_not_valid_after,
|
||||||
|
set_not_valid_before,
|
||||||
|
)
|
||||||
|
from ansible_collections.community.crypto.plugins.module_utils._crypto.module_backends.certificate import (
|
||||||
|
CertificateBackend,
|
||||||
|
CertificateError,
|
||||||
|
CertificateProvider,
|
||||||
|
)
|
||||||
|
from ansible_collections.community.crypto.plugins.module_utils._crypto.support import (
|
||||||
|
select_message_digest,
|
||||||
|
)
|
||||||
|
from ansible_collections.community.crypto.plugins.module_utils._time import (
|
||||||
|
get_relative_time_option,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
if t.TYPE_CHECKING:
|
||||||
|
import datetime
|
||||||
|
|
||||||
|
from ansible.module_utils.basic import AnsibleModule
|
||||||
|
from ansible_collections.community.crypto.plugins.module_utils._argspec import (
|
||||||
|
ArgumentSpec,
|
||||||
|
)
|
||||||
|
from cryptography.hazmat.primitives.asymmetric.types import (
|
||||||
|
CertificateIssuerPrivateKeyTypes,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
try:
|
||||||
|
import cryptography
|
||||||
|
from cryptography import x509
|
||||||
|
from cryptography.hazmat.primitives.serialization import Encoding
|
||||||
|
except ImportError:
|
||||||
|
pass
|
||||||
|
|
||||||
|
|
||||||
|
class SelfSignedCertificateBackendCryptography(CertificateBackend):
    """Backend that creates self-signed certificates via the cryptography library.

    Subject and extensions are taken from the provided CSR (or from an empty
    CSR created on the fly); the certificate is signed with the module's
    private key, which also acts as the issuer key.
    """

    # Narrowed from the base class: __init__ verifies that the loaded key can
    # actually be used to issue certificates.
    privatekey: CertificateIssuerPrivateKeyTypes

    def __init__(self, *, module: AnsibleModule) -> None:
        """Read the ``selfsigned_*`` module options and validate key/CSR inputs.

        Raises:
            CertificateError: if a referenced file does not exist, the private
                key is absent or cannot sign certificates, or the configured
                digest is not supported by the cryptography backend.
        """
        super().__init__(module=module)

        self.create_subject_key_identifier: t.Literal[
            "create_if_not_provided", "always_create", "never_create"
        ] = module.params["selfsigned_create_subject_key_identifier"]
        # Validity boundaries, resolved from relative specifications like "+3650d".
        self.notBefore = get_relative_time_option(
            module.params["selfsigned_not_before"],
            input_name="selfsigned_not_before",
            with_timezone=CRYPTOGRAPHY_TIMEZONE,
        )
        self.notAfter = get_relative_time_option(
            module.params["selfsigned_not_after"],
            input_name="selfsigned_not_after",
            with_timezone=CRYPTOGRAPHY_TIMEZONE,
        )
        # select_message_digest() yields None for digests cryptography does not know.
        self.digest = select_message_digest(module.params["selfsigned_digest"])
        # A fresh random serial number is chosen for every (re-)generation.
        self.serial_number = x509.random_serial_number()

        if self.csr_path is not None and not os.path.exists(self.csr_path):
            raise CertificateError(
                f"The certificate signing request file {self.csr_path} does not exist"
            )
        if self.privatekey_path is not None and not os.path.exists(
            self.privatekey_path
        ):
            raise CertificateError(
                f"The private key file {self.privatekey_path} does not exist"
            )

        self._module = module

        self._ensure_private_key_loaded()
        if self.privatekey is None:
            raise CertificateError("Private key has not been provided")
        if not is_potential_certificate_issuer_private_key(self.privatekey):
            raise CertificateError("Private key cannot be used to sign certificates")

        if cryptography_key_needs_digest_for_signing(self.privatekey):
            if self.digest is None:
                raise CertificateError(
                    f"The digest {module.params['selfsigned_digest']} is not supported with the cryptography backend"
                )
        else:
            # Key types that sign without an explicit digest must be passed
            # digest=None when signing below.
            self.digest = None

        self._ensure_csr_loaded()
        if self.csr is None:
            # Create empty CSR on the fly
            csr = cryptography.x509.CertificateSigningRequestBuilder()
            csr = csr.subject_name(cryptography.x509.Name([]))
            self.csr = csr.sign(self.privatekey, self.digest)

    def generate_certificate(self) -> None:
        """(Re-)Generate certificate."""
        if self.csr is None:
            raise AssertionError("Contract violation: csr has not been populated")
        if self.privatekey is None:
            raise AssertionError(
                "Contract violation: privatekey has not been populated"
            )
        try:
            # Self-signed: subject and issuer are both taken from the CSR subject.
            cert_builder = x509.CertificateBuilder()
            cert_builder = cert_builder.subject_name(self.csr.subject)
            cert_builder = cert_builder.issuer_name(self.csr.subject)
            cert_builder = cert_builder.serial_number(self.serial_number)
            cert_builder = set_not_valid_before(cert_builder, self.notBefore)
            cert_builder = set_not_valid_after(cert_builder, self.notAfter)
            cert_builder = cert_builder.public_key(self.privatekey.public_key())
            has_ski = False
            # Copy all CSR extensions; the Subject Key Identifier may be
            # dropped (and re-created below) depending on configuration.
            for extension in self.csr.extensions:
                if isinstance(extension.value, x509.SubjectKeyIdentifier):
                    if self.create_subject_key_identifier == "always_create":
                        # Skip the CSR-provided SKI; a fresh one is added below.
                        continue
                    has_ski = True
                cert_builder = cert_builder.add_extension(
                    extension.value, critical=extension.critical
                )
            if not has_ski and self.create_subject_key_identifier != "never_create":
                cert_builder = cert_builder.add_extension(
                    x509.SubjectKeyIdentifier.from_public_key(
                        self.privatekey.public_key()
                    ),
                    critical=False,
                )
        except ValueError as e:
            raise CertificateError(str(e)) from e

        certificate = cert_builder.sign(
            private_key=self.privatekey,
            algorithm=self.digest,
        )

        self.cert = certificate

    def get_certificate_data(self) -> bytes:
        """Return bytes for self.cert."""
        if self.cert is None:
            raise AssertionError("Contract violation: cert has not been populated")
        return self.cert.public_bytes(Encoding.PEM)

    def needs_regeneration(
        self,
        *,
        not_before: datetime.datetime | None = None,
        not_after: datetime.datetime | None = None,
    ) -> bool:
        """Decide whether the certificate must be (re-)generated.

        NOTE(review): the ``not_before``/``not_after`` arguments are ignored;
        the values computed in ``__init__`` are forwarded to the base class
        instead — presumably intentional for the self-signed provider, but
        confirm against the base class contract.
        """
        assert self.privatekey is not None

        if super().needs_regeneration(
            not_before=self.notBefore, not_after=self.notAfter
        ):
            return True

        self._ensure_existing_certificate_loaded()
        assert self.existing_certificate is not None

        # Check whether certificate is signed by private key
        if not cryptography_verify_certificate_signature(
            certificate=self.existing_certificate,
            signer_public_key=self.privatekey.public_key(),
        ):
            return True

        return False

    def dump(self, *, include_certificate: bool) -> dict[str, t.Any]:
        """Serialize backend state into the module result dictionary.

        In check mode no certificate exists yet, so the values that *would*
        be used are reported; otherwise the actual certificate's values are.
        """
        result = super().dump(include_certificate=include_certificate)

        if self.module.check_mode:
            result.update(
                {
                    "notBefore": self.notBefore.strftime("%Y%m%d%H%M%SZ"),
                    "notAfter": self.notAfter.strftime("%Y%m%d%H%M%SZ"),
                    "serial_number": self.serial_number,
                }
            )
        else:
            if self.cert is None:
                # Nothing was generated in this run; fall back to what is on disk.
                self.cert = self.existing_certificate
            assert self.cert is not None
            result.update(
                {
                    "notBefore": get_not_valid_before(self.cert).strftime(
                        "%Y%m%d%H%M%SZ"
                    ),
                    "notAfter": get_not_valid_after(self.cert).strftime(
                        "%Y%m%d%H%M%SZ"
                    ),
                    "serial_number": self.cert.serial_number,
                }
            )

        return result
|
||||||
|
|
||||||
|
|
||||||
|
def generate_serial_number() -> int:
    """Generate a random serial number for a certificate.

    Returns:
        A uniformly distributed 160-bit integer that is at least 1000, so
        very small (reserved-looking) serial numbers are never produced.
    """
    # Use the secrets module (CSPRNG) instead of the random module:
    # certificate serial numbers must be unpredictable (RFC 5280 requires
    # unique serials; CA/Browser Forum rules require CSPRNG-sourced entropy),
    # and random.randrange() is a predictable Mersenne-Twister PRNG.
    import secrets

    while True:
        result = secrets.randbits(160)
        if result >= 1000:
            return result
|
||||||
|
|
||||||
|
|
||||||
|
class SelfSignedCertificateProvider(CertificateProvider):
    """Provider that creates self-signed certificates with the cryptography backend."""

    def validate_module_args(self, module: AnsibleModule) -> None:
        """Fail the module when no private key source has been supplied."""
        params = module.params
        has_key_source = (
            params["privatekey_path"] is not None
            or params["privatekey_content"] is not None
        )
        if not has_key_source:
            module.fail_json(
                msg="One of privatekey_path and privatekey_content must be specified for the selfsigned provider."
            )

    def create_backend(
        self, module: AnsibleModule
    ) -> SelfSignedCertificateBackendCryptography:
        """Instantiate the cryptography-based self-signed certificate backend."""
        return SelfSignedCertificateBackendCryptography(module=module)
|
||||||
|
|
||||||
|
|
||||||
|
def add_selfsigned_provider_to_argument_spec(argument_spec: ArgumentSpec) -> None:
    """Register the ``selfsigned`` provider and its options on *argument_spec*.

    Adds ``selfsigned`` to the ``provider`` choices and declares every
    ``selfsigned_*`` module option together with its default and aliases.
    """
    spec = argument_spec.argument_spec
    spec["provider"]["choices"].append("selfsigned")
    selfsigned_options = {
        # Historical option; 3 is the only accepted value and it is not used.
        "selfsigned_version": {"type": "int", "default": 3, "choices": [3]},
        "selfsigned_digest": {"type": "str", "default": "sha256"},
        "selfsigned_not_before": {
            "type": "str",
            "default": "+0s",
            "aliases": ["selfsigned_notBefore"],
        },
        "selfsigned_not_after": {
            "type": "str",
            "default": "+3650d",
            "aliases": ["selfsigned_notAfter"],
        },
        "selfsigned_create_subject_key_identifier": {
            "type": "str",
            "default": "create_if_not_provided",
            "choices": ["create_if_not_provided", "always_create", "never_create"],
        },
    }
    spec.update(selfsigned_options)
|
||||||
|
|
||||||
|
|
||||||
|
# Public API of this module; everything else is an implementation detail.
__all__ = (
    "SelfSignedCertificateBackendCryptography",
    "SelfSignedCertificateProvider",
    "add_selfsigned_provider_to_argument_spec",
)
|
||||||
132
plugins/module_utils/_crypto/module_backends/crl_info.py
Normal file
132
plugins/module_utils/_crypto/module_backends/crl_info.py
Normal file
@@ -0,0 +1,132 @@
|
|||||||
|
# Copyright (c) 2020, Felix Fontein <felix@fontein.de>
|
||||||
|
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||||
|
# SPDX-License-Identifier: GPL-3.0-or-later
|
||||||
|
|
||||||
|
# Note that this module util is **PRIVATE** to the collection. It can have breaking changes at any time.
|
||||||
|
# Do not use this from other collections or standalone plugins/modules!
|
||||||
|
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import typing as t
|
||||||
|
|
||||||
|
from ansible_collections.community.crypto.plugins.module_utils._crypto.cryptography_crl import (
|
||||||
|
TIMESTAMP_FORMAT,
|
||||||
|
cryptography_decode_revoked_certificate,
|
||||||
|
cryptography_dump_revoked,
|
||||||
|
cryptography_get_signature_algorithm_oid_from_crl,
|
||||||
|
)
|
||||||
|
from ansible_collections.community.crypto.plugins.module_utils._crypto.cryptography_support import (
|
||||||
|
cryptography_oid_to_name,
|
||||||
|
)
|
||||||
|
from ansible_collections.community.crypto.plugins.module_utils._crypto.pem import (
|
||||||
|
identify_pem_format,
|
||||||
|
)
|
||||||
|
from ansible_collections.community.crypto.plugins.module_utils._cryptography_dep import (
|
||||||
|
COLLECTION_MINIMUM_CRYPTOGRAPHY_VERSION,
|
||||||
|
assert_required_cryptography_version,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
if t.TYPE_CHECKING:
|
||||||
|
from ansible.module_utils.basic import AnsibleModule
|
||||||
|
from ansible_collections.community.crypto.plugins.plugin_utils._action_module import (
|
||||||
|
AnsibleActionModule,
|
||||||
|
)
|
||||||
|
from ansible_collections.community.crypto.plugins.plugin_utils._filter_module import (
|
||||||
|
FilterModuleMock,
|
||||||
|
)
|
||||||
|
from cryptography.hazmat.primitives.asymmetric.types import (
|
||||||
|
PrivateKeyTypes,
|
||||||
|
)
|
||||||
|
|
||||||
|
GeneralAnsibleModule = t.Union[AnsibleModule, AnsibleActionModule, FilterModuleMock]
|
||||||
|
|
||||||
|
|
||||||
|
# crypto_utils
|
||||||
|
|
||||||
|
MINIMAL_CRYPTOGRAPHY_VERSION = COLLECTION_MINIMUM_CRYPTOGRAPHY_VERSION
|
||||||
|
|
||||||
|
try:
|
||||||
|
from cryptography import x509
|
||||||
|
except ImportError:
|
||||||
|
pass
|
||||||
|
|
||||||
|
|
||||||
|
class CRLInfoRetrieval:
    """Extract information from a certificate revocation list (CRL)."""

    def __init__(
        self,
        *,
        module: GeneralAnsibleModule,
        content: bytes,
        list_revoked_certificates: bool = True,
    ) -> None:
        """Store the CRL bytes and remember how names should be rendered.

        :param module: module-like object used for ``params`` and ``fail_json``.
        :param content: raw CRL, either PEM or DER encoded.
        :param list_revoked_certificates: whether get_info() also decodes every
            revoked certificate entry.
        """
        # content must be a bytes string
        self.module = module
        self.content = content
        self.list_revoked_certificates = list_revoked_certificates
        # Falls back to "ignore" when the module declares no name_encoding option.
        self.name_encoding = module.params.get("name_encoding", "ignore")

    def get_info(self) -> dict[str, t.Any]:
        """Parse ``self.content`` as a PEM or DER CRL and describe it.

        Fails the module (via ``fail_json``) when the content cannot be decoded.
        """
        crl_pem = identify_pem_format(self.content)
        try:
            if crl_pem:
                crl = x509.load_pem_x509_crl(self.content)
            else:
                crl = x509.load_der_x509_crl(self.content)
        except ValueError as e:
            self.module.fail_json(msg=f"Error while decoding CRL: {e}")

        result: dict[str, t.Any] = {
            "changed": False,
            "format": "pem" if crl_pem else "der",
            "last_update": None,
            "next_update": None,
            "digest": None,
            "issuer_ordered": None,
            "issuer": None,
        }

        result["last_update"] = crl.last_update.strftime(TIMESTAMP_FORMAT)
        # next_update is optional in a CRL, so it may stay None.
        result["next_update"] = (
            crl.next_update.strftime(TIMESTAMP_FORMAT) if crl.next_update else None
        )
        result["digest"] = cryptography_oid_to_name(
            cryptography_get_signature_algorithm_oid_from_crl(crl)
        )
        # issuer_ordered preserves attribute order; issuer is a plain mapping
        # and thus collapses duplicate attribute types.
        issuer = []
        for attribute in crl.issuer:
            issuer.append([cryptography_oid_to_name(attribute.oid), attribute.value])
        result["issuer_ordered"] = issuer
        issuer_dict = {}
        for k, v in issuer:
            issuer_dict[k] = v
        result["issuer"] = issuer_dict
        if self.list_revoked_certificates:
            result["revoked_certificates"] = []
            for cert in crl:
                entry = cryptography_decode_revoked_certificate(cert)
                result["revoked_certificates"].append(
                    cryptography_dump_revoked(entry, idn_rewrite=self.name_encoding)
                )

        return result
|
||||||
|
|
||||||
|
|
||||||
|
def get_crl_info(
    *,
    module: GeneralAnsibleModule,
    content: bytes,
    list_revoked_certificates: bool = True,
) -> dict[str, t.Any]:
    """Parse a CRL given as bytes and return a dictionary describing it.

    Fails the module when the installed cryptography version is too old.
    """
    assert_required_cryptography_version(
        module, minimum_cryptography_version=MINIMAL_CRYPTOGRAPHY_VERSION
    )
    return CRLInfoRetrieval(
        module=module,
        content=content,
        list_revoked_certificates=list_revoked_certificates,
    ).get_info()
|
||||||
|
|
||||||
|
|
||||||
|
# Public API of this module.
__all__ = ("CRLInfoRetrieval", "get_crl_info")
|
||||||
939
plugins/module_utils/_crypto/module_backends/csr.py
Normal file
939
plugins/module_utils/_crypto/module_backends/csr.py
Normal file
@@ -0,0 +1,939 @@
|
|||||||
|
# Copyright (c) 2016, Yanis Guenane <yanis+ansible@guenane.org>
|
||||||
|
# Copyright (c) 2020, Felix Fontein <felix@fontein.de>
|
||||||
|
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||||
|
# SPDX-License-Identifier: GPL-3.0-or-later
|
||||||
|
|
||||||
|
# Note that this module util is **PRIVATE** to the collection. It can have breaking changes at any time.
|
||||||
|
# Do not use this from other collections or standalone plugins/modules!
|
||||||
|
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import abc
|
||||||
|
import binascii
|
||||||
|
import typing as t
|
||||||
|
|
||||||
|
from ansible.module_utils.common.text.converters import to_text
|
||||||
|
from ansible_collections.community.crypto.plugins.module_utils._argspec import (
|
||||||
|
ArgumentSpec,
|
||||||
|
)
|
||||||
|
from ansible_collections.community.crypto.plugins.module_utils._crypto.basic import (
|
||||||
|
OpenSSLBadPassphraseError,
|
||||||
|
OpenSSLObjectError,
|
||||||
|
)
|
||||||
|
from ansible_collections.community.crypto.plugins.module_utils._crypto.cryptography_crl import (
|
||||||
|
REVOCATION_REASON_MAP,
|
||||||
|
)
|
||||||
|
from ansible_collections.community.crypto.plugins.module_utils._crypto.cryptography_support import (
|
||||||
|
cryptography_get_basic_constraints,
|
||||||
|
cryptography_get_name,
|
||||||
|
cryptography_key_needs_digest_for_signing,
|
||||||
|
cryptography_name_to_oid,
|
||||||
|
cryptography_parse_key_usage_params,
|
||||||
|
cryptography_parse_relative_distinguished_name,
|
||||||
|
is_potential_certificate_issuer_public_key,
|
||||||
|
)
|
||||||
|
from ansible_collections.community.crypto.plugins.module_utils._crypto.module_backends.csr_info import (
|
||||||
|
get_csr_info,
|
||||||
|
)
|
||||||
|
from ansible_collections.community.crypto.plugins.module_utils._crypto.support import (
|
||||||
|
load_certificate_issuer_privatekey,
|
||||||
|
load_certificate_request,
|
||||||
|
parse_name_field,
|
||||||
|
parse_ordered_name_field,
|
||||||
|
select_message_digest,
|
||||||
|
)
|
||||||
|
from ansible_collections.community.crypto.plugins.module_utils._cryptography_dep import (
|
||||||
|
COLLECTION_MINIMUM_CRYPTOGRAPHY_VERSION,
|
||||||
|
assert_required_cryptography_version,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
if t.TYPE_CHECKING:
|
||||||
|
from ansible.module_utils.basic import AnsibleModule
|
||||||
|
from ansible_collections.community.crypto.plugins.module_utils._crypto.cryptography_support import (
|
||||||
|
CertificatePrivateKeyTypes,
|
||||||
|
)
|
||||||
|
from cryptography.hazmat.primitives.asymmetric.types import (
|
||||||
|
CertificateIssuerPrivateKeyTypes,
|
||||||
|
PrivateKeyTypes,
|
||||||
|
)
|
||||||
|
|
||||||
|
_ET = t.TypeVar("_ET", bound="cryptography.x509.ExtensionType")
|
||||||
|
|
||||||
|
|
||||||
|
MINIMAL_CRYPTOGRAPHY_VERSION = COLLECTION_MINIMUM_CRYPTOGRAPHY_VERSION
|
||||||
|
|
||||||
|
try:
|
||||||
|
import cryptography
|
||||||
|
import cryptography.exceptions
|
||||||
|
import cryptography.hazmat.backends
|
||||||
|
import cryptography.hazmat.primitives.hashes
|
||||||
|
import cryptography.hazmat.primitives.serialization
|
||||||
|
import cryptography.x509
|
||||||
|
import cryptography.x509.oid
|
||||||
|
except ImportError:
|
||||||
|
pass
|
||||||
|
|
||||||
|
|
||||||
|
class CertificateSigningRequestError(OpenSSLObjectError):
    """Raised for errors while composing, signing, or checking a CSR."""

    pass
|
||||||
|
|
||||||
|
|
||||||
|
# From the object called `module`, only the following properties are used:
|
||||||
|
#
|
||||||
|
# - module.params[]
|
||||||
|
# - module.warn(msg: str)
|
||||||
|
# - module.fail_json(msg: str, **kwargs)
|
||||||
|
|
||||||
|
|
||||||
|
class CertificateSigningRequestBackend(metaclass=abc.ABCMeta):
    """Abstract base for CSR backends: reads options, checks state, serializes.

    Subclasses implement generate_csr(), get_csr_data() and _check_csr().
    """

    def __init__(self, *, module: AnsibleModule) -> None:
        """Read all CSR-related module options and normalize them.

        Raises:
            CertificateSigningRequestError: when subject fields or key
                identifiers cannot be parsed.
        """
        self.module = module
        self.digest: str = module.params["digest"]
        self.privatekey_path: str | None = module.params["privatekey_path"]
        privatekey_content: str | None = module.params["privatekey_content"]
        if privatekey_content is not None:
            self.privatekey_content: bytes | None = privatekey_content.encode("utf-8")
        else:
            self.privatekey_content = None
        self.privatekey_passphrase: str | None = module.params["privatekey_passphrase"]
        self.version: t.Literal[1] = module.params["version"]
        self.subjectAltName: list[str] | None = module.params["subject_alt_name"]
        self.subjectAltName_critical: bool = module.params["subject_alt_name_critical"]
        self.keyUsage: list[str] | None = module.params["key_usage"]
        self.keyUsage_critical: bool = module.params["key_usage_critical"]
        self.extendedKeyUsage: list[str] | None = module.params["extended_key_usage"]
        self.extendedKeyUsage_critical: bool = module.params[
            "extended_key_usage_critical"
        ]
        self.basicConstraints: list[str] | None = module.params["basic_constraints"]
        self.basicConstraints_critical: bool = module.params[
            "basic_constraints_critical"
        ]
        self.ocspMustStaple: bool = module.params["ocsp_must_staple"]
        self.ocspMustStaple_critical: bool = module.params["ocsp_must_staple_critical"]
        self.name_constraints_permitted: list[str] = (
            module.params["name_constraints_permitted"] or []
        )
        self.name_constraints_excluded: list[str] = (
            module.params["name_constraints_excluded"] or []
        )
        self.name_constraints_critical: bool = module.params[
            "name_constraints_critical"
        ]
        self.create_subject_key_identifier: bool = module.params[
            "create_subject_key_identifier"
        ]
        subject_key_identifier: str | None = module.params["subject_key_identifier"]
        authority_key_identifier: str | None = module.params["authority_key_identifier"]
        self.authority_cert_issuer: list[str] | None = module.params[
            "authority_cert_issuer"
        ]
        self.authority_cert_serial_number: int = module.params[
            "authority_cert_serial_number"
        ]
        self.crl_distribution_points: (
            list[cryptography.x509.DistributionPoint] | None
        ) = None
        self.csr: cryptography.x509.CertificateSigningRequest | None = None
        self.privatekey: CertificateIssuerPrivateKeyTypes | None = None

        # The two SKI options are mutually exclusive.
        if self.create_subject_key_identifier and subject_key_identifier is not None:
            module.fail_json(
                msg="subject_key_identifier cannot be specified if create_subject_key_identifier is true"
            )

        # Build the subject from the individual convenience options first,
        # dropping entries whose value is empty/None.
        self.ordered_subject = False
        self.subject = [
            ("C", module.params["country_name"]),
            ("ST", module.params["state_or_province_name"]),
            ("L", module.params["locality_name"]),
            ("O", module.params["organization_name"]),
            ("OU", module.params["organizational_unit_name"]),
            ("CN", module.params["common_name"]),
            ("emailAddress", module.params["email_address"]),
        ]
        self.subject = [(entry[0], entry[1]) for entry in self.subject if entry[1]]

        try:
            if module.params["subject"]:
                self.subject = self.subject + parse_name_field(
                    module.params["subject"], name_field_name="subject"
                )
            if module.params["subject_ordered"]:
                # subject_ordered replaces (and must not be mixed with) the
                # unordered subject options.
                if self.subject:
                    raise CertificateSigningRequestError(
                        "subject_ordered cannot be combined with any other subject field"
                    )
                self.subject = parse_ordered_name_field(
                    module.params["subject_ordered"], name_field_name="subject_ordered"
                )
                self.ordered_subject = True
        except ValueError as exc:
            raise CertificateSigningRequestError(str(exc)) from exc

        # Optionally derive a DNS SAN from the common name when no SAN is given.
        self.using_common_name_for_san = False
        if not self.subjectAltName and module.params["use_common_name_for_san"]:
            for sub in self.subject:
                if sub[0] in ("commonName", "CN"):
                    self.subjectAltName = [f"DNS:{sub[1]}"]
                    self.using_common_name_for_san = True
                    break

        # Key identifiers are given as (possibly colon-separated) hex strings.
        self.subject_key_identifier: bytes | None = None
        if subject_key_identifier is not None:
            try:
                self.subject_key_identifier = binascii.unhexlify(
                    subject_key_identifier.replace(":", "")
                )
            except Exception as e:
                raise CertificateSigningRequestError(
                    f"Cannot parse subject_key_identifier: {e}"
                ) from e

        self.authority_key_identifier: bytes | None = None
        if authority_key_identifier is not None:
            try:
                self.authority_key_identifier = binascii.unhexlify(
                    authority_key_identifier.replace(":", "")
                )
            except Exception as e:
                raise CertificateSigningRequestError(
                    f"Cannot parse authority_key_identifier: {e}"
                ) from e

        self.existing_csr: cryptography.x509.CertificateSigningRequest | None = None
        self.existing_csr_bytes: bytes | None = None

        self.diff_before = self._get_info(data=None)
        self.diff_after = self._get_info(data=None)

    def _get_info(self, *, data: bytes | None) -> dict[str, t.Any]:
        """Best-effort parse of CSR bytes for diff output; never raises."""
        if data is None:
            return {}
        try:
            result = get_csr_info(
                module=self.module,
                content=data,
                validate_signature=False,
                prefer_one_fingerprint=True,
            )
            result["can_parse_csr"] = True
            return result
        except Exception:
            # Unparseable data is still reported, just flagged as such.
            return {"can_parse_csr": False}

    @abc.abstractmethod
    def generate_csr(self) -> None:
        """(Re-)Generate CSR."""

    @abc.abstractmethod
    def get_csr_data(self) -> bytes:
        """Return bytes for self.csr."""

    def set_existing(self, *, csr_bytes: bytes | None) -> None:
        """Set existing CSR bytes. None indicates that the CSR does not exist."""
        self.existing_csr_bytes = csr_bytes
        self.diff_after = self.diff_before = self._get_info(
            data=self.existing_csr_bytes
        )

    def has_existing(self) -> bool:
        """Query whether an existing CSR is/has been there."""
        return self.existing_csr_bytes is not None

    def _ensure_private_key_loaded(self) -> None:
        """Load the provided private key into self.privatekey."""
        # Idempotent: a second call is a no-op.
        if self.privatekey is not None:
            return
        try:
            self.privatekey = load_certificate_issuer_privatekey(
                path=self.privatekey_path,
                content=self.privatekey_content,
                passphrase=self.privatekey_passphrase,
            )
        except OpenSSLBadPassphraseError as exc:
            raise CertificateSigningRequestError(exc) from exc

    @abc.abstractmethod
    def _check_csr(self) -> bool:
        """Check whether provided parameters, assuming self.existing_csr and self.privatekey have been populated."""

    def needs_regeneration(self) -> bool:
        """Check whether a regeneration is necessary."""
        if self.existing_csr_bytes is None:
            return True
        try:
            self.existing_csr = load_certificate_request(
                content=self.existing_csr_bytes,
            )
        except Exception:
            # Anything unparseable must be regenerated.
            return True
        self._ensure_private_key_loaded()
        return not self._check_csr()

    def dump(self, *, include_csr: bool) -> dict[str, t.Any]:
        """Serialize the object into a dictionary."""
        result: dict[str, t.Any] = {
            "privatekey": self.privatekey_path,
            "subject": self.subject,
            "subjectAltName": self.subjectAltName,
            "keyUsage": self.keyUsage,
            "extendedKeyUsage": self.extendedKeyUsage,
            "basicConstraints": self.basicConstraints,
            "ocspMustStaple": self.ocspMustStaple,
            "name_constraints_permitted": self.name_constraints_permitted,
            "name_constraints_excluded": self.name_constraints_excluded,
        }
        # Get hold of CSR bytes
        csr_bytes = self.existing_csr_bytes
        if self.csr is not None:
            # A freshly generated CSR takes precedence over the one on disk.
            csr_bytes = self.get_csr_data()
        self.diff_after = self._get_info(data=csr_bytes)
        if include_csr:
            # Store result
            result["csr"] = csr_bytes.decode("utf-8") if csr_bytes else None

        result["diff"] = {
            "before": self.diff_before,
            "after": self.diff_after,
        }
        return result
|
||||||
|
|
||||||
|
|
||||||
|
def parse_crl_distribution_points(
    *, module: AnsibleModule, crl_distribution_points: list[dict[str, t.Any]]
) -> list[cryptography.x509.DistributionPoint]:
    """Convert module option dictionaries into x509.DistributionPoint objects.

    Raises OpenSSLObjectError, mentioning the index of the offending entry,
    when a distribution point cannot be parsed.
    """
    parsed = []
    for index, dp in enumerate(crl_distribution_points):
        try:
            full_name = None
            if dp["full_name"] is not None:
                if not dp["full_name"]:
                    raise OpenSSLObjectError("full_name must not be empty")
                full_name = [
                    cryptography_get_name(name, what="full name")
                    for name in dp["full_name"]
                ]

            relative_name = None
            if dp["relative_name"] is not None:
                if not dp["relative_name"]:
                    raise OpenSSLObjectError("relative_name must not be empty")
                relative_name = cryptography_parse_relative_distinguished_name(
                    dp["relative_name"]
                )

            crl_issuer = None
            if dp["crl_issuer"] is not None:
                if not dp["crl_issuer"]:
                    raise OpenSSLObjectError("crl_issuer must not be empty")
                crl_issuer = [
                    cryptography_get_name(name, what="CRL issuer")
                    for name in dp["crl_issuer"]
                ]

            reasons = None
            if dp["reasons"] is not None:
                reasons = frozenset(
                    REVOCATION_REASON_MAP[reason] for reason in dp["reasons"]
                )

            parsed.append(
                cryptography.x509.DistributionPoint(
                    full_name=full_name,
                    relative_name=relative_name,
                    crl_issuer=crl_issuer,
                    reasons=reasons,
                )
            )
        except (OpenSSLObjectError, ValueError) as e:
            raise OpenSSLObjectError(
                f"Error while parsing CRL distribution point #{index}: {e}"
            ) from e
    return parsed
|
||||||
|
|
||||||
|
|
||||||
|
# Implementation with using cryptography
|
||||||
|
class CertificateSigningRequestCryptographyBackend(CertificateSigningRequestBackend):
|
||||||
|
def __init__(self, *, module: AnsibleModule) -> None:
|
||||||
|
super().__init__(module=module)
|
||||||
|
if self.version != 1:
|
||||||
|
module.warn(
|
||||||
|
"The cryptography backend only supports version 1. (The only valid value according to RFC 2986.)"
|
||||||
|
)
|
||||||
|
|
||||||
|
crl_distribution_points: list[dict[str, t.Any]] | None = module.params[
|
||||||
|
"crl_distribution_points"
|
||||||
|
]
|
||||||
|
if crl_distribution_points:
|
||||||
|
self.crl_distribution_points = parse_crl_distribution_points(
|
||||||
|
module=module, crl_distribution_points=crl_distribution_points
|
||||||
|
)
|
||||||
|
|
||||||
|
def generate_csr(self) -> None:
|
||||||
|
"""(Re-)Generate CSR."""
|
||||||
|
self._ensure_private_key_loaded()
|
||||||
|
assert self.privatekey is not None
|
||||||
|
|
||||||
|
csr = cryptography.x509.CertificateSigningRequestBuilder()
|
||||||
|
try:
|
||||||
|
csr = csr.subject_name(
|
||||||
|
cryptography.x509.Name(
|
||||||
|
[
|
||||||
|
cryptography.x509.NameAttribute(
|
||||||
|
cryptography_name_to_oid(entry[0]), to_text(entry[1])
|
||||||
|
)
|
||||||
|
for entry in self.subject
|
||||||
|
]
|
||||||
|
)
|
||||||
|
)
|
||||||
|
except ValueError as e:
|
||||||
|
raise CertificateSigningRequestError(e) from e
|
||||||
|
|
||||||
|
if self.subjectAltName:
|
||||||
|
csr = csr.add_extension(
|
||||||
|
cryptography.x509.SubjectAlternativeName(
|
||||||
|
[cryptography_get_name(name) for name in self.subjectAltName]
|
||||||
|
),
|
||||||
|
critical=self.subjectAltName_critical,
|
||||||
|
)
|
||||||
|
|
||||||
|
if self.keyUsage:
|
||||||
|
params = cryptography_parse_key_usage_params(self.keyUsage)
|
||||||
|
csr = csr.add_extension(
|
||||||
|
cryptography.x509.KeyUsage(**params), critical=self.keyUsage_critical
|
||||||
|
)
|
||||||
|
|
||||||
|
if self.extendedKeyUsage:
|
||||||
|
usages = [
|
||||||
|
cryptography_name_to_oid(usage) for usage in self.extendedKeyUsage
|
||||||
|
]
|
||||||
|
csr = csr.add_extension(
|
||||||
|
cryptography.x509.ExtendedKeyUsage(usages),
|
||||||
|
critical=self.extendedKeyUsage_critical,
|
||||||
|
)
|
||||||
|
|
||||||
|
if self.basicConstraints:
|
||||||
|
params = {}
|
||||||
|
ca, path_length = cryptography_get_basic_constraints(self.basicConstraints)
|
||||||
|
csr = csr.add_extension(
|
||||||
|
cryptography.x509.BasicConstraints(ca, path_length),
|
||||||
|
critical=self.basicConstraints_critical,
|
||||||
|
)
|
||||||
|
|
||||||
|
if self.ocspMustStaple:
|
||||||
|
csr = csr.add_extension(
|
||||||
|
cryptography.x509.TLSFeature(
|
||||||
|
[cryptography.x509.TLSFeatureType.status_request]
|
||||||
|
),
|
||||||
|
critical=self.ocspMustStaple_critical,
|
||||||
|
)
|
||||||
|
|
||||||
|
if self.name_constraints_permitted or self.name_constraints_excluded:
|
||||||
|
try:
|
||||||
|
csr = csr.add_extension(
|
||||||
|
cryptography.x509.NameConstraints(
|
||||||
|
[
|
||||||
|
cryptography_get_name(
|
||||||
|
name, what="name constraints permitted"
|
||||||
|
)
|
||||||
|
for name in self.name_constraints_permitted
|
||||||
|
]
|
||||||
|
or None,
|
||||||
|
[
|
||||||
|
cryptography_get_name(
|
||||||
|
name, what="name constraints excluded"
|
||||||
|
)
|
||||||
|
for name in self.name_constraints_excluded
|
||||||
|
]
|
||||||
|
or None,
|
||||||
|
),
|
||||||
|
critical=self.name_constraints_critical,
|
||||||
|
)
|
||||||
|
except TypeError as e:
|
||||||
|
raise OpenSSLObjectError(
|
||||||
|
f"Error while parsing name constraint: {e}"
|
||||||
|
) from e
|
||||||
|
|
||||||
|
if self.create_subject_key_identifier:
|
||||||
|
if not is_potential_certificate_issuer_public_key(
|
||||||
|
self.privatekey.public_key()
|
||||||
|
):
|
||||||
|
raise OpenSSLObjectError(
|
||||||
|
"Private key can not be used to create subject key identifier"
|
||||||
|
)
|
||||||
|
csr = csr.add_extension(
|
||||||
|
cryptography.x509.SubjectKeyIdentifier.from_public_key(
|
||||||
|
self.privatekey.public_key()
|
||||||
|
),
|
||||||
|
critical=False,
|
||||||
|
)
|
||||||
|
elif self.subject_key_identifier is not None:
|
||||||
|
csr = csr.add_extension(
|
||||||
|
cryptography.x509.SubjectKeyIdentifier(self.subject_key_identifier),
|
||||||
|
critical=False,
|
||||||
|
)
|
||||||
|
|
||||||
|
if (
|
||||||
|
self.authority_key_identifier is not None
|
||||||
|
or self.authority_cert_issuer is not None
|
||||||
|
or self.authority_cert_serial_number is not None
|
||||||
|
):
|
||||||
|
issuers = None
|
||||||
|
if self.authority_cert_issuer is not None:
|
||||||
|
issuers = [
|
||||||
|
cryptography_get_name(n, what="authority cert issuer")
|
||||||
|
for n in self.authority_cert_issuer
|
||||||
|
]
|
||||||
|
csr = csr.add_extension(
|
||||||
|
cryptography.x509.AuthorityKeyIdentifier(
|
||||||
|
self.authority_key_identifier,
|
||||||
|
issuers,
|
||||||
|
self.authority_cert_serial_number,
|
||||||
|
),
|
||||||
|
critical=False,
|
||||||
|
)
|
||||||
|
|
||||||
|
if self.crl_distribution_points:
|
||||||
|
csr = csr.add_extension(
|
||||||
|
cryptography.x509.CRLDistributionPoints(self.crl_distribution_points),
|
||||||
|
critical=False,
|
||||||
|
)
|
||||||
|
|
||||||
|
# csr.sign() does not accept some digests we theoretically could have in digest.
|
||||||
|
# For that reason we use type t.Any here. csr.sign() will complain if
|
||||||
|
# the digest is not acceptable.
|
||||||
|
digest: t.Any | None = None
|
||||||
|
if cryptography_key_needs_digest_for_signing(self.privatekey):
|
||||||
|
digest = select_message_digest(self.digest)
|
||||||
|
if digest is None:
|
||||||
|
raise CertificateSigningRequestError(
|
||||||
|
f'Unsupported digest "{self.digest}"'
|
||||||
|
)
|
||||||
|
try:
|
||||||
|
self.csr = csr.sign(self.privatekey, digest)
|
||||||
|
except UnicodeError as e:
|
||||||
|
# This catches IDNAErrors, which happens when a bad name is passed as a SAN
|
||||||
|
# (https://github.com/ansible-collections/community.crypto/issues/105).
|
||||||
|
# For older cryptography versions, this is handled by idna, which raises
|
||||||
|
# an idna.core.IDNAError. Later versions of cryptography deprecated and stopped
|
||||||
|
# requiring idna, whence we cannot easily handle this error. Fortunately, in
|
||||||
|
# most versions of idna, IDNAError extends UnicodeError. There is only version
|
||||||
|
# 2.3 where it extends Exception instead (see
|
||||||
|
# https://github.com/kjd/idna/commit/ebefacd3134d0f5da4745878620a6a1cba86d130
|
||||||
|
# and then
|
||||||
|
# https://github.com/kjd/idna/commit/ea03c7b5db7d2a99af082e0239da2b68aeea702a).
|
||||||
|
msg = f"Error while creating CSR: {e}\n"
|
||||||
|
if self.using_common_name_for_san:
|
||||||
|
self.module.fail_json(
|
||||||
|
msg=msg
|
||||||
|
+ "This is probably caused because the Common Name is used as a SAN."
|
||||||
|
" Specifying use_common_name_for_san=false might fix this."
|
||||||
|
)
|
||||||
|
self.module.fail_json(
|
||||||
|
msg=msg
|
||||||
|
+ "This is probably caused by an invalid Subject Alternative DNS Name."
|
||||||
|
)
|
||||||
|
|
||||||
|
def get_csr_data(self) -> bytes:
|
||||||
|
"""Return bytes for self.csr."""
|
||||||
|
if self.csr is None:
|
||||||
|
raise AssertionError("Violated contract: csr is not populated")
|
||||||
|
return self.csr.public_bytes(
|
||||||
|
cryptography.hazmat.primitives.serialization.Encoding.PEM
|
||||||
|
)
|
||||||
|
|
||||||
|
    def _check_csr(self) -> bool:
        """Check whether provided parameters, assuming self.existing_csr and self.privatekey have been populated.

        Returns True if the existing CSR matches the requested subject and all
        requested extensions (including their criticality), and its signature
        matches the loaded private key.
        """
        if self.existing_csr is None:
            raise AssertionError("Violated contract: existing_csr is not populated")
        if self.privatekey is None:
            raise AssertionError("Violated contract: privatekey is not populated")

        def _check_subject(csr: cryptography.x509.CertificateSigningRequest) -> bool:
            # Compare wanted vs. actual subject; order only matters when
            # ordered_subject is set.
            subject = [
                (cryptography_name_to_oid(entry[0]), to_text(entry[1]))
                for entry in self.subject
            ]
            current_subject = [(sub.oid, sub.value) for sub in csr.subject]
            if self.ordered_subject:
                return subject == current_subject
            return set(subject) == set(current_subject)

        def _find_extension(
            extensions: cryptography.x509.Extensions, exttype: type[_ET]
        ) -> cryptography.x509.Extension[_ET] | None:
            # Return the first extension of the given type, or None if absent.
            return next(
                (ext for ext in extensions if isinstance(ext.value, exttype)), None
            )

        def _check_subjectAltName(extensions: cryptography.x509.Extensions) -> bool:
            # SAN entries are compared as text, order-insensitively; criticality
            # is only checked when SANs are actually requested.
            current_altnames_ext = _find_extension(
                extensions, cryptography.x509.SubjectAlternativeName
            )
            current_altnames = (
                [to_text(altname) for altname in current_altnames_ext.value]
                if current_altnames_ext
                else []
            )
            altnames = (
                [
                    to_text(cryptography_get_name(altname))
                    for altname in self.subjectAltName
                ]
                if self.subjectAltName
                else []
            )
            if set(altnames) != set(current_altnames):
                return False
            if altnames and current_altnames_ext:
                if current_altnames_ext.critical != self.subjectAltName_critical:
                    return False
            return True

        def _check_keyUsage(extensions: cryptography.x509.Extensions) -> bool:
            current_keyusage_ext = _find_extension(
                extensions, cryptography.x509.KeyUsage
            )
            if not self.keyUsage:
                # No key usage requested: the extension must be absent.
                return current_keyusage_ext is None
            if current_keyusage_ext is None:
                return False
            params = cryptography_parse_key_usage_params(self.keyUsage)
            for param, value in params.items():
                # TODO: check whether getattr() with '_' prepended is really needed
                if getattr(current_keyusage_ext.value, "_" + param) != value:
                    return False
            return current_keyusage_ext.critical == self.keyUsage_critical

        def _check_extenededKeyUsage(extensions: cryptography.x509.Extensions) -> bool:
            # Compare EKU OIDs as strings, order-insensitively.
            current_usages_ext = _find_extension(
                extensions, cryptography.x509.ExtendedKeyUsage
            )
            current_usages = (
                [str(usage) for usage in current_usages_ext.value]
                if current_usages_ext
                else []
            )
            usages = (
                [
                    str(cryptography_name_to_oid(usage))
                    for usage in self.extendedKeyUsage
                ]
                if self.extendedKeyUsage
                else []
            )
            if set(current_usages) != set(usages):
                return False
            if usages and current_usages_ext:
                if current_usages_ext.critical != self.extendedKeyUsage_critical:
                    return False
            return True

        def _check_basicConstraints(extensions: cryptography.x509.Extensions) -> bool:
            bc_ext = _find_extension(extensions, cryptography.x509.BasicConstraints)
            current_ca = bc_ext.value.ca if bc_ext else False
            current_path_length = bc_ext.value.path_length if bc_ext else None
            ca, path_length = cryptography_get_basic_constraints(self.basicConstraints)
            # Check CA flag
            if ca != current_ca:
                return False
            # Check path length
            if path_length != current_path_length:
                return False
            # Check criticality
            if self.basicConstraints:
                return (
                    bc_ext is not None
                    and bc_ext.critical == self.basicConstraints_critical
                )
            return bc_ext is None

        def _check_ocspMustStaple(extensions: cryptography.x509.Extensions) -> bool:
            tlsfeature_ext = _find_extension(extensions, cryptography.x509.TLSFeature)
            if self.ocspMustStaple:
                if (
                    not tlsfeature_ext
                    or tlsfeature_ext.critical != self.ocspMustStaple_critical
                ):
                    return False
                return (
                    cryptography.x509.TLSFeatureType.status_request
                    in tlsfeature_ext.value
                )
            return tlsfeature_ext is None

        def _check_nameConstraints(extensions: cryptography.x509.Extensions) -> bool:
            # Permitted and excluded subtrees are compared as text sets.
            current_nc_ext = _find_extension(
                extensions, cryptography.x509.NameConstraints
            )
            current_nc_perm = (
                [
                    to_text(altname)
                    for altname in current_nc_ext.value.permitted_subtrees or []
                ]
                if current_nc_ext
                else []
            )
            current_nc_excl = (
                [
                    to_text(altname)
                    for altname in current_nc_ext.value.excluded_subtrees or []
                ]
                if current_nc_ext
                else []
            )
            nc_perm = [
                to_text(
                    cryptography_get_name(altname, what="name constraints permitted")
                )
                for altname in self.name_constraints_permitted
            ]
            nc_excl = [
                to_text(
                    cryptography_get_name(altname, what="name constraints excluded")
                )
                for altname in self.name_constraints_excluded
            ]
            if set(nc_perm) != set(current_nc_perm) or set(nc_excl) != set(
                current_nc_excl
            ):
                return False
            if (nc_perm or nc_excl) and current_nc_ext:
                if current_nc_ext.critical != self.name_constraints_critical:
                    return False
            return True

        def _check_subject_key_identifier(
            extensions: cryptography.x509.Extensions,
        ) -> bool:
            # The SKI extension must be present and non-critical when requested.
            ext = _find_extension(extensions, cryptography.x509.SubjectKeyIdentifier)
            if (
                self.create_subject_key_identifier
                or self.subject_key_identifier is not None
            ):
                if not ext or ext.critical:
                    return False
                if self.create_subject_key_identifier:
                    assert self.privatekey is not None
                    digest = cryptography.x509.SubjectKeyIdentifier.from_public_key(
                        self.privatekey.public_key()
                    ).digest
                    return ext.value.digest == digest
                return ext.value.digest == self.subject_key_identifier
            return ext is None

        def _check_authority_key_identifier(
            extensions: cryptography.x509.Extensions,
        ) -> bool:
            # The AKI extension must be present and non-critical when any of
            # its components was requested; all three fields must match.
            ext = _find_extension(extensions, cryptography.x509.AuthorityKeyIdentifier)
            if (
                self.authority_key_identifier is not None
                or self.authority_cert_issuer is not None
                or self.authority_cert_serial_number is not None
            ):
                if not ext or ext.critical:
                    return False
                aci = None
                csr_aci = None
                if self.authority_cert_issuer is not None:
                    aci = [
                        to_text(cryptography_get_name(n, what="authority cert issuer"))
                        for n in self.authority_cert_issuer
                    ]
                if ext.value.authority_cert_issuer is not None:
                    csr_aci = [to_text(n) for n in ext.value.authority_cert_issuer]
                return (
                    ext.value.key_identifier == self.authority_key_identifier
                    and csr_aci == aci
                    and ext.value.authority_cert_serial_number
                    == self.authority_cert_serial_number
                )
            return ext is None

        def _check_crl_distribution_points(
            extensions: cryptography.x509.Extensions,
        ) -> bool:
            ext = _find_extension(extensions, cryptography.x509.CRLDistributionPoints)
            if self.crl_distribution_points is None:
                return ext is None
            if not ext:
                return False
            # Order matters here: the parsed distribution points are compared
            # against the extension's value as lists.
            return list(ext.value) == self.crl_distribution_points

        def _check_extensions(csr: cryptography.x509.CertificateSigningRequest) -> bool:
            # All individual extension checks must pass.
            extensions = csr.extensions
            return (
                _check_subjectAltName(extensions)
                and _check_keyUsage(extensions)
                and _check_extenededKeyUsage(extensions)
                and _check_basicConstraints(extensions)
                and _check_ocspMustStaple(extensions)
                and _check_subject_key_identifier(extensions)
                and _check_authority_key_identifier(extensions)
                and _check_nameConstraints(extensions)
                and _check_crl_distribution_points(extensions)
            )

        def _check_signature(csr: cryptography.x509.CertificateSigningRequest) -> bool:
            if not csr.is_signature_valid:
                return False
            # To check whether public key of CSR belongs to private key,
            # encode both public keys and compare PEMs.
            key_a = csr.public_key().public_bytes(
                cryptography.hazmat.primitives.serialization.Encoding.PEM,
                cryptography.hazmat.primitives.serialization.PublicFormat.SubjectPublicKeyInfo,
            )
            assert self.privatekey is not None
            key_b = self.privatekey.public_key().public_bytes(
                cryptography.hazmat.primitives.serialization.Encoding.PEM,
                cryptography.hazmat.primitives.serialization.PublicFormat.SubjectPublicKeyInfo,
            )
            return key_a == key_b

        return (
            _check_subject(self.existing_csr)
            and _check_extensions(self.existing_csr)
            and _check_signature(self.existing_csr)
        )
|
|
||||||
|
def select_backend(
    module: AnsibleModule,
) -> CertificateSigningRequestCryptographyBackend:
    """Verify the cryptography requirement and return the CSR backend instance."""
    assert_required_cryptography_version(
        module,
        minimum_cryptography_version=MINIMAL_CRYPTOGRAPHY_VERSION,
    )
    backend = CertificateSigningRequestCryptographyBackend(module=module)
    return backend
|
||||||
|
|
||||||
|
def get_csr_argument_spec() -> ArgumentSpec:
    """Return the shared Ansible argument spec for CSR-related modules.

    The spec covers the private key source, subject fields (with their
    OpenSSL-style aliases), and all supported X.509 request extensions.
    """
    return ArgumentSpec(
        argument_spec={
            "digest": {"type": "str", "default": "sha256"},
            "privatekey_path": {"type": "path"},
            "privatekey_content": {"type": "str", "no_log": True},
            "privatekey_passphrase": {"type": "str", "no_log": True},
            # Version 1 is the only valid value according to RFC 2986.
            "version": {"type": "int", "default": 1, "choices": [1]},
            "subject": {"type": "dict"},
            "subject_ordered": {"type": "list", "elements": "dict"},
            "country_name": {"type": "str", "aliases": ["C", "countryName"]},
            "state_or_province_name": {
                "type": "str",
                "aliases": ["ST", "stateOrProvinceName"],
            },
            "locality_name": {"type": "str", "aliases": ["L", "localityName"]},
            "organization_name": {"type": "str", "aliases": ["O", "organizationName"]},
            "organizational_unit_name": {
                "type": "str",
                "aliases": ["OU", "organizationalUnitName"],
            },
            "common_name": {"type": "str", "aliases": ["CN", "commonName"]},
            "email_address": {"type": "str", "aliases": ["E", "emailAddress"]},
            "subject_alt_name": {
                "type": "list",
                "elements": "str",
                "aliases": ["subjectAltName"],
            },
            "subject_alt_name_critical": {
                "type": "bool",
                "default": False,
                "aliases": ["subjectAltName_critical"],
            },
            "use_common_name_for_san": {
                "type": "bool",
                "default": True,
                "aliases": ["useCommonNameForSAN"],
            },
            "key_usage": {"type": "list", "elements": "str", "aliases": ["keyUsage"]},
            "key_usage_critical": {
                "type": "bool",
                "default": False,
                "aliases": ["keyUsage_critical"],
            },
            "extended_key_usage": {
                "type": "list",
                "elements": "str",
                "aliases": ["extKeyUsage", "extendedKeyUsage"],
            },
            "extended_key_usage_critical": {
                "type": "bool",
                "default": False,
                "aliases": ["extKeyUsage_critical", "extendedKeyUsage_critical"],
            },
            "basic_constraints": {
                "type": "list",
                "elements": "str",
                "aliases": ["basicConstraints"],
            },
            "basic_constraints_critical": {
                "type": "bool",
                "default": False,
                "aliases": ["basicConstraints_critical"],
            },
            "ocsp_must_staple": {
                "type": "bool",
                "default": False,
                "aliases": ["ocspMustStaple"],
            },
            "ocsp_must_staple_critical": {
                "type": "bool",
                "default": False,
                "aliases": ["ocspMustStaple_critical"],
            },
            "name_constraints_permitted": {"type": "list", "elements": "str"},
            "name_constraints_excluded": {"type": "list", "elements": "str"},
            "name_constraints_critical": {"type": "bool", "default": False},
            "create_subject_key_identifier": {"type": "bool", "default": False},
            "subject_key_identifier": {"type": "str"},
            "authority_key_identifier": {"type": "str"},
            "authority_cert_issuer": {"type": "list", "elements": "str"},
            "authority_cert_serial_number": {"type": "int"},
            "crl_distribution_points": {
                "type": "list",
                "elements": "dict",
                "options": {
                    "full_name": {"type": "list", "elements": "str"},
                    "relative_name": {"type": "list", "elements": "str"},
                    "crl_issuer": {"type": "list", "elements": "str"},
                    # Reason names follow cryptography's ReasonFlags values.
                    "reasons": {
                        "type": "list",
                        "elements": "str",
                        "choices": [
                            "key_compromise",
                            "ca_compromise",
                            "affiliation_changed",
                            "superseded",
                            "cessation_of_operation",
                            "certificate_hold",
                            "privilege_withdrawn",
                            "aa_compromise",
                        ],
                    },
                },
                "mutually_exclusive": [("full_name", "relative_name")],
                "required_one_of": [("full_name", "relative_name", "crl_issuer")],
            },
            "select_crypto_backend": {
                "type": "str",
                "default": "auto",
                "choices": ["auto", "cryptography"],
            },
        },
        required_together=[
            ["authority_cert_issuer", "authority_cert_serial_number"],
        ],
        mutually_exclusive=[
            ["privatekey_path", "privatekey_content"],
            ["subject", "subject_ordered"],
        ],
        required_one_of=[
            ["privatekey_path", "privatekey_content"],
        ],
    )
|
|
||||||
|
|
||||||
|
# Public API of this module util; everything else is internal.
__all__ = (
    "CertificateSigningRequestError",
    "CertificateSigningRequestBackend",
    "select_backend",
    "get_csr_argument_spec",
)
||||||
399
plugins/module_utils/_crypto/module_backends/csr_info.py
Normal file
399
plugins/module_utils/_crypto/module_backends/csr_info.py
Normal file
@@ -0,0 +1,399 @@
|
|||||||
|
# Copyright (c) 2016-2017, Yanis Guenane <yanis+ansible@guenane.org>
|
||||||
|
# Copyright (c) 2017, Markus Teufelberger <mteufelberger+ansible@mgit.at>
|
||||||
|
# Copyright (c) 2020, Felix Fontein <felix@fontein.de>
|
||||||
|
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||||
|
# SPDX-License-Identifier: GPL-3.0-or-later
|
||||||
|
|
||||||
|
# Note that this module util is **PRIVATE** to the collection. It can have breaking changes at any time.
|
||||||
|
# Do not use this from other collections or standalone plugins/modules!
|
||||||
|
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import abc
|
||||||
|
import binascii
|
||||||
|
import typing as t
|
||||||
|
|
||||||
|
from ansible.module_utils.common.text.converters import to_text
|
||||||
|
from ansible_collections.community.crypto.plugins.module_utils._crypto.cryptography_support import (
|
||||||
|
cryptography_decode_name,
|
||||||
|
cryptography_get_extensions_from_csr,
|
||||||
|
cryptography_oid_to_name,
|
||||||
|
)
|
||||||
|
from ansible_collections.community.crypto.plugins.module_utils._crypto.module_backends.publickey_info import (
|
||||||
|
get_publickey_info,
|
||||||
|
)
|
||||||
|
from ansible_collections.community.crypto.plugins.module_utils._crypto.support import (
|
||||||
|
load_certificate_request,
|
||||||
|
)
|
||||||
|
from ansible_collections.community.crypto.plugins.module_utils._cryptography_dep import (
|
||||||
|
COLLECTION_MINIMUM_CRYPTOGRAPHY_VERSION,
|
||||||
|
assert_required_cryptography_version,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
if t.TYPE_CHECKING:
|
||||||
|
from ansible.module_utils.basic import AnsibleModule
|
||||||
|
from ansible_collections.community.crypto.plugins.plugin_utils._action_module import (
|
||||||
|
AnsibleActionModule,
|
||||||
|
)
|
||||||
|
from ansible_collections.community.crypto.plugins.plugin_utils._filter_module import (
|
||||||
|
FilterModuleMock,
|
||||||
|
)
|
||||||
|
from cryptography.hazmat.primitives.asymmetric.types import (
|
||||||
|
CertificatePublicKeyTypes,
|
||||||
|
PrivateKeyTypes,
|
||||||
|
)
|
||||||
|
|
||||||
|
GeneralAnsibleModule = t.Union[AnsibleModule, AnsibleActionModule, FilterModuleMock]
|
||||||
|
|
||||||
|
|
||||||
|
MINIMAL_CRYPTOGRAPHY_VERSION = COLLECTION_MINIMUM_CRYPTOGRAPHY_VERSION
|
||||||
|
|
||||||
|
try:
|
||||||
|
import cryptography
|
||||||
|
from cryptography import x509
|
||||||
|
from cryptography.hazmat.primitives import serialization
|
||||||
|
except ImportError:
|
||||||
|
pass
|
||||||
|
|
||||||
|
|
||||||
|
TIMESTAMP_FORMAT = "%Y%m%d%H%M%SZ"
|
||||||
|
|
||||||
|
|
||||||
|
class CSRInfoRetrieval(metaclass=abc.ABCMeta):
    """Abstract base for extracting information from a CSR.

    Subclasses implement the ``_get_*`` accessors for a concrete backend;
    ``get_info()`` orchestrates them into one result dict.
    """

    # Populated by get_info() after loading the CSR from self.content.
    csr: x509.CertificateSigningRequest

    def __init__(
        self, *, module: GeneralAnsibleModule, content: bytes, validate_signature: bool
    ) -> None:
        """Store the module, raw CSR content, and signature-validation flag."""
        self.module = module
        self.content = content
        self.validate_signature = validate_signature

    @abc.abstractmethod
    def _get_subject_ordered(self) -> list[list[str]]:
        """Return the subject as an ordered list of [name, value] pairs."""
        pass

    @abc.abstractmethod
    def _get_key_usage(self) -> tuple[list[str] | None, bool]:
        """Return (key usage names or None, criticality)."""
        pass

    @abc.abstractmethod
    def _get_extended_key_usage(self) -> tuple[list[str] | None, bool]:
        """Return (extended key usage names or None, criticality)."""
        pass

    @abc.abstractmethod
    def _get_basic_constraints(self) -> tuple[list[str] | None, bool]:
        """Return (basic constraints or None, criticality)."""
        pass

    @abc.abstractmethod
    def _get_ocsp_must_staple(self) -> tuple[bool | None, bool]:
        """Return (OCSP must-staple flag or None, criticality)."""
        pass

    @abc.abstractmethod
    def _get_subject_alt_name(self) -> tuple[list[str] | None, bool]:
        """Return (subject alternative names or None, criticality)."""
        pass

    @abc.abstractmethod
    def _get_name_constraints(self) -> tuple[list[str] | None, list[str] | None, bool]:
        """Return (permitted subtrees, excluded subtrees, criticality)."""
        pass

    @abc.abstractmethod
    def _get_public_key_pem(self) -> bytes:
        """Return the CSR's public key as PEM bytes."""
        pass

    @abc.abstractmethod
    def _get_public_key_object(self) -> CertificatePublicKeyTypes:
        """Return the CSR's public key as a cryptography key object."""
        pass

    @abc.abstractmethod
    def _get_subject_key_identifier(self) -> bytes | None:
        """Return the subject key identifier digest, or None if absent."""
        pass

    @abc.abstractmethod
    def _get_authority_key_identifier(
        self,
    ) -> tuple[bytes | None, list[str] | None, int | None]:
        """Return (key identifier, authority cert issuer, authority cert serial number)."""
        pass

    @abc.abstractmethod
    def _get_all_extensions(self) -> dict[str, dict[str, bool | str]]:
        """Return all extensions keyed by OID."""
        pass

    @abc.abstractmethod
    def _is_signature_valid(self) -> bool:
        """Return whether the CSR's self-signature is valid."""
        pass

    def get_info(self, *, prefer_one_fingerprint: bool = False) -> dict[str, t.Any]:
        """Load the CSR and collect all information into a result dict.

        Fails the module if ``validate_signature`` is set and the CSR's
        signature is invalid.
        """
        result: dict[str, t.Any] = {}
        self.csr = load_certificate_request(
            content=self.content,
        )

        subject = self._get_subject_ordered()
        # Provide the subject both as a (last-wins) mapping and in order.
        result["subject"] = {}
        for k, v in subject:
            result["subject"][k] = v
        result["subject_ordered"] = subject
        result["key_usage"], result["key_usage_critical"] = self._get_key_usage()
        result["extended_key_usage"], result["extended_key_usage_critical"] = (
            self._get_extended_key_usage()
        )
        result["basic_constraints"], result["basic_constraints_critical"] = (
            self._get_basic_constraints()
        )
        result["ocsp_must_staple"], result["ocsp_must_staple_critical"] = (
            self._get_ocsp_must_staple()
        )
        result["subject_alt_name"], result["subject_alt_name_critical"] = (
            self._get_subject_alt_name()
        )
        (
            result["name_constraints_permitted"],
            result["name_constraints_excluded"],
            result["name_constraints_critical"],
        ) = self._get_name_constraints()

        result["public_key"] = to_text(self._get_public_key_pem())

        public_key_info = get_publickey_info(
            module=self.module,
            key=self._get_public_key_object(),
            prefer_one_fingerprint=prefer_one_fingerprint,
        )
        result.update(
            {
                "public_key_type": public_key_info["type"],
                "public_key_data": public_key_info["public_data"],
                "public_key_fingerprints": public_key_info["fingerprints"],
            }
        )

        # Render the subject key identifier as colon-separated hex pairs.
        ski_bytes = self._get_subject_key_identifier()
        ski = None
        if ski_bytes is not None:
            ski = binascii.hexlify(ski_bytes).decode("ascii")
            ski = ":".join([ski[i : i + 2] for i in range(0, len(ski), 2)])
        result["subject_key_identifier"] = ski

        # Same colon-separated hex formatting for the authority key identifier.
        aki_bytes, aci, acsn = self._get_authority_key_identifier()
        aki = None
        if aki_bytes is not None:
            aki = binascii.hexlify(aki_bytes).decode("ascii")
            aki = ":".join([aki[i : i + 2] for i in range(0, len(aki), 2)])
        result["authority_key_identifier"] = aki
        result["authority_cert_issuer"] = aci
        result["authority_cert_serial_number"] = acsn

        result["extensions_by_oid"] = self._get_all_extensions()

        result["signature_valid"] = self._is_signature_valid()
        if self.validate_signature and not result["signature_valid"]:
            # fail_json() still receives the collected info as module results.
            self.module.fail_json(msg="CSR signature is invalid!", **result)
        return result
|
|
||||||
|
|
||||||
|
class CSRInfoRetrievalCryptography(CSRInfoRetrieval):
    """Validate the supplied CSR, using the cryptography backend"""

    def __init__(
        self, *, module: GeneralAnsibleModule, content: bytes, validate_signature: bool
    ) -> None:
        """Initialize from raw CSR ``content``; reads ``name_encoding`` from module params."""
        super().__init__(
            module=module, content=content, validate_signature=validate_signature
        )
        # Controls how IDN names in GeneralName values are rendered (idna/unicode/ignore).
        self.name_encoding: t.Literal["ignore", "idna", "unicode"] = module.params.get(
            "name_encoding", "ignore"
        )

    def _get_subject_ordered(self) -> list[list[str]]:
        """Return the CSR subject as an ordered list of ``[oid_name, value]`` pairs."""
        result: list[list[str]] = []
        for attribute in self.csr.subject:
            result.append(
                [cryptography_oid_to_name(attribute.oid), to_text(attribute.value)]
            )
        return result

    def _get_key_usage(self) -> tuple[list[str] | None, bool]:
        """Return (sorted human-readable key usages, critical flag), or (None, False) if absent."""
        try:
            current_key_ext = self.csr.extensions.get_extension_for_class(x509.KeyUsage)
            current_key_usage = current_key_ext.value
            key_usage = {
                "digital_signature": current_key_usage.digital_signature,
                "content_commitment": current_key_usage.content_commitment,
                "key_encipherment": current_key_usage.key_encipherment,
                "data_encipherment": current_key_usage.data_encipherment,
                "key_agreement": current_key_usage.key_agreement,
                "key_cert_sign": current_key_usage.key_cert_sign,
                "crl_sign": current_key_usage.crl_sign,
                "encipher_only": False,
                "decipher_only": False,
            }
            # encipher_only/decipher_only may only be read when key_agreement is set
            # (cryptography raises otherwise).
            if key_usage["key_agreement"]:
                key_usage.update(
                    {
                        "encipher_only": current_key_usage.encipher_only,
                        "decipher_only": current_key_usage.decipher_only,
                    }
                )

            # Map attribute names to the OpenSSL-style display names.
            key_usage_names = {
                "digital_signature": "Digital Signature",
                "content_commitment": "Non Repudiation",
                "key_encipherment": "Key Encipherment",
                "data_encipherment": "Data Encipherment",
                "key_agreement": "Key Agreement",
                "key_cert_sign": "Certificate Sign",
                "crl_sign": "CRL Sign",
                "encipher_only": "Encipher Only",
                "decipher_only": "Decipher Only",
            }
            return (
                sorted(
                    [
                        key_usage_names[name]
                        for name, value in key_usage.items()
                        if value
                    ]
                ),
                current_key_ext.critical,
            )
        except cryptography.x509.ExtensionNotFound:
            return None, False

    def _get_extended_key_usage(self) -> tuple[list[str] | None, bool]:
        """Return (sorted EKU OID names, critical flag), or (None, False) if absent."""
        try:
            ext_keyusage_ext = self.csr.extensions.get_extension_for_class(
                x509.ExtendedKeyUsage
            )
            return (
                sorted(
                    [cryptography_oid_to_name(eku) for eku in ext_keyusage_ext.value]
                ),
                ext_keyusage_ext.critical,
            )
        except cryptography.x509.ExtensionNotFound:
            return None, False

    def _get_basic_constraints(self) -> tuple[list[str] | None, bool]:
        """Return (sorted BasicConstraints strings like ``CA:TRUE``/``pathlen:N``, critical flag)."""
        try:
            ext_keyusage_ext = self.csr.extensions.get_extension_for_class(
                x509.BasicConstraints
            )
            result = [f"CA:{'TRUE' if ext_keyusage_ext.value.ca else 'FALSE'}"]
            if ext_keyusage_ext.value.path_length is not None:
                result.append(f"pathlen:{ext_keyusage_ext.value.path_length}")
            return sorted(result), ext_keyusage_ext.critical
        except cryptography.x509.ExtensionNotFound:
            return None, False

    def _get_ocsp_must_staple(self) -> tuple[bool | None, bool]:
        """Return (whether status_request is in TLSFeature, critical flag), or (None, False)."""
        try:
            # This only works with cryptography >= 2.1
            tlsfeature_ext = self.csr.extensions.get_extension_for_class(
                x509.TLSFeature
            )
            value = (
                cryptography.x509.TLSFeatureType.status_request in tlsfeature_ext.value
            )
            return value, tlsfeature_ext.critical
        except cryptography.x509.ExtensionNotFound:
            return None, False

    def _get_subject_alt_name(self) -> tuple[list[str] | None, bool]:
        """Return (decoded SAN entries, critical flag), or (None, False) if absent."""
        try:
            san_ext = self.csr.extensions.get_extension_for_class(
                x509.SubjectAlternativeName
            )
            result = [
                cryptography_decode_name(san, idn_rewrite=self.name_encoding)
                for san in san_ext.value
            ]
            return result, san_ext.critical
        except cryptography.x509.ExtensionNotFound:
            return None, False

    def _get_name_constraints(self) -> tuple[list[str] | None, list[str] | None, bool]:
        """Return (permitted subtrees, excluded subtrees, critical flag), or (None, None, False)."""
        try:
            nc_ext = self.csr.extensions.get_extension_for_class(x509.NameConstraints)
            permitted = [
                cryptography_decode_name(san, idn_rewrite=self.name_encoding)
                for san in nc_ext.value.permitted_subtrees or []
            ]
            excluded = [
                cryptography_decode_name(san, idn_rewrite=self.name_encoding)
                for san in nc_ext.value.excluded_subtrees or []
            ]
            return permitted, excluded, nc_ext.critical
        except cryptography.x509.ExtensionNotFound:
            return None, None, False

    def _get_public_key_pem(self) -> bytes:
        """Return the CSR's public key as SubjectPublicKeyInfo PEM bytes."""
        return self.csr.public_key().public_bytes(
            serialization.Encoding.PEM,
            serialization.PublicFormat.SubjectPublicKeyInfo,
        )

    def _get_public_key_object(self) -> CertificatePublicKeyTypes:
        """Return the cryptography public-key object embedded in the CSR."""
        return self.csr.public_key()

    def _get_subject_key_identifier(self) -> bytes | None:
        """Return the SubjectKeyIdentifier digest bytes, or None if the extension is absent."""
        try:
            ext = self.csr.extensions.get_extension_for_class(x509.SubjectKeyIdentifier)
            return ext.value.digest
        except cryptography.x509.ExtensionNotFound:
            return None

    def _get_authority_key_identifier(
        self,
    ) -> tuple[bytes | None, list[str] | None, int | None]:
        """Return (key identifier, authority cert issuer names, authority cert serial number).

        All three are None if the AuthorityKeyIdentifier extension is absent.
        """
        try:
            ext = self.csr.extensions.get_extension_for_class(
                x509.AuthorityKeyIdentifier
            )
            issuer = None
            if ext.value.authority_cert_issuer is not None:
                issuer = [
                    cryptography_decode_name(san, idn_rewrite=self.name_encoding)
                    for san in ext.value.authority_cert_issuer
                ]
            return (
                ext.value.key_identifier,
                issuer,
                ext.value.authority_cert_serial_number,
            )
        except cryptography.x509.ExtensionNotFound:
            return None, None, None

    def _get_all_extensions(self) -> dict[str, dict[str, bool | str]]:
        """Return all extensions keyed by dotted OID, as produced by the shared helper."""
        return cryptography_get_extensions_from_csr(self.csr)

    def _is_signature_valid(self) -> bool:
        """Return whether the CSR's self-signature verifies against its public key."""
        return self.csr.is_signature_valid
|
||||||
|
|
||||||
|
def get_csr_info(
    *,
    module: GeneralAnsibleModule,
    content: bytes,
    validate_signature: bool = True,
    prefer_one_fingerprint: bool = False,
) -> dict[str, t.Any]:
    """Parse the CSR in ``content`` and return its information as a dictionary.

    When ``validate_signature`` is true, a CSR with an invalid self-signature
    causes the module to fail. ``prefer_one_fingerprint`` is forwarded to the
    retrieval backend.
    """
    retrieval = CSRInfoRetrievalCryptography(
        module=module,
        content=content,
        validate_signature=validate_signature,
    )
    return retrieval.get_info(prefer_one_fingerprint=prefer_one_fingerprint)
||||||
|
|
||||||
|
|
||||||
|
def select_backend(
    *, module: GeneralAnsibleModule, content: bytes, validate_signature: bool = True
) -> CSRInfoRetrieval:
    """Return a CSR info retrieval backend after checking the cryptography version."""
    # Fail early if the installed cryptography library is too old.
    assert_required_cryptography_version(
        module, minimum_cryptography_version=MINIMAL_CRYPTOGRAPHY_VERSION
    )
    backend = CSRInfoRetrievalCryptography(
        module=module,
        content=content,
        validate_signature=validate_signature,
    )
    return backend
||||||
|
|
||||||
|
|
||||||
|
__all__ = ("CSRInfoRetrieval", "get_csr_info", "select_backend")
|
||||||
676
plugins/module_utils/_crypto/module_backends/privatekey.py
Normal file
676
plugins/module_utils/_crypto/module_backends/privatekey.py
Normal file
@@ -0,0 +1,676 @@
|
|||||||
|
# Copyright (c) 2016, Yanis Guenane <yanis+ansible@guenane.org>
|
||||||
|
# Copyright (c) 2020, Felix Fontein <felix@fontein.de>
|
||||||
|
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||||
|
# SPDX-License-Identifier: GPL-3.0-or-later
|
||||||
|
|
||||||
|
# Note that this module util is **PRIVATE** to the collection. It can have breaking changes at any time.
|
||||||
|
# Do not use this from other collections or standalone plugins/modules!
|
||||||
|
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import abc
|
||||||
|
import base64
|
||||||
|
import traceback
|
||||||
|
import typing as t
|
||||||
|
|
||||||
|
from ansible.module_utils.common.text.converters import to_bytes
|
||||||
|
from ansible_collections.community.crypto.plugins.module_utils._argspec import (
|
||||||
|
ArgumentSpec,
|
||||||
|
)
|
||||||
|
from ansible_collections.community.crypto.plugins.module_utils._crypto.basic import (
|
||||||
|
OpenSSLObjectError,
|
||||||
|
)
|
||||||
|
from ansible_collections.community.crypto.plugins.module_utils._crypto.module_backends.privatekey_info import (
|
||||||
|
PrivateKeyConsistencyError,
|
||||||
|
PrivateKeyParseError,
|
||||||
|
get_privatekey_info,
|
||||||
|
)
|
||||||
|
from ansible_collections.community.crypto.plugins.module_utils._crypto.pem import (
|
||||||
|
identify_private_key_format,
|
||||||
|
)
|
||||||
|
from ansible_collections.community.crypto.plugins.module_utils._crypto.support import (
|
||||||
|
get_fingerprint_of_privatekey,
|
||||||
|
)
|
||||||
|
from ansible_collections.community.crypto.plugins.module_utils._cryptography_dep import (
|
||||||
|
COLLECTION_MINIMUM_CRYPTOGRAPHY_VERSION,
|
||||||
|
assert_required_cryptography_version,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
if t.TYPE_CHECKING:
|
||||||
|
from ansible.module_utils.basic import AnsibleModule
|
||||||
|
from ansible_collections.community.crypto.plugins.plugin_utils._action_module import (
|
||||||
|
AnsibleActionModule,
|
||||||
|
)
|
||||||
|
from cryptography.hazmat.primitives.asymmetric.types import (
|
||||||
|
PrivateKeyTypes,
|
||||||
|
)
|
||||||
|
|
||||||
|
GeneralAnsibleModule = t.Union[AnsibleModule, AnsibleActionModule]
|
||||||
|
|
||||||
|
|
||||||
|
MINIMAL_CRYPTOGRAPHY_VERSION = COLLECTION_MINIMUM_CRYPTOGRAPHY_VERSION
|
||||||
|
|
||||||
|
try:
|
||||||
|
import cryptography
|
||||||
|
import cryptography.exceptions
|
||||||
|
import cryptography.hazmat.backends
|
||||||
|
import cryptography.hazmat.primitives.asymmetric.dsa
|
||||||
|
import cryptography.hazmat.primitives.asymmetric.ec
|
||||||
|
import cryptography.hazmat.primitives.asymmetric.ed448
|
||||||
|
import cryptography.hazmat.primitives.asymmetric.ed25519
|
||||||
|
import cryptography.hazmat.primitives.asymmetric.rsa
|
||||||
|
import cryptography.hazmat.primitives.asymmetric.utils
|
||||||
|
import cryptography.hazmat.primitives.asymmetric.x448
|
||||||
|
import cryptography.hazmat.primitives.asymmetric.x25519
|
||||||
|
import cryptography.hazmat.primitives.serialization
|
||||||
|
except ImportError:
|
||||||
|
pass
|
||||||
|
|
||||||
|
|
||||||
|
class PrivateKeyError(OpenSSLObjectError):
    """Raised when a private key cannot be loaded or handled by the backend."""

    pass
||||||
|
|
||||||
|
|
||||||
|
# From the object called `module`, only the following properties are used:
|
||||||
|
#
|
||||||
|
# - module.params[]
|
||||||
|
# - module.warn(msg: str)
|
||||||
|
# - module.fail_json(msg: str, **kwargs)
|
||||||
|
|
||||||
|
|
||||||
|
class PrivateKeyBackend(metaclass=abc.ABCMeta):
    """Abstract base class for private key generation/conversion backends.

    Subclasses implement key generation, serialization, loading, and the
    checks used to decide whether an existing key must be regenerated or
    converted. From ``module`` only ``params``, ``warn()`` and ``fail_json()``
    are used.
    """

    def __init__(self, *, module: GeneralAnsibleModule) -> None:
        """Read all relevant module parameters and initialize state/diff tracking."""
        self.module = module
        self.type: t.Literal[
            "DSA", "ECC", "Ed25519", "Ed448", "RSA", "X25519", "X448"
        ] = module.params["type"]
        self.size: int = module.params["size"]
        self.curve: str | None = module.params["curve"]
        self.passphrase: str | None = module.params["passphrase"]
        self.cipher: str = module.params["cipher"]
        self.format: t.Literal["pkcs1", "pkcs8", "raw", "auto", "auto_ignore"] = (
            module.params["format"]
        )
        # These two use .get() with defaults: some callers do not declare them.
        self.format_mismatch: t.Literal["regenerate", "convert"] = module.params.get(
            "format_mismatch", "regenerate"
        )
        self.regenerate: t.Literal[
            "never", "fail", "partial_idempotence", "full_idempotence", "always"
        ] = module.params.get("regenerate", "full_idempotence")

        # Newly generated/converted key (set by generate_private_key/convert_private_key).
        self.private_key: PrivateKeyTypes | None = None

        # Existing key on disk, both parsed and raw forms.
        self.existing_private_key: PrivateKeyTypes | None = None
        self.existing_private_key_bytes: bytes | None = None

        self.diff_before = self._get_info(data=None)
        self.diff_after = self._get_info(data=None)

    def _get_info(self, *, data: bytes | None) -> dict[str, t.Any]:
        """Return best-effort info about key bytes for diff output; {} if data is None."""
        if data is None:
            return {}
        result: dict[str, t.Any] = {"can_parse_key": False}
        try:
            result.update(
                get_privatekey_info(
                    module=self.module,
                    content=data,
                    passphrase=self.passphrase,
                    return_private_key_data=False,
                    prefer_one_fingerprint=True,
                )
            )
        except PrivateKeyConsistencyError as exc:
            # Partial info is still useful for the diff.
            result.update(exc.result)
        except PrivateKeyParseError as exc:
            result.update(exc.result)
        except Exception:
            # Diff info is best-effort only; never fail the module here.
            pass
        return result

    @abc.abstractmethod
    def generate_private_key(self) -> None:
        """(Re-)Generate private key."""

    def convert_private_key(self) -> None:
        """Convert existing private key (self.existing_private_key) to new private key (self.private_key).

        This is effectively a copy without active conversion. The conversion is done
        during load and store; get_private_key_data() uses the destination format to
        serialize the key.
        """
        self._ensure_existing_private_key_loaded()
        self.private_key = self.existing_private_key

    @abc.abstractmethod
    def get_private_key_data(self) -> bytes:
        """Return bytes for self.private_key."""

    def set_existing(self, *, privatekey_bytes: bytes | None) -> None:
        """Set existing private key bytes. None indicates that the key does not exist."""
        self.existing_private_key_bytes = privatekey_bytes
        self.diff_after = self.diff_before = self._get_info(
            data=self.existing_private_key_bytes
        )

    def has_existing(self) -> bool:
        """Query whether an existing private key is/has been there."""
        return self.existing_private_key_bytes is not None

    @abc.abstractmethod
    def _check_passphrase(self) -> bool:
        """Check whether provided passphrase matches, assuming self.existing_private_key_bytes has been populated."""

    @abc.abstractmethod
    def _ensure_existing_private_key_loaded(self) -> None:
        """Make sure that self.existing_private_key is populated from self.existing_private_key_bytes."""

    @abc.abstractmethod
    def _check_size_and_type(self) -> bool:
        """Check whether provided size and type matches, assuming self.existing_private_key has been populated."""

    @abc.abstractmethod
    def _check_format(self) -> bool:
        """Check whether the key file format, assuming self.existing_private_key and self.existing_private_key_bytes has been populated."""

    def needs_regeneration(self) -> bool:
        """Check whether a regeneration is necessary."""
        if self.regenerate == "always":
            return True
        if not self.has_existing():
            # key does not exist
            return True
        if not self._check_passphrase():
            if self.regenerate == "full_idempotence":
                return True
            self.module.fail_json(
                msg="Unable to read the key. The key is protected with a another passphrase / no passphrase or broken."
                " Will not proceed. To force regeneration, call the module with `generate`"
                " set to `full_idempotence` or `always`, or with `force=true`."
            )
        self._ensure_existing_private_key_loaded()
        if self.regenerate != "never":
            if not self._check_size_and_type():
                if self.regenerate in ("partial_idempotence", "full_idempotence"):
                    return True
                self.module.fail_json(
                    msg="Key has wrong type and/or size."
                    " Will not proceed. To force regeneration, call the module with `generate`"
                    " set to `partial_idempotence`, `full_idempotence` or `always`, or with `force=true`."
                )
        # During generation step, regenerate if format does not match and format_mismatch == 'regenerate'
        if self.format_mismatch == "regenerate" and self.regenerate != "never":
            if not self._check_format():
                if self.regenerate in ("partial_idempotence", "full_idempotence"):
                    return True
                self.module.fail_json(
                    msg="Key has wrong format."
                    " Will not proceed. To force regeneration, call the module with `generate`"
                    " set to `partial_idempotence`, `full_idempotence` or `always`, or with `force=true`."
                    " To convert the key, set `format_mismatch` to `convert`."
                )
        return False

    def needs_conversion(self) -> bool:
        """Check whether a conversion is necessary. Must only be called if needs_regeneration() returned False."""
        # During conversion step, convert if format does not match and format_mismatch == 'convert'
        self._ensure_existing_private_key_loaded()
        return (
            self.has_existing()
            and self.format_mismatch == "convert"
            and not self._check_format()
        )

    def _get_fingerprint(self) -> dict[str, str] | None:
        """Return fingerprints of the new key, else of the existing key, else None."""
        if self.private_key:
            return get_fingerprint_of_privatekey(self.private_key)
        try:
            self._ensure_existing_private_key_loaded()
        except Exception:
            # Ignore errors
            pass
        if self.existing_private_key:
            return get_fingerprint_of_privatekey(self.existing_private_key)
        return None

    def dump(self, *, include_key: bool) -> dict[str, t.Any]:
        """Serialize the object into a dictionary."""

        if not self.private_key:
            try:
                self._ensure_existing_private_key_loaded()
            except Exception:
                # Ignore errors
                pass
        result: dict[str, t.Any] = {
            "type": self.type,
            "size": self.size,
            "fingerprint": self._get_fingerprint(),
        }
        if self.type == "ECC":
            result["curve"] = self.curve
        # Get hold of private key bytes
        pk_bytes = self.existing_private_key_bytes
        if self.private_key is not None:
            pk_bytes = self.get_private_key_data()
        self.diff_after = self._get_info(data=pk_bytes)
        if include_key:
            # Store result
            if pk_bytes:
                if identify_private_key_format(pk_bytes) == "raw":
                    # NOTE(review): b64encode returns bytes, not str — confirm
                    # downstream consumers expect bytes for raw keys here.
                    result["privatekey"] = base64.b64encode(pk_bytes)
                else:
                    result["privatekey"] = pk_bytes.decode("utf-8")
            else:
                result["privatekey"] = None

        result["diff"] = {
            "before": self.diff_before,
            "after": self.diff_after,
        }
        return result
||||||
|
|
||||||
|
|
||||||
|
class _Curve:
    """Descriptor for one supported elliptic curve.

    Maps a user-facing curve name (e.g. ``secp256r1``) to the class name in
    ``cryptography.hazmat.primitives.asymmetric.ec`` (e.g. ``SECP256R1``) and
    records whether the curve is deprecated for new keys.
    """

    def __init__(
        self,
        *,
        name: str,
        ectype: str,
        deprecated: bool,
    ) -> None:
        self.name = name
        self.ectype = ectype
        self.deprecated = deprecated

    def _get_ec_class(
        self, *, module: GeneralAnsibleModule
    ) -> type[cryptography.hazmat.primitives.asymmetric.ec.EllipticCurve]:
        """Look up the EC curve class by name; fail the module if unsupported."""
        ecclass = cryptography.hazmat.primitives.asymmetric.ec.__dict__.get(self.ectype)  # type: ignore
        if ecclass is None:
            module.fail_json(
                msg=f"Your cryptography version does not support {self.ectype}"
            )
        return ecclass

    def create(
        self, *, size: int, module: GeneralAnsibleModule
    ) -> cryptography.hazmat.primitives.asymmetric.ec.EllipticCurve:
        """Instantiate the curve object. ``size`` is accepted but unused for EC curves."""
        ecclass = self._get_ec_class(module=module)
        return ecclass()

    def verify(
        self,
        *,
        privatekey: cryptography.hazmat.primitives.asymmetric.ec.EllipticCurvePrivateKey,
        module: GeneralAnsibleModule,
    ) -> bool:
        """Return whether ``privatekey`` uses this curve."""
        ecclass = self._get_ec_class(module=module)
        return isinstance(privatekey.private_numbers().public_numbers.curve, ecclass)
||||||
|
|
||||||
|
|
||||||
|
# Implementation with using cryptography
|
||||||
|
class PrivateKeyCryptographyBackend(PrivateKeyBackend):
|
||||||
|
|
||||||
|
def _add_curve(
|
||||||
|
self,
|
||||||
|
name: str,
|
||||||
|
ectype: str,
|
||||||
|
*,
|
||||||
|
deprecated: bool = False,
|
||||||
|
) -> None:
|
||||||
|
self.curves[name] = _Curve(name=name, ectype=ectype, deprecated=deprecated)
|
||||||
|
|
||||||
|
def __init__(self, module: GeneralAnsibleModule) -> None:
|
||||||
|
super().__init__(module=module)
|
||||||
|
|
||||||
|
self.curves: dict[str, _Curve] = {}
|
||||||
|
self._add_curve("secp224r1", "SECP224R1")
|
||||||
|
self._add_curve("secp256k1", "SECP256K1")
|
||||||
|
self._add_curve("secp256r1", "SECP256R1")
|
||||||
|
self._add_curve("secp384r1", "SECP384R1")
|
||||||
|
self._add_curve("secp521r1", "SECP521R1")
|
||||||
|
self._add_curve("secp192r1", "SECP192R1", deprecated=True)
|
||||||
|
self._add_curve("sect163k1", "SECT163K1", deprecated=True)
|
||||||
|
self._add_curve("sect163r2", "SECT163R2", deprecated=True)
|
||||||
|
self._add_curve("sect233k1", "SECT233K1", deprecated=True)
|
||||||
|
self._add_curve("sect233r1", "SECT233R1", deprecated=True)
|
||||||
|
self._add_curve("sect283k1", "SECT283K1", deprecated=True)
|
||||||
|
self._add_curve("sect283r1", "SECT283R1", deprecated=True)
|
||||||
|
self._add_curve("sect409k1", "SECT409K1", deprecated=True)
|
||||||
|
self._add_curve("sect409r1", "SECT409R1", deprecated=True)
|
||||||
|
self._add_curve("sect571k1", "SECT571K1", deprecated=True)
|
||||||
|
self._add_curve("sect571r1", "SECT571R1", deprecated=True)
|
||||||
|
self._add_curve("brainpoolP256r1", "BrainpoolP256R1", deprecated=True)
|
||||||
|
self._add_curve("brainpoolP384r1", "BrainpoolP384R1", deprecated=True)
|
||||||
|
self._add_curve("brainpoolP512r1", "BrainpoolP512R1", deprecated=True)
|
||||||
|
|
||||||
|
def _get_wanted_format(self) -> t.Literal["pkcs1", "pkcs8", "raw"]:
|
||||||
|
if self.format not in ("auto", "auto_ignore"):
|
||||||
|
return self.format # type: ignore
|
||||||
|
if self.type in ("X25519", "X448", "Ed25519", "Ed448"):
|
||||||
|
return "pkcs8"
|
||||||
|
return "pkcs1"
|
||||||
|
|
||||||
|
def generate_private_key(self) -> None:
|
||||||
|
"""(Re-)Generate private key."""
|
||||||
|
try:
|
||||||
|
if self.type == "RSA":
|
||||||
|
self.private_key = (
|
||||||
|
cryptography.hazmat.primitives.asymmetric.rsa.generate_private_key(
|
||||||
|
public_exponent=65537, # OpenSSL always uses this
|
||||||
|
key_size=self.size,
|
||||||
|
)
|
||||||
|
)
|
||||||
|
if self.type == "DSA":
|
||||||
|
self.private_key = (
|
||||||
|
cryptography.hazmat.primitives.asymmetric.dsa.generate_private_key(
|
||||||
|
key_size=self.size
|
||||||
|
)
|
||||||
|
)
|
||||||
|
if self.type == "X25519":
|
||||||
|
self.private_key = (
|
||||||
|
cryptography.hazmat.primitives.asymmetric.x25519.X25519PrivateKey.generate()
|
||||||
|
)
|
||||||
|
if self.type == "X448":
|
||||||
|
self.private_key = (
|
||||||
|
cryptography.hazmat.primitives.asymmetric.x448.X448PrivateKey.generate()
|
||||||
|
)
|
||||||
|
if self.type == "Ed25519":
|
||||||
|
self.private_key = (
|
||||||
|
cryptography.hazmat.primitives.asymmetric.ed25519.Ed25519PrivateKey.generate()
|
||||||
|
)
|
||||||
|
if self.type == "Ed448":
|
||||||
|
self.private_key = (
|
||||||
|
cryptography.hazmat.primitives.asymmetric.ed448.Ed448PrivateKey.generate()
|
||||||
|
)
|
||||||
|
if self.type == "ECC" and self.curve in self.curves:
|
||||||
|
if self.curves[self.curve].deprecated:
|
||||||
|
self.module.warn(
|
||||||
|
f"Elliptic curves of type {self.curve} should not be used for new keys!"
|
||||||
|
)
|
||||||
|
self.private_key = (
|
||||||
|
cryptography.hazmat.primitives.asymmetric.ec.generate_private_key(
|
||||||
|
curve=self.curves[self.curve].create(
|
||||||
|
size=self.size, module=self.module
|
||||||
|
),
|
||||||
|
)
|
||||||
|
)
|
||||||
|
except cryptography.exceptions.UnsupportedAlgorithm:
|
||||||
|
self.module.fail_json(
|
||||||
|
msg=f"Cryptography backend does not support the algorithm required for {self.type}"
|
||||||
|
)
|
||||||
|
|
||||||
|
def get_private_key_data(self) -> bytes:
|
||||||
|
"""Return bytes for self.private_key"""
|
||||||
|
if self.private_key is None:
|
||||||
|
raise AssertionError("private_key not set")
|
||||||
|
# Select export format and encoding
|
||||||
|
try:
|
||||||
|
export_format_txt = self._get_wanted_format()
|
||||||
|
export_encoding = cryptography.hazmat.primitives.serialization.Encoding.PEM
|
||||||
|
if export_format_txt == "pkcs1":
|
||||||
|
# "TraditionalOpenSSL" format is PKCS1
|
||||||
|
export_format = (
|
||||||
|
cryptography.hazmat.primitives.serialization.PrivateFormat.TraditionalOpenSSL
|
||||||
|
)
|
||||||
|
elif export_format_txt == "pkcs8":
|
||||||
|
export_format = (
|
||||||
|
cryptography.hazmat.primitives.serialization.PrivateFormat.PKCS8
|
||||||
|
)
|
||||||
|
elif export_format_txt == "raw":
|
||||||
|
export_format = (
|
||||||
|
cryptography.hazmat.primitives.serialization.PrivateFormat.Raw
|
||||||
|
)
|
||||||
|
export_encoding = (
|
||||||
|
cryptography.hazmat.primitives.serialization.Encoding.Raw
|
||||||
|
)
|
||||||
|
else:
|
||||||
|
# pylint does not notice that all possible values for export_format_txt have been covered.
|
||||||
|
raise AssertionError("Can never be reached") # pragma: no cover
|
||||||
|
except AttributeError:
|
||||||
|
self.module.fail_json(
|
||||||
|
msg=f'Cryptography backend does not support the selected output format "{self.format}"'
|
||||||
|
)
|
||||||
|
|
||||||
|
# Select key encryption
|
||||||
|
encryption_algorithm: (
|
||||||
|
cryptography.hazmat.primitives.serialization.KeySerializationEncryption
|
||||||
|
) = cryptography.hazmat.primitives.serialization.NoEncryption()
|
||||||
|
if self.cipher and self.passphrase:
|
||||||
|
if self.cipher == "auto":
|
||||||
|
encryption_algorithm = cryptography.hazmat.primitives.serialization.BestAvailableEncryption(
|
||||||
|
to_bytes(self.passphrase)
|
||||||
|
)
|
||||||
|
else:
|
||||||
|
self.module.fail_json(
|
||||||
|
msg='Cryptography backend can only use "auto" for cipher option.'
|
||||||
|
)
|
||||||
|
|
||||||
|
# Serialize key
|
||||||
|
try:
|
||||||
|
return self.private_key.private_bytes(
|
||||||
|
encoding=export_encoding,
|
||||||
|
format=export_format,
|
||||||
|
encryption_algorithm=encryption_algorithm,
|
||||||
|
)
|
||||||
|
except ValueError:
|
||||||
|
self.module.fail_json(
|
||||||
|
msg=f'Cryptography backend cannot serialize the private key in the required format "{self.format}"'
|
||||||
|
)
|
||||||
|
except Exception:
|
||||||
|
self.module.fail_json(
|
||||||
|
msg=f'Error while serializing the private key in the required format "{self.format}"',
|
||||||
|
exception=traceback.format_exc(),
|
||||||
|
)
|
||||||
|
|
||||||
|
def _load_privatekey(self) -> PrivateKeyTypes:
|
||||||
|
data = self.existing_private_key_bytes
|
||||||
|
if data is None:
|
||||||
|
raise AssertionError("existing_private_key_bytes not set")
|
||||||
|
try:
|
||||||
|
# Interpret bytes depending on format.
|
||||||
|
key_format = identify_private_key_format(data)
|
||||||
|
if key_format == "raw":
|
||||||
|
if len(data) == 56:
|
||||||
|
return cryptography.hazmat.primitives.asymmetric.x448.X448PrivateKey.from_private_bytes(
|
||||||
|
data
|
||||||
|
)
|
||||||
|
if len(data) == 57:
|
||||||
|
return cryptography.hazmat.primitives.asymmetric.ed448.Ed448PrivateKey.from_private_bytes(
|
||||||
|
data
|
||||||
|
)
|
||||||
|
if len(data) == 32:
|
||||||
|
if self.type == "X25519":
|
||||||
|
return cryptography.hazmat.primitives.asymmetric.x25519.X25519PrivateKey.from_private_bytes(
|
||||||
|
data
|
||||||
|
)
|
||||||
|
if self.type == "Ed25519":
|
||||||
|
return cryptography.hazmat.primitives.asymmetric.ed25519.Ed25519PrivateKey.from_private_bytes(
|
||||||
|
data
|
||||||
|
)
|
||||||
|
try:
|
||||||
|
return cryptography.hazmat.primitives.asymmetric.x25519.X25519PrivateKey.from_private_bytes(
|
||||||
|
data
|
||||||
|
)
|
||||||
|
except Exception:
|
||||||
|
return cryptography.hazmat.primitives.asymmetric.ed25519.Ed25519PrivateKey.from_private_bytes(
|
||||||
|
data
|
||||||
|
)
|
||||||
|
raise PrivateKeyError("Cannot load raw key")
|
||||||
|
|
||||||
|
return cryptography.hazmat.primitives.serialization.load_pem_private_key(
|
||||||
|
data,
|
||||||
|
None if self.passphrase is None else to_bytes(self.passphrase),
|
||||||
|
)
|
||||||
|
except Exception as e:
|
||||||
|
raise PrivateKeyError(e) from e
|
||||||
|
|
||||||
|
def _ensure_existing_private_key_loaded(self) -> None:
|
||||||
|
if self.existing_private_key is None and self.has_existing():
|
||||||
|
self.existing_private_key = self._load_privatekey()
|
||||||
|
|
||||||
|
def _check_passphrase(self) -> bool:
|
||||||
|
if self.existing_private_key_bytes is None:
|
||||||
|
raise AssertionError("existing_private_key_bytes not set")
|
||||||
|
try:
|
||||||
|
key_format = identify_private_key_format(self.existing_private_key_bytes)
|
||||||
|
if key_format == "raw":
|
||||||
|
# Raw keys cannot be encrypted. To avoid incompatibilities, we try to
|
||||||
|
# actually load the key (and return False when this fails).
|
||||||
|
self._load_privatekey()
|
||||||
|
# Loading the key succeeded. Only return True when no passphrase was
|
||||||
|
# provided.
|
||||||
|
return self.passphrase is None
|
||||||
|
return bool(
|
||||||
|
cryptography.hazmat.primitives.serialization.load_pem_private_key(
|
||||||
|
self.existing_private_key_bytes,
|
||||||
|
None if self.passphrase is None else to_bytes(self.passphrase),
|
||||||
|
)
|
||||||
|
)
|
||||||
|
except Exception:
|
||||||
|
return False
|
||||||
|
|
||||||
|
def _check_size_and_type(self) -> bool:
|
||||||
|
if isinstance(
|
||||||
|
self.existing_private_key,
|
||||||
|
cryptography.hazmat.primitives.asymmetric.rsa.RSAPrivateKey,
|
||||||
|
):
|
||||||
|
return (
|
||||||
|
self.type == "RSA" and self.size == self.existing_private_key.key_size
|
||||||
|
)
|
||||||
|
if isinstance(
|
||||||
|
self.existing_private_key,
|
||||||
|
cryptography.hazmat.primitives.asymmetric.dsa.DSAPrivateKey,
|
||||||
|
):
|
||||||
|
return (
|
||||||
|
self.type == "DSA" and self.size == self.existing_private_key.key_size
|
||||||
|
)
|
||||||
|
if isinstance(
|
||||||
|
self.existing_private_key,
|
||||||
|
cryptography.hazmat.primitives.asymmetric.x25519.X25519PrivateKey,
|
||||||
|
):
|
||||||
|
return self.type == "X25519"
|
||||||
|
if isinstance(
|
||||||
|
self.existing_private_key,
|
||||||
|
cryptography.hazmat.primitives.asymmetric.x448.X448PrivateKey,
|
||||||
|
):
|
||||||
|
return self.type == "X448"
|
||||||
|
if isinstance(
|
||||||
|
self.existing_private_key,
|
||||||
|
cryptography.hazmat.primitives.asymmetric.ed25519.Ed25519PrivateKey,
|
||||||
|
):
|
||||||
|
return self.type == "Ed25519"
|
||||||
|
if isinstance(
|
||||||
|
self.existing_private_key,
|
||||||
|
cryptography.hazmat.primitives.asymmetric.ed448.Ed448PrivateKey,
|
||||||
|
):
|
||||||
|
return self.type == "Ed448"
|
||||||
|
if isinstance(
|
||||||
|
self.existing_private_key,
|
||||||
|
cryptography.hazmat.primitives.asymmetric.ec.EllipticCurvePrivateKey,
|
||||||
|
):
|
||||||
|
if self.type != "ECC":
|
||||||
|
return False
|
||||||
|
if self.curve not in self.curves:
|
||||||
|
return False
|
||||||
|
return self.curves[self.curve].verify(
|
||||||
|
privatekey=self.existing_private_key, module=self.module
|
||||||
|
)
|
||||||
|
|
||||||
|
return False
|
||||||
|
|
||||||
|
def _check_format(self) -> bool:
    """Return whether the stored key bytes are already in the wanted on-disk format."""
    data = self.existing_private_key_bytes
    if data is None:
        raise AssertionError("existing_private_key_bytes not set")
    # "auto_ignore" means the format should never trigger a mismatch.
    if self.format == "auto_ignore":
        return True
    try:
        return identify_private_key_format(data) == self._get_wanted_format()
    except Exception:
        # Data whose format cannot be identified never matches.
        return False
||||||
|
def select_backend(module: GeneralAnsibleModule) -> PrivateKeyBackend:
    """Ensure the cryptography requirement is met and return the private key backend.

    Fails the module if the installed cryptography library is too old.
    """
    minimum = MINIMAL_CRYPTOGRAPHY_VERSION
    assert_required_cryptography_version(module, minimum_cryptography_version=minimum)
    return PrivateKeyCryptographyBackend(module=module)
||||||
|
def get_privatekey_argument_spec() -> ArgumentSpec:
    """Build the argument spec shared by the private key generation modules.

    Returns an ArgumentSpec whose options control key type, size, curve,
    passphrase/cipher, on-disk format handling, and regeneration policy.
    """
    # Supported ECC curve names (choice order is preserved for documentation).
    ecc_curve_choices = [
        "secp224r1",
        "secp256k1",
        "secp256r1",
        "secp384r1",
        "secp521r1",
        "secp192r1",
        "brainpoolP256r1",
        "brainpoolP384r1",
        "brainpoolP512r1",
        "sect163k1",
        "sect163r2",
        "sect233k1",
        "sect233r1",
        "sect283k1",
        "sect283r1",
        "sect409k1",
        "sect409r1",
        "sect571k1",
        "sect571r1",
    ]
    spec = {
        "size": {"type": "int", "default": 4096},
        "type": {
            "type": "str",
            "default": "RSA",
            "choices": ["DSA", "ECC", "Ed25519", "Ed448", "RSA", "X25519", "X448"],
        },
        "curve": {"type": "str", "choices": ecc_curve_choices},
        "passphrase": {"type": "str", "no_log": True},
        "cipher": {"type": "str", "default": "auto"},
        "format": {
            "type": "str",
            "default": "auto_ignore",
            "choices": ["pkcs1", "pkcs8", "raw", "auto", "auto_ignore"],
        },
        "format_mismatch": {
            "type": "str",
            "default": "regenerate",
            "choices": ["regenerate", "convert"],
        },
        "select_crypto_backend": {
            "type": "str",
            "choices": ["auto", "cryptography"],
            "default": "auto",
        },
        "regenerate": {
            "type": "str",
            "default": "full_idempotence",
            "choices": [
                "never",
                "fail",
                "partial_idempotence",
                "full_idempotence",
                "always",
            ],
        },
    }
    # A curve must be supplied whenever an ECC key is requested.
    return ArgumentSpec(
        argument_spec=spec,
        required_if=[
            ("type", "ECC", ["curve"]),
        ],
    )
|
||||||
|
|
||||||
|
__all__ = (
|
||||||
|
"PrivateKeyError",
|
||||||
|
"PrivateKeyBackend",
|
||||||
|
"select_backend",
|
||||||
|
"get_privatekey_argument_spec",
|
||||||
|
)
|
||||||
@@ -0,0 +1,313 @@
|
|||||||
|
# Copyright (c) 2022, Felix Fontein <felix@fontein.de>
|
||||||
|
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||||
|
# SPDX-License-Identifier: GPL-3.0-or-later
|
||||||
|
|
||||||
|
# Note that this module util is **PRIVATE** to the collection. It can have breaking changes at any time.
|
||||||
|
# Do not use this from other collections or standalone plugins/modules!
|
||||||
|
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import abc
|
||||||
|
import traceback
|
||||||
|
import typing as t
|
||||||
|
|
||||||
|
from ansible.module_utils.common.text.converters import to_bytes
|
||||||
|
from ansible_collections.community.crypto.plugins.module_utils._argspec import (
|
||||||
|
ArgumentSpec,
|
||||||
|
)
|
||||||
|
from ansible_collections.community.crypto.plugins.module_utils._crypto.basic import (
|
||||||
|
OpenSSLObjectError,
|
||||||
|
)
|
||||||
|
from ansible_collections.community.crypto.plugins.module_utils._crypto.cryptography_support import (
|
||||||
|
cryptography_compare_private_keys,
|
||||||
|
)
|
||||||
|
from ansible_collections.community.crypto.plugins.module_utils._crypto.pem import (
|
||||||
|
identify_private_key_format,
|
||||||
|
)
|
||||||
|
from ansible_collections.community.crypto.plugins.module_utils._cryptography_dep import (
|
||||||
|
COLLECTION_MINIMUM_CRYPTOGRAPHY_VERSION,
|
||||||
|
assert_required_cryptography_version,
|
||||||
|
)
|
||||||
|
from ansible_collections.community.crypto.plugins.module_utils._io import load_file
|
||||||
|
|
||||||
|
|
||||||
|
if t.TYPE_CHECKING:
|
||||||
|
from ansible.module_utils.basic import AnsibleModule
|
||||||
|
from cryptography.hazmat.primitives.asymmetric.types import (
|
||||||
|
PrivateKeyTypes,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
MINIMAL_CRYPTOGRAPHY_VERSION = COLLECTION_MINIMUM_CRYPTOGRAPHY_VERSION
|
||||||
|
|
||||||
|
try:
|
||||||
|
import cryptography
|
||||||
|
import cryptography.exceptions
|
||||||
|
import cryptography.hazmat.backends
|
||||||
|
import cryptography.hazmat.primitives.asymmetric.dsa
|
||||||
|
import cryptography.hazmat.primitives.asymmetric.ec
|
||||||
|
import cryptography.hazmat.primitives.asymmetric.ed448
|
||||||
|
import cryptography.hazmat.primitives.asymmetric.ed25519
|
||||||
|
import cryptography.hazmat.primitives.asymmetric.rsa
|
||||||
|
import cryptography.hazmat.primitives.asymmetric.utils
|
||||||
|
import cryptography.hazmat.primitives.asymmetric.x448
|
||||||
|
import cryptography.hazmat.primitives.asymmetric.x25519
|
||||||
|
import cryptography.hazmat.primitives.serialization
|
||||||
|
except ImportError:
|
||||||
|
pass
|
||||||
|
|
||||||
|
|
||||||
|
class PrivateKeyError(OpenSSLObjectError):
|
||||||
|
pass
|
||||||
|
|
||||||
|
|
||||||
|
# From the object called `module`, only the following properties are used:
|
||||||
|
#
|
||||||
|
# - module.params[]
|
||||||
|
# - module.warn(msg: str)
|
||||||
|
# - module.fail_json(msg: str, **kwargs)
|
||||||
|
|
||||||
|
|
||||||
|
class PrivateKeyConvertBackend(metaclass=abc.ABCMeta):
    """Abstract base for converting a private key between storage formats.

    Concrete backends must implement key loading and serialization; this base
    class holds the module parameters and decides whether a conversion is
    actually needed.
    """

    def __init__(self, *, module: AnsibleModule) -> None:
        self.module = module
        params = module.params
        self.src_path: str | None = params["src_path"]
        self.src_content: str | None = params["src_content"]
        self.src_passphrase: str | None = params["src_passphrase"]
        self.format: t.Literal["pkcs1", "pkcs8", "raw"] = params["format"]
        self.dest_passphrase: str | None = params["dest_passphrase"]

        # Source key: raw bytes are read now; the parsed key object is filled
        # in later by needs_conversion().
        self.src_private_key: PrivateKeyTypes | None = None
        if self.src_path is None:
            if self.src_content is None:
                raise AssertionError("src_content is None")
            self.src_private_key_bytes = self.src_content.encode("utf-8")
        else:
            self.src_private_key_bytes = load_file(path=self.src_path, module=module)

        # Destination key state; populated via set_existing_destination().
        self.dest_private_key: PrivateKeyTypes | None = None
        self.dest_private_key_bytes: bytes | None = None

    @abc.abstractmethod
    def get_private_key_data(self) -> bytes:
        """Return bytes for self.src_private_key in output format."""

    def set_existing_destination(self, *, privatekey_bytes: bytes | None) -> None:
        """Set existing private key bytes. None indicates that the key does not exist."""
        self.dest_private_key_bytes = privatekey_bytes

    def has_existing_destination(self) -> bool:
        """Query whether an existing private key is/has been there."""
        return self.dest_private_key_bytes is not None

    @abc.abstractmethod
    def _load_private_key(
        self,
        *,
        data: bytes,
        passphrase: str | None,
        current_hint: PrivateKeyTypes | None = None,
    ) -> tuple[str, PrivateKeyTypes]:
        """Check whether data can be loaded as a private key with the provided passphrase. Return tuple (type, private_key)."""

    def needs_conversion(self) -> bool:
        """Check whether a conversion is necessary. Must only be called if needs_regeneration() returned False."""
        _src_format, self.src_private_key = self._load_private_key(
            data=self.src_private_key_bytes, passphrase=self.src_passphrase
        )

        # No destination key yet: it has to be written.
        if not self.has_existing_destination():
            return True
        assert self.dest_private_key_bytes is not None

        try:
            dest_format, self.dest_private_key = self._load_private_key(
                data=self.dest_private_key_bytes,
                passphrase=self.dest_passphrase,
                current_hint=self.src_private_key,
            )
        except Exception:
            # An unreadable destination key is always rewritten.
            return True

        # Rewrite when either the on-disk format or the key material differs.
        if dest_format != self.format:
            return True
        return not cryptography_compare_private_keys(
            self.dest_private_key, self.src_private_key
        )

    def dump(self) -> dict[str, t.Any]:
        """Serialize the object into a dictionary."""
        return {}
||||||
|
# Implementation with using cryptography
|
||||||
|
class PrivateKeyConvertCryptographyBackend(PrivateKeyConvertBackend):
    """Private key conversion backend implemented with the cryptography library."""

    def __init__(self, *, module: AnsibleModule) -> None:
        super().__init__(module=module)

    def get_private_key_data(self) -> bytes:
        """Return bytes for self.src_private_key in output format"""
        if self.src_private_key is None:
            raise AssertionError("src_private_key not set")
        # Select export format and encoding
        try:
            export_encoding = cryptography.hazmat.primitives.serialization.Encoding.PEM
            if self.format == "pkcs1":
                # "TraditionalOpenSSL" format is PKCS1
                export_format = (
                    cryptography.hazmat.primitives.serialization.PrivateFormat.TraditionalOpenSSL
                )
            elif self.format == "pkcs8":
                export_format = (
                    cryptography.hazmat.primitives.serialization.PrivateFormat.PKCS8
                )
            elif self.format == "raw":
                # Raw output uses both the Raw private format and Raw encoding.
                export_format = (
                    cryptography.hazmat.primitives.serialization.PrivateFormat.Raw
                )
                export_encoding = (
                    cryptography.hazmat.primitives.serialization.Encoding.Raw
                )
            else:
                # pylint does not notice that all possible values for self.format have been covered.
                raise AssertionError("Can never be reached")  # pragma: no cover
        except AttributeError:
            # NOTE(review): fail_json is expected not to return, so export_format
            # is always bound below — confirm against AnsibleModule semantics.
            self.module.fail_json(
                msg=f'Cryptography backend does not support the selected output format "{self.format}"'
            )

        # Select key encryption
        encryption_algorithm: (
            cryptography.hazmat.primitives.serialization.KeySerializationEncryption
        ) = cryptography.hazmat.primitives.serialization.NoEncryption()
        if self.dest_passphrase:
            encryption_algorithm = (
                cryptography.hazmat.primitives.serialization.BestAvailableEncryption(
                    to_bytes(self.dest_passphrase)
                )
            )

        # Serialize key
        try:
            return self.src_private_key.private_bytes(
                encoding=export_encoding,
                format=export_format,
                encryption_algorithm=encryption_algorithm,
            )
        except ValueError:
            # cryptography signals unsupported format/key combinations with ValueError.
            self.module.fail_json(
                msg=f'Cryptography backend cannot serialize the private key in the required format "{self.format}"'
            )
        except Exception:
            self.module.fail_json(
                msg=f'Error while serializing the private key in the required format "{self.format}"',
                exception=traceback.format_exc(),
            )

    def _load_private_key(
        self,
        *,
        data: bytes,
        passphrase: str | None,
        current_hint: PrivateKeyTypes | None = None,
    ) -> tuple[str, PrivateKeyTypes]:
        """Load ``data`` as a private key and return ``(format, key)``.

        Raw keys are distinguished by length: 56 bytes -> X448, 57 bytes ->
        Ed448, 32 bytes -> X25519 or Ed25519 (disambiguated via
        ``current_hint``). Any loading error is re-raised as PrivateKeyError.
        """
        try:
            # Interpret bytes depending on format.
            key_format = identify_private_key_format(data)
            if key_format == "raw":
                # Raw encodings carry no encryption layer.
                if passphrase is not None:
                    raise PrivateKeyError("Cannot load raw key with passphrase")
                if len(data) == 56:
                    return (
                        key_format,
                        cryptography.hazmat.primitives.asymmetric.x448.X448PrivateKey.from_private_bytes(
                            data
                        ),
                    )
                if len(data) == 57:
                    return (
                        key_format,
                        cryptography.hazmat.primitives.asymmetric.ed448.Ed448PrivateKey.from_private_bytes(
                            data
                        ),
                    )
                if len(data) == 32:
                    # 32 bytes is ambiguous (X25519 vs Ed25519): try the type
                    # matching the source key first, then fall back to the other.
                    if isinstance(
                        current_hint,
                        cryptography.hazmat.primitives.asymmetric.x25519.X25519PrivateKey,
                    ):
                        try:
                            return (
                                key_format,
                                cryptography.hazmat.primitives.asymmetric.x25519.X25519PrivateKey.from_private_bytes(
                                    data
                                ),
                            )
                        except Exception:
                            return (
                                key_format,
                                cryptography.hazmat.primitives.asymmetric.ed25519.Ed25519PrivateKey.from_private_bytes(
                                    data
                                ),
                            )
                    else:
                        try:
                            return (
                                key_format,
                                cryptography.hazmat.primitives.asymmetric.ed25519.Ed25519PrivateKey.from_private_bytes(
                                    data
                                ),
                            )
                        except Exception:
                            return (
                                key_format,
                                cryptography.hazmat.primitives.asymmetric.x25519.X25519PrivateKey.from_private_bytes(
                                    data
                                ),
                            )
                raise PrivateKeyError("Cannot load raw key")

            # Non-raw data is assumed to be PEM.
            return (
                key_format,
                cryptography.hazmat.primitives.serialization.load_pem_private_key(
                    data,
                    None if passphrase is None else to_bytes(passphrase),
                ),
            )
        except Exception as e:
            raise PrivateKeyError(e) from e
|
|
||||||
|
def select_backend(module: AnsibleModule) -> PrivateKeyConvertBackend:
    """Ensure the cryptography requirement is met and return the conversion backend.

    Fails the module if the installed cryptography library is too old.
    """
    minimum = MINIMAL_CRYPTOGRAPHY_VERSION
    assert_required_cryptography_version(module, minimum_cryptography_version=minimum)
    return PrivateKeyConvertCryptographyBackend(module=module)
||||||
|
|
||||||
|
def get_privatekey_argument_spec() -> ArgumentSpec:
    """Build the argument spec shared by the private key conversion modules.

    The source key comes either from a file (src_path) or inline
    (src_content) — exactly one of the two must be given.
    """
    spec = {
        "src_path": {"type": "path"},
        "src_content": {"type": "str"},
        "src_passphrase": {"type": "str", "no_log": True},
        "dest_passphrase": {"type": "str", "no_log": True},
        "format": {
            "type": "str",
            "required": True,
            "choices": ["pkcs1", "pkcs8", "raw"],
        },
    }
    return ArgumentSpec(
        argument_spec=spec,
        mutually_exclusive=[["src_path", "src_content"]],
        required_one_of=[["src_path", "src_content"]],
    )
|
||||||
|
|
||||||
|
__all__ = (
|
||||||
|
"PrivateKeyError",
|
||||||
|
"PrivateKeyConvertBackend",
|
||||||
|
"select_backend",
|
||||||
|
"get_privatekey_argument_spec",
|
||||||
|
)
|
||||||
369
plugins/module_utils/_crypto/module_backends/privatekey_info.py
Normal file
369
plugins/module_utils/_crypto/module_backends/privatekey_info.py
Normal file
@@ -0,0 +1,369 @@
|
|||||||
|
# Copyright (c) 2016-2017, Yanis Guenane <yanis+ansible@guenane.org>
|
||||||
|
# Copyright (c) 2017, Markus Teufelberger <mteufelberger+ansible@mgit.at>
|
||||||
|
# Copyright (c) 2020, Felix Fontein <felix@fontein.de>
|
||||||
|
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||||
|
# SPDX-License-Identifier: GPL-3.0-or-later
|
||||||
|
|
||||||
|
# Note that this module util is **PRIVATE** to the collection. It can have breaking changes at any time.
|
||||||
|
# Do not use this from other collections or standalone plugins/modules!
|
||||||
|
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import abc
|
||||||
|
import typing as t
|
||||||
|
|
||||||
|
from ansible.module_utils.common.text.converters import to_bytes, to_text
|
||||||
|
from ansible_collections.community.crypto.plugins.module_utils._crypto.basic import (
|
||||||
|
OpenSSLObjectError,
|
||||||
|
)
|
||||||
|
from ansible_collections.community.crypto.plugins.module_utils._crypto.math import (
|
||||||
|
binary_exp_mod,
|
||||||
|
quick_is_not_prime,
|
||||||
|
)
|
||||||
|
from ansible_collections.community.crypto.plugins.module_utils._crypto.module_backends.publickey_info import (
|
||||||
|
_get_cryptography_public_key_info,
|
||||||
|
)
|
||||||
|
from ansible_collections.community.crypto.plugins.module_utils._crypto.support import (
|
||||||
|
get_fingerprint_of_bytes,
|
||||||
|
load_privatekey,
|
||||||
|
)
|
||||||
|
from ansible_collections.community.crypto.plugins.module_utils._cryptography_dep import (
|
||||||
|
COLLECTION_MINIMUM_CRYPTOGRAPHY_VERSION,
|
||||||
|
assert_required_cryptography_version,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
if t.TYPE_CHECKING:
|
||||||
|
from ansible.module_utils.basic import AnsibleModule
|
||||||
|
from ansible_collections.community.crypto.plugins.plugin_utils._action_module import (
|
||||||
|
AnsibleActionModule,
|
||||||
|
)
|
||||||
|
from ansible_collections.community.crypto.plugins.plugin_utils._filter_module import (
|
||||||
|
FilterModuleMock,
|
||||||
|
)
|
||||||
|
from cryptography.hazmat.primitives.asymmetric.types import (
|
||||||
|
PrivateKeyTypes,
|
||||||
|
)
|
||||||
|
|
||||||
|
GeneralAnsibleModule = t.Union[AnsibleModule, AnsibleActionModule, FilterModuleMock]
|
||||||
|
|
||||||
|
|
||||||
|
MINIMAL_CRYPTOGRAPHY_VERSION = COLLECTION_MINIMUM_CRYPTOGRAPHY_VERSION
|
||||||
|
|
||||||
|
try:
|
||||||
|
import cryptography
|
||||||
|
from cryptography.hazmat.primitives import serialization
|
||||||
|
except ImportError:
|
||||||
|
pass
|
||||||
|
|
||||||
|
SIGNATURE_TEST_DATA = b"1234"
|
||||||
|
|
||||||
|
|
||||||
|
def _get_cryptography_private_key_info(
    key: PrivateKeyTypes, *, need_private_key_data: bool = False
) -> tuple[str, dict[str, t.Any], dict[str, t.Any]]:
    """Return ``(type, public_data, private_data)`` for a cryptography key object.

    ``private_data`` is only populated when ``need_private_key_data`` is true,
    and only for RSA, DSA, and ECC keys.
    """
    key_type, key_public_data = _get_cryptography_public_key_info(key.public_key())
    key_private_data: dict[str, t.Any] = {}
    if need_private_key_data:
        asym = cryptography.hazmat.primitives.asymmetric
        if isinstance(key, asym.rsa.RSAPrivateKey):
            numbers = key.private_numbers()
            key_private_data = {
                "p": numbers.p,
                "q": numbers.q,
                "exponent": numbers.d,
            }
        elif isinstance(key, asym.dsa.DSAPrivateKey):
            key_private_data = {"x": key.private_numbers().x}
        elif isinstance(key, asym.ec.EllipticCurvePrivateKey):
            key_private_data = {"multiplier": key.private_numbers().private_value}
    return key_type, key_public_data, key_private_data
|
|
||||||
|
def _check_dsa_consistency(
|
||||||
|
*, key_public_data: dict[str, t.Any], key_private_data: dict[str, t.Any]
|
||||||
|
) -> bool | None:
|
||||||
|
# Get parameters
|
||||||
|
p: int | None = key_public_data.get("p")
|
||||||
|
if p is None:
|
||||||
|
return None
|
||||||
|
q: int | None = key_public_data.get("q")
|
||||||
|
if q is None:
|
||||||
|
return None
|
||||||
|
g: int | None = key_public_data.get("g")
|
||||||
|
if g is None:
|
||||||
|
return None
|
||||||
|
y: int | None = key_public_data.get("y")
|
||||||
|
if y is None:
|
||||||
|
return None
|
||||||
|
x: int | None = key_private_data.get("x")
|
||||||
|
if x is None:
|
||||||
|
return None
|
||||||
|
# Make sure that g is not 0, 1 or -1 in Z/pZ
|
||||||
|
if g < 2 or g >= p - 1:
|
||||||
|
return False
|
||||||
|
# Make sure that x is in range
|
||||||
|
if x < 1 or x >= q:
|
||||||
|
return False
|
||||||
|
# Check whether q divides p-1
|
||||||
|
if (p - 1) % q != 0:
|
||||||
|
return False
|
||||||
|
# Check that g**q mod p == 1
|
||||||
|
if binary_exp_mod(g, q, m=p) != 1:
|
||||||
|
return False
|
||||||
|
# Check whether g**x mod p == y
|
||||||
|
if binary_exp_mod(g, x, m=p) != y:
|
||||||
|
return False
|
||||||
|
# Check (quickly) whether p or q are not primes
|
||||||
|
if quick_is_not_prime(q) or quick_is_not_prime(p):
|
||||||
|
return False
|
||||||
|
return True
|
||||||
|
|
||||||
|
|
||||||
|
def _is_cryptography_key_consistent(
    key: PrivateKeyTypes,
    *,
    key_public_data: dict[str, t.Any],
    key_private_data: dict[str, t.Any],
    warn_func: t.Callable[[str], None] | None = None,
) -> bool | None:
    """Return True/False when key consistency can be determined, else None.

    Strategy per key type: RSA uses OpenSSL's RSA_check_key when the private
    backend handle is available; DSA first tries an arithmetic parameter
    check, then a sign/verify round-trip; ECC and Ed25519/Ed448 use a
    sign/verify round-trip; X25519/X448 cannot be checked.
    """
    if isinstance(key, cryptography.hazmat.primitives.asymmetric.rsa.RSAPrivateKey):
        # key._backend was removed in cryptography 42.0.0
        backend = getattr(key, "_backend", None)
        if backend is not None:
            # Delegate to OpenSSL's native RSA key check via private attributes.
            return bool(backend._lib.RSA_check_key(key._rsa_cdata))  # type: ignore # pylint: disable=protected-access
    if isinstance(key, cryptography.hazmat.primitives.asymmetric.dsa.DSAPrivateKey):
        # Prefer the arithmetic check; fall back to sign/verify if it
        # could not reach a verdict (returned None).
        result = _check_dsa_consistency(
            key_public_data=key_public_data, key_private_data=key_private_data
        )
        if result is not None:
            return result
        signature = key.sign(
            SIGNATURE_TEST_DATA, cryptography.hazmat.primitives.hashes.SHA256()
        )
        try:
            key.public_key().verify(
                signature,
                SIGNATURE_TEST_DATA,
                cryptography.hazmat.primitives.hashes.SHA256(),
            )
            return True
        except cryptography.exceptions.InvalidSignature:
            return False
    if isinstance(
        key, cryptography.hazmat.primitives.asymmetric.ec.EllipticCurvePrivateKey
    ):
        # ECDSA sign/verify round-trip with SHA-256.
        signature = key.sign(
            SIGNATURE_TEST_DATA,
            cryptography.hazmat.primitives.asymmetric.ec.ECDSA(
                cryptography.hazmat.primitives.hashes.SHA256()
            ),
        )
        try:
            key.public_key().verify(
                signature,
                SIGNATURE_TEST_DATA,
                cryptography.hazmat.primitives.asymmetric.ec.ECDSA(
                    cryptography.hazmat.primitives.hashes.SHA256()
                ),
            )
            return True
        except cryptography.exceptions.InvalidSignature:
            return False
    # Ed25519/Ed448 sign() takes only the data — no hash argument.
    has_simple_sign_function = False
    if isinstance(
        key, cryptography.hazmat.primitives.asymmetric.ed25519.Ed25519PrivateKey
    ):
        has_simple_sign_function = True
    if isinstance(key, cryptography.hazmat.primitives.asymmetric.ed448.Ed448PrivateKey):
        has_simple_sign_function = True
    if has_simple_sign_function:
        signature = key.sign(SIGNATURE_TEST_DATA)  # type: ignore
        try:
            key.public_key().verify(signature, SIGNATURE_TEST_DATA)  # type: ignore
            return True
        except cryptography.exceptions.InvalidSignature:
            return False
    # For X25519 and X448, there's no test yet.
    if warn_func is not None:
        warn_func(f"Cannot determine consistency for key of type {type(key)}")
    return None
|
||||||
|
|
||||||
|
class PrivateKeyConsistencyError(OpenSSLObjectError):
    """Raised when a private key fails its consistency check.

    Carries the partially assembled info ``result`` so callers can still
    report what was gathered before the failure.
    """

    def __init__(self, msg: str, *, result: dict[str, t.Any]) -> None:
        super().__init__(msg)
        # Keep the message accessible without str() for module error reporting.
        self.error_message = msg
        self.result = result
|
||||||
|
|
||||||
|
class PrivateKeyParseError(OpenSSLObjectError):
    """Raised when the supplied private key content cannot be parsed.

    Carries the partially assembled info ``result`` (with
    ``can_parse_key`` still False) so callers can include it in output.
    """

    def __init__(self, msg: str, *, result: dict[str, t.Any]) -> None:
        super().__init__(msg)
        # Keep the message accessible without str() for module error reporting.
        self.error_message = msg
        self.result = result
|
||||||
|
|
||||||
|
class PrivateKeyInfoRetrieval(metaclass=abc.ABCMeta):
    """Abstract driver that parses a private key and assembles its info dict.

    Subclasses provide the backend-specific key inspection primitives;
    get_info() orchestrates parsing, public key extraction, fingerprinting,
    and the optional consistency check.
    """

    # The parsed private key; set by get_info() after successful parsing.
    key: PrivateKeyTypes

    def __init__(
        self,
        *,
        module: GeneralAnsibleModule,
        content: bytes,
        passphrase: str | None = None,
        return_private_key_data: bool = False,
        check_consistency: bool = False,
    ):
        self.module = module
        self.content = content
        self.passphrase = passphrase
        self.return_private_key_data = return_private_key_data
        self.check_consistency = check_consistency

    @abc.abstractmethod
    def _get_public_key(self, *, binary: bool) -> bytes:
        """Return the public key, DER-encoded if binary else PEM-encoded."""
        pass

    @abc.abstractmethod
    def _get_key_info(
        self, *, need_private_key_data: bool = False
    ) -> tuple[str, dict[str, t.Any], dict[str, t.Any]]:
        """Return (type, public_data, private_data) for the parsed key."""
        pass

    @abc.abstractmethod
    def _is_key_consistent(
        self, *, key_public_data: dict[str, t.Any], key_private_data: dict[str, t.Any]
    ) -> bool | None:
        """Return True/False for key consistency, or None if unknown."""
        pass

    def get_info(self, *, prefer_one_fingerprint: bool = False) -> dict[str, t.Any]:
        """Parse self.content and return the assembled info dictionary.

        Raises PrivateKeyParseError when parsing fails and
        PrivateKeyConsistencyError when the consistency check is explicitly
        negative; both carry the partial result.
        """
        result: dict[str, t.Any] = {
            "can_parse_key": False,
            "key_is_consistent": None,
        }
        priv_key_detail = self.content
        try:
            self.key = load_privatekey(
                path=None,
                content=priv_key_detail,
                passphrase=(
                    to_bytes(self.passphrase)
                    if self.passphrase is not None
                    else self.passphrase
                ),
            )
            result["can_parse_key"] = True
        except OpenSSLObjectError as exc:
            raise PrivateKeyParseError(str(exc), result=result) from exc

        # PEM text for output, DER bytes for fingerprinting.
        result["public_key"] = to_text(self._get_public_key(binary=False))
        pk = self._get_public_key(binary=True)
        result["public_key_fingerprints"] = (
            get_fingerprint_of_bytes(pk, prefer_one=prefer_one_fingerprint)
            if pk is not None
            else {}
        )

        # Private data is also needed (internally) for the consistency check.
        key_type, key_public_data, key_private_data = self._get_key_info(
            need_private_key_data=self.return_private_key_data or self.check_consistency
        )
        result["type"] = key_type
        result["public_data"] = key_public_data
        if self.return_private_key_data:
            result["private_data"] = key_private_data

        if self.check_consistency:
            result["key_is_consistent"] = self._is_key_consistent(
                key_public_data=key_public_data, key_private_data=key_private_data
            )
            if result["key_is_consistent"] is False:
                # Only fail when it is False, to avoid to fail on None (which means "we do not know")
                msg = (
                    "Private key is not consistent! (See "
                    "https://blog.hboeck.de/archives/888-How-I-tricked-Symantec-with-a-Fake-Private-Key.html)"
                )
                raise PrivateKeyConsistencyError(msg, result=result)
        return result
|
||||||
|
|
||||||
|
class PrivateKeyInfoRetrievalCryptography(PrivateKeyInfoRetrieval):
    """Validate the supplied private key, using the cryptography backend"""

    def __init__(
        self, *, module: GeneralAnsibleModule, content: bytes, **kwargs
    ) -> None:
        super().__init__(module=module, content=content, **kwargs)

    def _get_public_key(self, *, binary: bool) -> bytes:
        # DER for binary output, PEM otherwise; always SubjectPublicKeyInfo.
        encoding = serialization.Encoding.DER if binary else serialization.Encoding.PEM
        public_key = self.key.public_key()
        return public_key.public_bytes(
            encoding,
            serialization.PublicFormat.SubjectPublicKeyInfo,
        )

    def _get_key_info(
        self, *, need_private_key_data: bool = False
    ) -> tuple[str, dict[str, t.Any], dict[str, t.Any]]:
        # Delegate to the shared cryptography inspection helper.
        return _get_cryptography_private_key_info(
            self.key, need_private_key_data=need_private_key_data
        )

    def _is_key_consistent(
        self, *, key_public_data: dict[str, t.Any], key_private_data: dict[str, t.Any]
    ) -> bool | None:
        # Forward module.warn so unknown key types produce a warning.
        return _is_cryptography_key_consistent(
            self.key,
            key_public_data=key_public_data,
            key_private_data=key_private_data,
            warn_func=self.module.warn,
        )
|
||||||
|
|
||||||
|
def get_privatekey_info(
    *,
    module: GeneralAnsibleModule,
    content: bytes,
    passphrase: str | None = None,
    return_private_key_data: bool = False,
    prefer_one_fingerprint: bool = False,
) -> dict[str, t.Any]:
    """One-shot helper: parse a private key and return its info dictionary.

    Thin wrapper around PrivateKeyInfoRetrievalCryptography without the
    consistency check.
    """
    retrieval = PrivateKeyInfoRetrievalCryptography(
        module=module,
        content=content,
        passphrase=passphrase,
        return_private_key_data=return_private_key_data,
    )
    return retrieval.get_info(prefer_one_fingerprint=prefer_one_fingerprint)
|
||||||
|
|
||||||
|
def select_backend(
    *,
    module: GeneralAnsibleModule,
    content: bytes,
    passphrase: str | None = None,
    return_private_key_data: bool = False,
    check_consistency: bool = False,
) -> PrivateKeyInfoRetrieval:
    """Ensure the cryptography requirement is met and build the info backend.

    Fails the module if the installed cryptography library is too old.
    """
    minimum = MINIMAL_CRYPTOGRAPHY_VERSION
    assert_required_cryptography_version(module, minimum_cryptography_version=minimum)
    return PrivateKeyInfoRetrievalCryptography(
        module=module,
        content=content,
        passphrase=passphrase,
        return_private_key_data=return_private_key_data,
        check_consistency=check_consistency,
    )
|
||||||
|
|
||||||
|
__all__ = (
|
||||||
|
"PrivateKeyConsistencyError",
|
||||||
|
"PrivateKeyParseError",
|
||||||
|
"PrivateKeyInfoRetrieval",
|
||||||
|
"get_privatekey_info",
|
||||||
|
"select_backend",
|
||||||
|
)
|
||||||
204
plugins/module_utils/_crypto/module_backends/publickey_info.py
Normal file
204
plugins/module_utils/_crypto/module_backends/publickey_info.py
Normal file
@@ -0,0 +1,204 @@
|
|||||||
|
# Copyright (c) 2020-2021, Felix Fontein <felix@fontein.de>
|
||||||
|
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||||
|
# SPDX-License-Identifier: GPL-3.0-or-later
|
||||||
|
|
||||||
|
# Note that this module util is **PRIVATE** to the collection. It can have breaking changes at any time.
|
||||||
|
# Do not use this from other collections or standalone plugins/modules!
|
||||||
|
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import abc
|
||||||
|
import typing as t
|
||||||
|
|
||||||
|
from ansible_collections.community.crypto.plugins.module_utils._crypto.basic import (
|
||||||
|
OpenSSLObjectError,
|
||||||
|
)
|
||||||
|
from ansible_collections.community.crypto.plugins.module_utils._crypto.support import (
|
||||||
|
get_fingerprint_of_bytes,
|
||||||
|
load_publickey,
|
||||||
|
)
|
||||||
|
from ansible_collections.community.crypto.plugins.module_utils._cryptography_dep import (
|
||||||
|
COLLECTION_MINIMUM_CRYPTOGRAPHY_VERSION,
|
||||||
|
assert_required_cryptography_version,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
if t.TYPE_CHECKING:
|
||||||
|
from ansible.module_utils.basic import AnsibleModule
|
||||||
|
from ansible_collections.community.crypto.plugins.plugin_utils._action_module import (
|
||||||
|
AnsibleActionModule,
|
||||||
|
)
|
||||||
|
from ansible_collections.community.crypto.plugins.plugin_utils._filter_module import (
|
||||||
|
FilterModuleMock,
|
||||||
|
)
|
||||||
|
from cryptography.hazmat.primitives.asymmetric.types import (
|
||||||
|
PublicKeyTypes,
|
||||||
|
)
|
||||||
|
|
||||||
|
GeneralAnsibleModule = t.Union[AnsibleModule, AnsibleActionModule, FilterModuleMock]
|
||||||
|
|
||||||
|
|
||||||
|
MINIMAL_CRYPTOGRAPHY_VERSION = COLLECTION_MINIMUM_CRYPTOGRAPHY_VERSION
|
||||||
|
|
||||||
|
try:
|
||||||
|
import cryptography
|
||||||
|
import cryptography.hazmat.primitives.asymmetric.ed448
|
||||||
|
import cryptography.hazmat.primitives.asymmetric.ed25519
|
||||||
|
import cryptography.hazmat.primitives.asymmetric.x448
|
||||||
|
import cryptography.hazmat.primitives.asymmetric.x25519
|
||||||
|
from cryptography.hazmat.primitives import serialization
|
||||||
|
except ImportError:
|
||||||
|
pass
|
||||||
|
|
||||||
|
|
||||||
|
def _get_cryptography_public_key_info(
|
||||||
|
key: PublicKeyTypes,
|
||||||
|
) -> tuple[str, dict[str, t.Any]]:
|
||||||
|
key_public_data: dict[str, t.Any] = {}
|
||||||
|
if isinstance(key, cryptography.hazmat.primitives.asymmetric.rsa.RSAPublicKey):
|
||||||
|
key_type = "RSA"
|
||||||
|
rsa_public_numbers = key.public_numbers()
|
||||||
|
key_public_data["size"] = key.key_size
|
||||||
|
key_public_data["modulus"] = rsa_public_numbers.n
|
||||||
|
key_public_data["exponent"] = rsa_public_numbers.e
|
||||||
|
elif isinstance(key, cryptography.hazmat.primitives.asymmetric.dsa.DSAPublicKey):
|
||||||
|
key_type = "DSA"
|
||||||
|
dsa_parameter_numbers = key.parameters().parameter_numbers()
|
||||||
|
dsa_public_numbers = key.public_numbers()
|
||||||
|
key_public_data["size"] = key.key_size
|
||||||
|
key_public_data["p"] = dsa_parameter_numbers.p
|
||||||
|
key_public_data["q"] = dsa_parameter_numbers.q
|
||||||
|
key_public_data["g"] = dsa_parameter_numbers.g
|
||||||
|
key_public_data["y"] = dsa_public_numbers.y
|
||||||
|
elif isinstance(
|
||||||
|
key, cryptography.hazmat.primitives.asymmetric.x25519.X25519PublicKey
|
||||||
|
):
|
||||||
|
key_type = "X25519"
|
||||||
|
elif isinstance(key, cryptography.hazmat.primitives.asymmetric.x448.X448PublicKey):
|
||||||
|
key_type = "X448"
|
||||||
|
elif isinstance(
|
||||||
|
key, cryptography.hazmat.primitives.asymmetric.ed25519.Ed25519PublicKey
|
||||||
|
):
|
||||||
|
key_type = "Ed25519"
|
||||||
|
elif isinstance(
|
||||||
|
key, cryptography.hazmat.primitives.asymmetric.ed448.Ed448PublicKey
|
||||||
|
):
|
||||||
|
key_type = "Ed448"
|
||||||
|
elif isinstance(
|
||||||
|
key, cryptography.hazmat.primitives.asymmetric.ec.EllipticCurvePublicKey
|
||||||
|
):
|
||||||
|
key_type = "ECC"
|
||||||
|
ecc_public_numbers = key.public_numbers()
|
||||||
|
key_public_data["curve"] = key.curve.name
|
||||||
|
key_public_data["x"] = ecc_public_numbers.x
|
||||||
|
key_public_data["y"] = ecc_public_numbers.y
|
||||||
|
key_public_data["exponent_size"] = key.curve.key_size
|
||||||
|
else:
|
||||||
|
key_type = f"unknown ({type(key)})"
|
||||||
|
return key_type, key_public_data
|
||||||
|
|
||||||
|
|
||||||
|
class PublicKeyParseError(OpenSSLObjectError):
|
||||||
|
def __init__(self, msg: str, *, result: dict[str, t.Any]) -> None:
|
||||||
|
super().__init__(msg)
|
||||||
|
self.error_message = msg
|
||||||
|
self.result = result
|
||||||
|
|
||||||
|
|
||||||
|
class PublicKeyInfoRetrieval(metaclass=abc.ABCMeta):
|
||||||
|
def __init__(
|
||||||
|
self,
|
||||||
|
*,
|
||||||
|
module: GeneralAnsibleModule,
|
||||||
|
content: bytes | None = None,
|
||||||
|
key: PublicKeyTypes | None = None,
|
||||||
|
) -> None:
|
||||||
|
# content must be a bytes string
|
||||||
|
self.module = module
|
||||||
|
self.content = content
|
||||||
|
self.key = key
|
||||||
|
|
||||||
|
@abc.abstractmethod
|
||||||
|
def _get_public_key(self, binary: bool) -> bytes:
|
||||||
|
pass
|
||||||
|
|
||||||
|
@abc.abstractmethod
|
||||||
|
def _get_key_info(self) -> tuple[str, dict[str, t.Any]]:
|
||||||
|
pass
|
||||||
|
|
||||||
|
def get_info(self, *, prefer_one_fingerprint: bool = False) -> dict[str, t.Any]:
|
||||||
|
result: dict[str, t.Any] = {}
|
||||||
|
if self.key is None:
|
||||||
|
try:
|
||||||
|
self.key = load_publickey(content=self.content)
|
||||||
|
except OpenSSLObjectError as e:
|
||||||
|
raise PublicKeyParseError(str(e), result={}) from e
|
||||||
|
|
||||||
|
pk = self._get_public_key(binary=True)
|
||||||
|
result["fingerprints"] = (
|
||||||
|
get_fingerprint_of_bytes(pk, prefer_one=prefer_one_fingerprint)
|
||||||
|
if pk is not None
|
||||||
|
else {}
|
||||||
|
)
|
||||||
|
|
||||||
|
key_type, key_public_data = self._get_key_info()
|
||||||
|
result["type"] = key_type
|
||||||
|
result["public_data"] = key_public_data
|
||||||
|
return result
|
||||||
|
|
||||||
|
|
||||||
|
class PublicKeyInfoRetrievalCryptography(PublicKeyInfoRetrieval):
|
||||||
|
"""Validate the supplied public key, using the cryptography backend"""
|
||||||
|
|
||||||
|
def __init__(
|
||||||
|
self,
|
||||||
|
*,
|
||||||
|
module: GeneralAnsibleModule,
|
||||||
|
content: bytes | None = None,
|
||||||
|
key: PublicKeyTypes | None = None,
|
||||||
|
) -> None:
|
||||||
|
super().__init__(module=module, content=content, key=key)
|
||||||
|
|
||||||
|
def _get_public_key(self, binary: bool) -> bytes:
|
||||||
|
if self.key is None:
|
||||||
|
raise AssertionError("key must be set")
|
||||||
|
return self.key.public_bytes(
|
||||||
|
serialization.Encoding.DER if binary else serialization.Encoding.PEM,
|
||||||
|
serialization.PublicFormat.SubjectPublicKeyInfo,
|
||||||
|
)
|
||||||
|
|
||||||
|
def _get_key_info(self) -> tuple[str, dict[str, t.Any]]:
|
||||||
|
if self.key is None:
|
||||||
|
raise AssertionError("key must be set")
|
||||||
|
return _get_cryptography_public_key_info(self.key)
|
||||||
|
|
||||||
|
|
||||||
|
def get_publickey_info(
|
||||||
|
*,
|
||||||
|
module: GeneralAnsibleModule,
|
||||||
|
content: bytes | None = None,
|
||||||
|
key: PublicKeyTypes | None = None,
|
||||||
|
prefer_one_fingerprint: bool = False,
|
||||||
|
) -> dict[str, t.Any]:
|
||||||
|
info = PublicKeyInfoRetrievalCryptography(module=module, content=content, key=key)
|
||||||
|
return info.get_info(prefer_one_fingerprint=prefer_one_fingerprint)
|
||||||
|
|
||||||
|
|
||||||
|
def select_backend(
|
||||||
|
*,
|
||||||
|
module: GeneralAnsibleModule,
|
||||||
|
content: bytes | None = None,
|
||||||
|
key: PublicKeyTypes | None = None,
|
||||||
|
) -> PublicKeyInfoRetrieval:
|
||||||
|
assert_required_cryptography_version(
|
||||||
|
module, minimum_cryptography_version=MINIMAL_CRYPTOGRAPHY_VERSION
|
||||||
|
)
|
||||||
|
return PublicKeyInfoRetrievalCryptography(module=module, content=content, key=key)
|
||||||
|
|
||||||
|
|
||||||
|
__all__ = (
|
||||||
|
"PublicKeyParseError",
|
||||||
|
"PublicKeyInfoRetrieval",
|
||||||
|
"get_publickey_info",
|
||||||
|
"select_backend",
|
||||||
|
)
|
||||||
141
plugins/module_utils/_crypto/pem.py
Normal file
141
plugins/module_utils/_crypto/pem.py
Normal file
@@ -0,0 +1,141 @@
|
|||||||
|
# Copyright (c) 2019, Felix Fontein <felix@fontein.de>
|
||||||
|
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||||
|
# SPDX-License-Identifier: GPL-3.0-or-later
|
||||||
|
|
||||||
|
# Note that this module util is **PRIVATE** to the collection. It can have breaking changes at any time.
|
||||||
|
# Do not use this from other collections or standalone plugins/modules!
|
||||||
|
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import typing as t
|
||||||
|
|
||||||
|
|
||||||
|
PEM_START = "-----BEGIN "
|
||||||
|
PEM_END_START = "-----END "
|
||||||
|
PEM_END = "-----"
|
||||||
|
PKCS8_PRIVATEKEY_NAMES = ("PRIVATE KEY", "ENCRYPTED PRIVATE KEY")
|
||||||
|
PKCS1_PRIVATEKEY_SUFFIX = " PRIVATE KEY"
|
||||||
|
|
||||||
|
|
||||||
|
def identify_pem_format(content: bytes, *, encoding: str = "utf-8") -> bool:
|
||||||
|
"""Given the contents of a binary file, tests whether this could be a PEM file."""
|
||||||
|
try:
|
||||||
|
first_pem = extract_first_pem(content.decode(encoding))
|
||||||
|
if first_pem is None:
|
||||||
|
return False
|
||||||
|
lines = first_pem.splitlines(False)
|
||||||
|
if (
|
||||||
|
lines[0].startswith(PEM_START)
|
||||||
|
and lines[0].endswith(PEM_END)
|
||||||
|
and len(lines[0]) > len(PEM_START) + len(PEM_END)
|
||||||
|
):
|
||||||
|
return True
|
||||||
|
except UnicodeDecodeError:
|
||||||
|
pass
|
||||||
|
return False
|
||||||
|
|
||||||
|
|
||||||
|
def identify_private_key_format(
|
||||||
|
content: bytes, *, encoding: str = "utf-8"
|
||||||
|
) -> t.Literal["raw", "pkcs1", "pkcs8", "unknown-pem"]:
|
||||||
|
"""Given the contents of a private key file, identifies its format."""
|
||||||
|
# See https://github.com/openssl/openssl/blob/master/crypto/pem/pem_pkey.c#L40-L85
|
||||||
|
# (PEM_read_bio_PrivateKey)
|
||||||
|
# and https://github.com/openssl/openssl/blob/master/include/openssl/pem.h#L46-L47
|
||||||
|
# (PEM_STRING_PKCS8, PEM_STRING_PKCS8INF)
|
||||||
|
try:
|
||||||
|
first_pem = extract_first_pem(content.decode(encoding))
|
||||||
|
if first_pem is None:
|
||||||
|
return "raw"
|
||||||
|
lines = first_pem.splitlines(False)
|
||||||
|
if (
|
||||||
|
lines[0].startswith(PEM_START)
|
||||||
|
and lines[0].endswith(PEM_END)
|
||||||
|
and len(lines[0]) > len(PEM_START) + len(PEM_END)
|
||||||
|
):
|
||||||
|
name = lines[0][len(PEM_START) : -len(PEM_END)]
|
||||||
|
if name in PKCS8_PRIVATEKEY_NAMES:
|
||||||
|
return "pkcs8"
|
||||||
|
if len(name) > len(PKCS1_PRIVATEKEY_SUFFIX) and name.endswith(
|
||||||
|
PKCS1_PRIVATEKEY_SUFFIX
|
||||||
|
):
|
||||||
|
return "pkcs1"
|
||||||
|
return "unknown-pem"
|
||||||
|
except UnicodeDecodeError:
|
||||||
|
pass
|
||||||
|
return "raw"
|
||||||
|
|
||||||
|
|
||||||
|
def split_pem_list(text: str, *, keep_inbetween: bool = False) -> list[str]:
|
||||||
|
"""
|
||||||
|
Split concatenated PEM objects into a list of strings, where each is one PEM object.
|
||||||
|
"""
|
||||||
|
result = []
|
||||||
|
current: list[str] | None = [] if keep_inbetween else None
|
||||||
|
for line in text.splitlines(True):
|
||||||
|
if line.strip():
|
||||||
|
if not keep_inbetween and line.startswith("-----BEGIN "):
|
||||||
|
current = []
|
||||||
|
if current is not None:
|
||||||
|
current.append(line)
|
||||||
|
if line.startswith("-----END "):
|
||||||
|
result.append("".join(current))
|
||||||
|
current = [] if keep_inbetween else None
|
||||||
|
return result
|
||||||
|
|
||||||
|
|
||||||
|
def extract_first_pem(text: str) -> str | None:
|
||||||
|
"""
|
||||||
|
Given one PEM or multiple concatenated PEM objects, return only the first one, or None if there is none.
|
||||||
|
"""
|
||||||
|
all_pems = split_pem_list(text)
|
||||||
|
if not all_pems:
|
||||||
|
return None
|
||||||
|
return all_pems[0]
|
||||||
|
|
||||||
|
|
||||||
|
def _extract_type(line: str, *, start: str = PEM_START) -> str | None:
|
||||||
|
if not line.startswith(start):
|
||||||
|
return None
|
||||||
|
if not line.endswith(PEM_END):
|
||||||
|
return None
|
||||||
|
return line[len(start) : -len(PEM_END)]
|
||||||
|
|
||||||
|
|
||||||
|
def extract_pem(content: str, *, strict: bool = False) -> tuple[str, str]:
|
||||||
|
lines = content.splitlines()
|
||||||
|
if len(lines) < 3:
|
||||||
|
raise ValueError(f"PEM must have at least 3 lines, have only {len(lines)}")
|
||||||
|
header_type = _extract_type(lines[0])
|
||||||
|
if header_type is None:
|
||||||
|
raise ValueError(
|
||||||
|
f"First line is not of format {PEM_START}...{PEM_END}: {lines[0]!r}"
|
||||||
|
)
|
||||||
|
footer_type = _extract_type(lines[-1], start=PEM_END_START)
|
||||||
|
if strict:
|
||||||
|
if header_type != footer_type:
|
||||||
|
raise ValueError(
|
||||||
|
f"Header type ({header_type}) is different from footer type ({footer_type})"
|
||||||
|
)
|
||||||
|
for idx, line in enumerate(lines[1:-2]):
|
||||||
|
if len(line) != 64:
|
||||||
|
raise ValueError(f"Line {idx} has length {len(line)} instead of 64")
|
||||||
|
if not (0 < len(lines[-2]) <= 64):
|
||||||
|
raise ValueError(
|
||||||
|
f"Last line has length {len(lines[-2])}, should be in (0, 64]"
|
||||||
|
)
|
||||||
|
return header_type, "".join(lines[1:-1])
|
||||||
|
|
||||||
|
|
||||||
|
__all__ = (
|
||||||
|
"PEM_START",
|
||||||
|
"PEM_END_START",
|
||||||
|
"PEM_END",
|
||||||
|
"PKCS8_PRIVATEKEY_NAMES",
|
||||||
|
"PKCS1_PRIVATEKEY_SUFFIX",
|
||||||
|
"identify_pem_format",
|
||||||
|
"identify_private_key_format",
|
||||||
|
"split_pem_list",
|
||||||
|
"extract_first_pem",
|
||||||
|
"extract_pem",
|
||||||
|
)
|
||||||
447
plugins/module_utils/_crypto/support.py
Normal file
447
plugins/module_utils/_crypto/support.py
Normal file
@@ -0,0 +1,447 @@
|
|||||||
|
# Copyright (c) 2016, Yanis Guenane <yanis+ansible@guenane.org>
|
||||||
|
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||||
|
# SPDX-License-Identifier: GPL-3.0-or-later
|
||||||
|
|
||||||
|
# Note that this module util is **PRIVATE** to the collection. It can have breaking changes at any time.
|
||||||
|
# Do not use this from other collections or standalone plugins/modules!
|
||||||
|
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import abc
|
||||||
|
import errno
|
||||||
|
import hashlib
|
||||||
|
import os
|
||||||
|
import typing as t
|
||||||
|
|
||||||
|
from ansible.module_utils.common.text.converters import to_bytes
|
||||||
|
from ansible_collections.community.crypto.plugins.module_utils._crypto.cryptography_support import (
|
||||||
|
is_potential_certificate_issuer_private_key,
|
||||||
|
is_potential_certificate_private_key,
|
||||||
|
)
|
||||||
|
from ansible_collections.community.crypto.plugins.module_utils._crypto.pem import (
|
||||||
|
identify_pem_format,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
try:
|
||||||
|
from cryptography import x509
|
||||||
|
from cryptography.hazmat.primitives import hashes, serialization
|
||||||
|
from cryptography.hazmat.primitives.serialization import load_pem_private_key
|
||||||
|
except ImportError:
|
||||||
|
# Error handled in the calling module.
|
||||||
|
pass
|
||||||
|
|
||||||
|
from ansible_collections.community.crypto.plugins.module_utils._crypto.basic import (
|
||||||
|
OpenSSLBadPassphraseError,
|
||||||
|
OpenSSLObjectError,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
if t.TYPE_CHECKING:
|
||||||
|
from ansible.module_utils.basic import AnsibleModule
|
||||||
|
from ansible_collections.community.crypto.plugins.module_utils._crypto.cryptography_support import (
|
||||||
|
CertificatePrivateKeyTypes,
|
||||||
|
)
|
||||||
|
from cryptography.hazmat.primitives.asymmetric.types import (
|
||||||
|
CertificateIssuerPrivateKeyTypes,
|
||||||
|
PrivateKeyTypes,
|
||||||
|
PublicKeyTypes,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
# This list of preferred fingerprints is used when prefer_one=True is supplied to the
|
||||||
|
# fingerprinting methods.
|
||||||
|
PREFERRED_FINGERPRINTS = (
|
||||||
|
"sha256",
|
||||||
|
"sha3_256",
|
||||||
|
"sha512",
|
||||||
|
"sha3_512",
|
||||||
|
"sha384",
|
||||||
|
"sha3_384",
|
||||||
|
"sha1",
|
||||||
|
"md5",
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
def get_fingerprint_of_bytes(
|
||||||
|
source: bytes, *, prefer_one: bool = False
|
||||||
|
) -> dict[str, str]:
|
||||||
|
"""Generate the fingerprint of the given bytes."""
|
||||||
|
|
||||||
|
fingerprint = {}
|
||||||
|
|
||||||
|
algorithms: t.Iterable[str] = hashlib.algorithms_guaranteed
|
||||||
|
|
||||||
|
if prefer_one:
|
||||||
|
# Sort algorithms to have the ones in PREFERRED_FINGERPRINTS at the beginning
|
||||||
|
prefered_algorithms = [
|
||||||
|
algorithm for algorithm in PREFERRED_FINGERPRINTS if algorithm in algorithms
|
||||||
|
]
|
||||||
|
prefered_algorithms += sorted(
|
||||||
|
[
|
||||||
|
algorithm
|
||||||
|
for algorithm in algorithms
|
||||||
|
if algorithm not in PREFERRED_FINGERPRINTS
|
||||||
|
]
|
||||||
|
)
|
||||||
|
algorithms = prefered_algorithms
|
||||||
|
|
||||||
|
for algo in algorithms:
|
||||||
|
f = getattr(hashlib, algo)
|
||||||
|
try:
|
||||||
|
h = f(source)
|
||||||
|
except ValueError:
|
||||||
|
# This can happen for hash algorithms not supported in FIPS mode
|
||||||
|
# (https://github.com/ansible/ansible/issues/67213)
|
||||||
|
continue
|
||||||
|
try:
|
||||||
|
# Certain hash functions have a hexdigest() which expects a length parameter
|
||||||
|
pubkey_digest = h.hexdigest()
|
||||||
|
except TypeError:
|
||||||
|
pubkey_digest = h.hexdigest(32)
|
||||||
|
fingerprint[algo] = ":".join(
|
||||||
|
pubkey_digest[i : i + 2] for i in range(0, len(pubkey_digest), 2)
|
||||||
|
)
|
||||||
|
if prefer_one:
|
||||||
|
break
|
||||||
|
|
||||||
|
return fingerprint
|
||||||
|
|
||||||
|
|
||||||
|
def get_fingerprint_of_privatekey(
|
||||||
|
privatekey: PrivateKeyTypes, *, prefer_one: bool = False
|
||||||
|
) -> dict[str, str]:
|
||||||
|
"""Generate the fingerprint of the public key."""
|
||||||
|
|
||||||
|
publickey = privatekey.public_key().public_bytes(
|
||||||
|
serialization.Encoding.DER, serialization.PublicFormat.SubjectPublicKeyInfo
|
||||||
|
)
|
||||||
|
|
||||||
|
return get_fingerprint_of_bytes(publickey, prefer_one=prefer_one)
|
||||||
|
|
||||||
|
|
||||||
|
def get_fingerprint(
|
||||||
|
*,
|
||||||
|
path: os.PathLike | str | None = None,
|
||||||
|
passphrase: str | bytes | None = None,
|
||||||
|
content: bytes | None = None,
|
||||||
|
prefer_one: bool = False,
|
||||||
|
) -> dict[str, str]:
|
||||||
|
"""Generate the fingerprint of the public key."""
|
||||||
|
|
||||||
|
privatekey = load_privatekey(
|
||||||
|
path=path,
|
||||||
|
passphrase=passphrase,
|
||||||
|
content=content,
|
||||||
|
check_passphrase=False,
|
||||||
|
)
|
||||||
|
|
||||||
|
return get_fingerprint_of_privatekey(privatekey, prefer_one=prefer_one)
|
||||||
|
|
||||||
|
|
||||||
|
def load_privatekey(
|
||||||
|
*,
|
||||||
|
path: os.PathLike | str | None = None,
|
||||||
|
passphrase: str | bytes | None = None,
|
||||||
|
check_passphrase: bool = True,
|
||||||
|
content: bytes | None = None,
|
||||||
|
) -> PrivateKeyTypes:
|
||||||
|
"""Load the specified OpenSSL private key.
|
||||||
|
|
||||||
|
The content can also be specified via content; in that case,
|
||||||
|
this function will not load the key from disk.
|
||||||
|
"""
|
||||||
|
|
||||||
|
try:
|
||||||
|
if content is None:
|
||||||
|
if path is None:
|
||||||
|
raise OpenSSLObjectError("Must provide either path or content")
|
||||||
|
with open(path, "rb") as b_priv_key_fh:
|
||||||
|
priv_key_detail = b_priv_key_fh.read()
|
||||||
|
else:
|
||||||
|
priv_key_detail = content
|
||||||
|
except (IOError, OSError) as exc:
|
||||||
|
raise OpenSSLObjectError(exc) from exc
|
||||||
|
|
||||||
|
try:
|
||||||
|
return load_pem_private_key(
|
||||||
|
priv_key_detail,
|
||||||
|
None if passphrase is None else to_bytes(passphrase),
|
||||||
|
)
|
||||||
|
except TypeError as exc:
|
||||||
|
raise OpenSSLBadPassphraseError(
|
||||||
|
"Wrong or empty passphrase provided for private key"
|
||||||
|
) from exc
|
||||||
|
except ValueError as exc:
|
||||||
|
raise OpenSSLBadPassphraseError(
|
||||||
|
"Wrong passphrase provided for private key"
|
||||||
|
) from exc
|
||||||
|
|
||||||
|
|
||||||
|
def load_certificate_privatekey(
|
||||||
|
*,
|
||||||
|
path: os.PathLike | str | None = None,
|
||||||
|
content: bytes | None = None,
|
||||||
|
passphrase: str | bytes | None = None,
|
||||||
|
check_passphrase: bool = True,
|
||||||
|
) -> CertificatePrivateKeyTypes:
|
||||||
|
"""
|
||||||
|
Load the specified OpenSSL private key that can be used as a private key for certificates.
|
||||||
|
"""
|
||||||
|
private_key = load_privatekey(
|
||||||
|
path=path,
|
||||||
|
passphrase=passphrase,
|
||||||
|
check_passphrase=check_passphrase,
|
||||||
|
content=content,
|
||||||
|
)
|
||||||
|
if not is_potential_certificate_private_key(private_key):
|
||||||
|
raise OpenSSLObjectError(
|
||||||
|
f"Key of type {type(private_key)} not supported for certificates"
|
||||||
|
)
|
||||||
|
return private_key
|
||||||
|
|
||||||
|
|
||||||
|
def load_certificate_issuer_privatekey(
|
||||||
|
*,
|
||||||
|
path: os.PathLike | str | None = None,
|
||||||
|
content: bytes | None = None,
|
||||||
|
passphrase: str | bytes | None = None,
|
||||||
|
check_passphrase: bool = True,
|
||||||
|
) -> CertificateIssuerPrivateKeyTypes:
|
||||||
|
"""
|
||||||
|
Load the specified OpenSSL private key that can be used for issuing certificates.
|
||||||
|
"""
|
||||||
|
private_key = load_privatekey(
|
||||||
|
path=path,
|
||||||
|
passphrase=passphrase,
|
||||||
|
check_passphrase=check_passphrase,
|
||||||
|
content=content,
|
||||||
|
)
|
||||||
|
if not is_potential_certificate_issuer_private_key(private_key):
|
||||||
|
raise OpenSSLObjectError(
|
||||||
|
f"Key of type {type(private_key)} not supported for issuing certificates"
|
||||||
|
)
|
||||||
|
return private_key
|
||||||
|
|
||||||
|
|
||||||
|
def load_publickey(
|
||||||
|
*, path: os.PathLike | str | None = None, content: bytes | None = None
|
||||||
|
) -> PublicKeyTypes:
|
||||||
|
if content is None:
|
||||||
|
if path is None:
|
||||||
|
raise OpenSSLObjectError("Must provide either path or content")
|
||||||
|
try:
|
||||||
|
with open(path, "rb") as b_priv_key_fh:
|
||||||
|
content = b_priv_key_fh.read()
|
||||||
|
except (IOError, OSError) as exc:
|
||||||
|
raise OpenSSLObjectError(exc) from exc
|
||||||
|
|
||||||
|
try:
|
||||||
|
return serialization.load_pem_public_key(content)
|
||||||
|
except Exception as e:
|
||||||
|
raise OpenSSLObjectError(f"Error while deserializing key: {e}") from e
|
||||||
|
|
||||||
|
|
||||||
|
def load_certificate(
|
||||||
|
*,
|
||||||
|
path: os.PathLike | str | None = None,
|
||||||
|
content: bytes | None = None,
|
||||||
|
der_support_enabled: bool = False,
|
||||||
|
) -> x509.Certificate:
|
||||||
|
"""Load the specified certificate."""
|
||||||
|
|
||||||
|
try:
|
||||||
|
if content is None:
|
||||||
|
if path is None:
|
||||||
|
raise OpenSSLObjectError("Must provide either path or content")
|
||||||
|
with open(path, "rb") as cert_fh:
|
||||||
|
cert_content = cert_fh.read()
|
||||||
|
else:
|
||||||
|
cert_content = content
|
||||||
|
except (IOError, OSError) as exc:
|
||||||
|
raise OpenSSLObjectError(exc) from exc
|
||||||
|
if der_support_enabled is False or identify_pem_format(cert_content):
|
||||||
|
try:
|
||||||
|
return x509.load_pem_x509_certificate(cert_content)
|
||||||
|
except ValueError as exc:
|
||||||
|
raise OpenSSLObjectError(exc) from exc
|
||||||
|
elif der_support_enabled:
|
||||||
|
try:
|
||||||
|
return x509.load_der_x509_certificate(cert_content)
|
||||||
|
except ValueError as exc:
|
||||||
|
raise OpenSSLObjectError(f"Cannot parse DER certificate: {exc}") from exc
|
||||||
|
|
||||||
|
|
||||||
|
def load_certificate_request(
|
||||||
|
*, path: os.PathLike | str | None = None, content: bytes | None = None
|
||||||
|
) -> x509.CertificateSigningRequest:
|
||||||
|
"""Load the specified certificate signing request."""
|
||||||
|
try:
|
||||||
|
if content is None:
|
||||||
|
if path is None:
|
||||||
|
raise OpenSSLObjectError("Must provide either path or content")
|
||||||
|
with open(path, "rb") as csr_fh:
|
||||||
|
csr_content = csr_fh.read()
|
||||||
|
else:
|
||||||
|
csr_content = content
|
||||||
|
except (IOError, OSError) as exc:
|
||||||
|
raise OpenSSLObjectError(exc) from exc
|
||||||
|
try:
|
||||||
|
return x509.load_pem_x509_csr(csr_content)
|
||||||
|
except ValueError as exc:
|
||||||
|
raise OpenSSLObjectError(exc) from exc
|
||||||
|
|
||||||
|
|
||||||
|
def parse_name_field(
|
||||||
|
input_dict: dict[str, list[str | bytes] | str | bytes],
|
||||||
|
*,
|
||||||
|
name_field_name: str | None = None,
|
||||||
|
) -> list[tuple[str, str | bytes]]:
|
||||||
|
"""Take a dict with key: value or key: list_of_values mappings and return a list of tuples"""
|
||||||
|
|
||||||
|
def error_str(key: str) -> str:
|
||||||
|
if name_field_name is None:
|
||||||
|
return f"{key}"
|
||||||
|
return f"{key} in {name_field_name}"
|
||||||
|
|
||||||
|
result = []
|
||||||
|
for key, value in input_dict.items():
|
||||||
|
if isinstance(value, list):
|
||||||
|
for entry in value:
|
||||||
|
if not isinstance(entry, (str, bytes)):
|
||||||
|
raise TypeError(f"Values {error_str(key)} must be strings")
|
||||||
|
if not entry:
|
||||||
|
raise ValueError(
|
||||||
|
f"Values for {error_str(key)} must not be empty strings"
|
||||||
|
)
|
||||||
|
result.append((key, entry))
|
||||||
|
elif isinstance(value, (str, bytes)):
|
||||||
|
if not value:
|
||||||
|
raise ValueError(
|
||||||
|
f"Value for {error_str(key)} must not be an empty string"
|
||||||
|
)
|
||||||
|
result.append((key, value))
|
||||||
|
else:
|
||||||
|
raise TypeError(
|
||||||
|
f"Value for {error_str(key)} must be either a string or a list of strings"
|
||||||
|
)
|
||||||
|
return result
|
||||||
|
|
||||||
|
|
||||||
|
def parse_ordered_name_field(
|
||||||
|
input_list: list[dict[str, list[str | bytes] | str | bytes]],
|
||||||
|
*,
|
||||||
|
name_field_name: str,
|
||||||
|
) -> list[tuple[str, str | bytes]]:
|
||||||
|
"""Take a dict with key: value or key: list_of_values mappings and return a list of tuples"""
|
||||||
|
|
||||||
|
result = []
|
||||||
|
for index, entry in enumerate(input_list):
|
||||||
|
if len(entry) != 1:
|
||||||
|
raise ValueError(
|
||||||
|
f"Entry #{index + 1} in {name_field_name} must be a dictionary with exactly one key-value pair"
|
||||||
|
)
|
||||||
|
try:
|
||||||
|
result.extend(parse_name_field(entry, name_field_name=name_field_name))
|
||||||
|
except (TypeError, ValueError) as exc:
|
||||||
|
raise ValueError(
|
||||||
|
f"Error while processing entry #{index + 1} in {name_field_name}: {exc}"
|
||||||
|
) from exc
|
||||||
|
return result
|
||||||
|
|
||||||
|
|
||||||
|
@t.overload
|
||||||
|
def select_message_digest(
|
||||||
|
digest_string: t.Literal["sha256", "sha384", "sha512", "sha1", "md5"],
|
||||||
|
) -> hashes.SHA256 | hashes.SHA384 | hashes.SHA512 | hashes.SHA1 | hashes.MD5: ...
|
||||||
|
|
||||||
|
|
||||||
|
@t.overload
|
||||||
|
def select_message_digest(
|
||||||
|
digest_string: str,
|
||||||
|
) -> (
|
||||||
|
hashes.SHA256 | hashes.SHA384 | hashes.SHA512 | hashes.SHA1 | hashes.MD5 | None
|
||||||
|
): ...
|
||||||
|
|
||||||
|
|
||||||
|
def select_message_digest(
|
||||||
|
digest_string: str,
|
||||||
|
) -> hashes.SHA256 | hashes.SHA384 | hashes.SHA512 | hashes.SHA1 | hashes.MD5 | None:
|
||||||
|
if digest_string == "sha256":
|
||||||
|
return hashes.SHA256()
|
||||||
|
if digest_string == "sha384":
|
||||||
|
return hashes.SHA384()
|
||||||
|
if digest_string == "sha512":
|
||||||
|
return hashes.SHA512()
|
||||||
|
if digest_string == "sha1":
|
||||||
|
return hashes.SHA1()
|
||||||
|
if digest_string == "md5":
|
||||||
|
return hashes.MD5()
|
||||||
|
return None
|
||||||
|
|
||||||
|
|
||||||
|
class OpenSSLObject(metaclass=abc.ABCMeta):
|
||||||
|
|
||||||
|
def __init__(self, *, path: str, state: str, force: bool, check_mode: bool) -> None:
|
||||||
|
self.path = path
|
||||||
|
self.state = state
|
||||||
|
self.force = force
|
||||||
|
self.name = os.path.basename(path)
|
||||||
|
self.changed = False
|
||||||
|
self.check_mode = check_mode
|
||||||
|
|
||||||
|
def check(self, module: AnsibleModule, *, perms_required: bool = True) -> bool:
|
||||||
|
"""Ensure the resource is in its desired state."""
|
||||||
|
|
||||||
|
def _check_state() -> bool:
|
||||||
|
return os.path.exists(self.path)
|
||||||
|
|
||||||
|
def _check_perms(module: AnsibleModule) -> bool:
|
||||||
|
file_args = module.load_file_common_arguments(module.params)
|
||||||
|
if module.check_file_absent_if_check_mode(file_args["path"]):
|
||||||
|
return False
|
||||||
|
return not module.set_fs_attributes_if_different(file_args, False)
|
||||||
|
|
||||||
|
if not perms_required:
|
||||||
|
return _check_state()
|
||||||
|
|
||||||
|
return _check_state() and _check_perms(module)
|
||||||
|
|
||||||
|
@abc.abstractmethod
|
||||||
|
def dump(self) -> dict[str, t.Any]:
|
||||||
|
"""Serialize the object into a dictionary."""
|
||||||
|
|
||||||
|
@abc.abstractmethod
|
||||||
|
def generate(self, module: AnsibleModule) -> None:
|
||||||
|
"""Generate the resource."""
|
||||||
|
|
||||||
|
def remove(self, module: AnsibleModule) -> None:
|
||||||
|
"""Remove the resource from the filesystem."""
|
||||||
|
if self.check_mode:
|
||||||
|
if os.path.exists(self.path):
|
||||||
|
self.changed = True
|
||||||
|
return
|
||||||
|
|
||||||
|
try:
|
||||||
|
os.remove(self.path)
|
||||||
|
self.changed = True
|
||||||
|
except OSError as exc:
|
||||||
|
if exc.errno != errno.ENOENT:
|
||||||
|
raise OpenSSLObjectError(exc) from exc
|
||||||
|
|
||||||
|
|
||||||
|
__all__ = (
|
||||||
|
"get_fingerprint_of_bytes",
|
||||||
|
"get_fingerprint_of_privatekey",
|
||||||
|
"get_fingerprint",
|
||||||
|
"load_privatekey",
|
||||||
|
"load_certificate_privatekey",
|
||||||
|
"load_certificate_issuer_privatekey",
|
||||||
|
"load_publickey",
|
||||||
|
"load_certificate",
|
||||||
|
"load_certificate_request",
|
||||||
|
"parse_name_field",
|
||||||
|
"parse_ordered_name_field",
|
||||||
|
"select_message_digest",
|
||||||
|
"OpenSSLObject",
|
||||||
|
)
|
||||||
81
plugins/module_utils/_cryptography_dep.py
Normal file
81
plugins/module_utils/_cryptography_dep.py
Normal file
@@ -0,0 +1,81 @@
|
|||||||
|
# Copyright (c) 2025 Ansible project
|
||||||
|
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||||
|
# SPDX-License-Identifier: GPL-3.0-or-later
|
||||||
|
|
||||||
|
# Note that this module util is **PRIVATE** to the collection. It can have breaking changes at any time.
|
||||||
|
# Do not use this from other collections or standalone plugins/modules!
|
||||||
|
|
||||||
|
"""
|
||||||
|
Module utils for cryptography requirements.
|
||||||
|
|
||||||
|
Must be kept in sync with plugins/doc_fragments/cryptography_dep.py.
|
||||||
|
"""
|
||||||
|
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import traceback
|
||||||
|
import typing as t
|
||||||
|
|
||||||
|
from ansible.module_utils.basic import missing_required_lib
|
||||||
|
from ansible_collections.community.crypto.plugins.module_utils._version import (
|
||||||
|
LooseVersion,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
if t.TYPE_CHECKING:
|
||||||
|
from ansible.module_utils.basic import AnsibleModule
|
||||||
|
from ansible_collections.community.crypto.plugins.plugin_utils._action_module import (
|
||||||
|
AnsibleActionModule,
|
||||||
|
)
|
||||||
|
from ansible_collections.community.crypto.plugins.plugin_utils._filter_module import (
|
||||||
|
FilterModuleMock,
|
||||||
|
)
|
||||||
|
|
||||||
|
GeneralAnsibleModule = t.Union[AnsibleModule, AnsibleActionModule, FilterModuleMock]
|
||||||
|
|
||||||
|
|
||||||
|
_CRYPTOGRAPHY_IMP_ERR: str | None = None
|
||||||
|
_CRYPTOGRAPHY_FILE: str | None = None
|
||||||
|
try:
|
||||||
|
import cryptography
|
||||||
|
from cryptography import x509 # noqa: F401, pylint: disable=unused-import
|
||||||
|
|
||||||
|
CRYPTOGRAPHY_VERSION = LooseVersion(cryptography.__version__)
|
||||||
|
_CRYPTOGRAPHY_FILE = cryptography.__file__
|
||||||
|
except ImportError:
|
||||||
|
_CRYPTOGRAPHY_IMP_ERR = traceback.format_exc()
|
||||||
|
CRYPTOGRAPHY_FOUND = False
|
||||||
|
CRYPTOGRAPHY_VERSION = LooseVersion("0.0")
|
||||||
|
else:
|
||||||
|
CRYPTOGRAPHY_FOUND = True
|
||||||
|
|
||||||
|
|
||||||
|
# Corresponds to the community.crypto.cryptography_dep.minimum doc fragment
|
||||||
|
COLLECTION_MINIMUM_CRYPTOGRAPHY_VERSION = "3.3"
|
||||||
|
|
||||||
|
|
||||||
|
def assert_required_cryptography_version(
    module: GeneralAnsibleModule,
    *,
    minimum_cryptography_version: str = COLLECTION_MINIMUM_CRYPTOGRAPHY_VERSION,
) -> None:
    """Fail ``module`` unless cryptography is installed and recent enough.

    Calls ``module.fail_json`` when the library is missing entirely, or when
    the installed version is older than ``minimum_cryptography_version``.
    """
    if not CRYPTOGRAPHY_FOUND:
        module.fail_json(
            msg=missing_required_lib(f"cryptography >= {minimum_cryptography_version}"),
            exception=_CRYPTOGRAPHY_IMP_ERR,
        )
    required_version = LooseVersion(minimum_cryptography_version)
    if CRYPTOGRAPHY_VERSION < required_version:
        module.fail_json(
            msg=(
                f"Cannot detect the required Python library cryptography (>= {minimum_cryptography_version})."
                f" Only found a too old version ({CRYPTOGRAPHY_VERSION}) at {_CRYPTOGRAPHY_FILE}."
            ),
        )
|
||||||
|
|
||||||
|
|
||||||
|
__all__ = (
|
||||||
|
"COLLECTION_MINIMUM_CRYPTOGRAPHY_VERSION",
|
||||||
|
"CRYPTOGRAPHY_FOUND",
|
||||||
|
"CRYPTOGRAPHY_VERSION",
|
||||||
|
"assert_required_cryptography_version",
|
||||||
|
)
|
||||||
@@ -1,5 +1,3 @@
|
|||||||
# -*- coding: utf-8 -*-
|
|
||||||
|
|
||||||
# This code is part of Ansible, but is an independent component.
|
# This code is part of Ansible, but is an independent component.
|
||||||
# This particular file snippet, and this file snippet only, is licensed under the
|
# This particular file snippet, and this file snippet only, is licensed under the
|
||||||
# Modified BSD License. Modules you write using this snippet, which is embedded
|
# Modified BSD License. Modules you write using this snippet, which is embedded
|
||||||
@@ -10,21 +8,28 @@
|
|||||||
# Simplified BSD License (see LICENSES/BSD-2-Clause.txt or https://opensource.org/licenses/BSD-2-Clause)
|
# Simplified BSD License (see LICENSES/BSD-2-Clause.txt or https://opensource.org/licenses/BSD-2-Clause)
|
||||||
# SPDX-License-Identifier: BSD-2-Clause
|
# SPDX-License-Identifier: BSD-2-Clause
|
||||||
|
|
||||||
from __future__ import absolute_import, division, print_function
|
# Note that this module util is **PRIVATE** to the collection. It can have breaking changes at any time.
|
||||||
|
# Do not use this from other collections or standalone plugins/modules!
|
||||||
|
|
||||||
__metaclass__ = type
|
from __future__ import annotations
|
||||||
|
|
||||||
import json
|
import json
|
||||||
import os
|
import os
|
||||||
import re
|
import re
|
||||||
import traceback
|
import traceback
|
||||||
|
import typing as t
|
||||||
|
from urllib.error import HTTPError
|
||||||
|
from urllib.parse import urlencode
|
||||||
|
|
||||||
from ansible.module_utils.common.text.converters import to_text, to_native
|
|
||||||
from ansible.module_utils.basic import missing_required_lib
|
from ansible.module_utils.basic import missing_required_lib
|
||||||
from ansible.module_utils.six.moves.urllib.parse import urlencode
|
from ansible.module_utils.common.text.converters import to_text
|
||||||
from ansible.module_utils.six.moves.urllib.error import HTTPError
|
|
||||||
from ansible.module_utils.urls import Request
|
from ansible.module_utils.urls import Request
|
||||||
|
|
||||||
|
|
||||||
|
if t.TYPE_CHECKING:
|
||||||
|
_P = t.ParamSpec("_P")
|
||||||
|
|
||||||
|
|
||||||
YAML_IMP_ERR = None
|
YAML_IMP_ERR = None
|
||||||
try:
|
try:
|
||||||
import yaml
|
import yaml
|
||||||
@@ -37,32 +42,37 @@ else:
|
|||||||
valid_file_format = re.compile(r".*(\.)(yml|yaml|json)$")
|
valid_file_format = re.compile(r".*(\.)(yml|yaml|json)$")
|
||||||
|
|
||||||
|
|
||||||
def ecs_client_argument_spec() -> dict[str, t.Any]:
    """Return the module argument spec shared by all Entrust ECS client modules."""
    spec: dict[str, t.Any] = {}
    spec["entrust_api_user"] = {"type": "str", "required": True}
    spec["entrust_api_key"] = {"type": "str", "required": True, "no_log": True}
    spec["entrust_api_client_cert_path"] = {"type": "path", "required": True}
    spec["entrust_api_client_cert_key_path"] = {
        "type": "path",
        "required": True,
        "no_log": True,
    }
    spec["entrust_api_specification_path"] = {
        "type": "path",
        "default": "https://cloud.entrust.net/EntrustCloud/documentation/cms-api-2.1.0.yaml",
    }
    return spec
|
||||||
|
|
||||||
|
|
||||||
class SessionConfigurationException(Exception):
    """Raised if we cannot configure a session with the API.

    Covers missing credentials, a missing client certificate/key, and an
    invalid or unreachable OpenAPI specification path.
    """
|
||||||
|
|
||||||
|
|
||||||
class RestOperationException(Exception):
    """Encapsulate a REST API error"""

    def __init__(self, error: dict[str, t.Any]) -> None:
        # `error` is the decoded JSON error body: a "status" code plus a
        # list of {"message": ...} entries under "errors" (both optional).
        self.status = to_text(error.get("status", None))
        messages = []
        for err in error.get("errors", {}):
            messages.append(to_text(err.get("message")))
        self.errors = messages
        self.message = " ".join(messages)
|
||||||
|
|
||||||
|
|
||||||
def generate_docstring(operation_spec):
|
def generate_docstring(operation_spec: dict[str, t.Any]) -> str:
|
||||||
"""Generate a docstring for an operation defined in operation_spec (swagger)"""
|
"""Generate a docstring for an operation defined in operation_spec (swagger)"""
|
||||||
# Description of the operation
|
# Description of the operation
|
||||||
docs = operation_spec.get("description", "No Description")
|
docs = operation_spec.get("description", "No Description")
|
||||||
@@ -73,43 +83,56 @@ def generate_docstring(operation_spec):
|
|||||||
if len(parameters) != 0:
|
if len(parameters) != 0:
|
||||||
docs += "\tArguments:\n\n"
|
docs += "\tArguments:\n\n"
|
||||||
for parameter in parameters:
|
for parameter in parameters:
|
||||||
docs += "{0} ({1}:{2}): {3}\n".format(
|
req = "Required" if parameter.get("required", False) else "Not Required"
|
||||||
parameter.get("name"),
|
docs += f"{parameter.get('name')} ({parameter.get('type', 'No Type')}:{req}): {parameter.get('description')}\n"
|
||||||
parameter.get("type", "No Type"),
|
|
||||||
"Required" if parameter.get("required", False) else "Not Required",
|
|
||||||
parameter.get("description"),
|
|
||||||
)
|
|
||||||
|
|
||||||
return docs
|
return docs
|
||||||
|
|
||||||
|
|
||||||
_T = t.TypeVar("_T")
_R = t.TypeVar("_R")


def bind(
    instance: _T,
    method: t.Callable[t.Concatenate[_T, _P], _R],
    operation_spec: dict[str, str],
) -> t.Callable[_P, _R]:
    """Return ``method`` bound to ``instance`` as a plain callable.

    The returned function is renamed and documented after the swagger
    operation described by ``operation_spec``, so that ``help()`` on it
    (e.g. from interactive Python or pdb) is meaningful.
    """

    def bound(*args, **kwargs) -> _R:
        return method(instance, *args, **kwargs)

    # Keep only the part of the operation id before "Using".
    op_name = operation_spec["operationId"].split("Using")[0]
    bound.__name__ = str(op_name)
    bound.__doc__ = generate_docstring(operation_spec)
    return bound
|
||||||
|
|
||||||
|
|
||||||
class RestOperation(object):
|
class RestOperation:
|
||||||
def __init__(self, session, uri, method, parameters=None):
|
def __init__(
|
||||||
|
self,
|
||||||
|
session: "ECSSession",
|
||||||
|
uri: str,
|
||||||
|
method: str,
|
||||||
|
parameters: dict | None = None,
|
||||||
|
) -> None:
|
||||||
self.session = session
|
self.session = session
|
||||||
self.method = method
|
self.method = method
|
||||||
if parameters is None:
|
if parameters is None:
|
||||||
self.parameters = {}
|
self.parameters = {}
|
||||||
else:
|
else:
|
||||||
self.parameters = parameters
|
self.parameters = parameters
|
||||||
self.url = "{scheme}://{host}{base_path}{uri}".format(scheme="https", host=session._spec.get("host"), base_path=session._spec.get("basePath"), uri=uri)
|
self.url = (
|
||||||
|
f"https://{session._spec.get('host')}{session._spec.get('basePath')}{uri}"
|
||||||
|
)
|
||||||
|
|
||||||
def restmethod(self, *args, **kwargs):
|
def restmethod(self, *args, **kwargs) -> t.Any:
|
||||||
"""Do the hard work of making the request here"""
|
"""Do the hard work of making the request here"""
|
||||||
|
|
||||||
# gather named path parameters and do substitution on the URL
|
# gather named path parameters and do substitution on the URL
|
||||||
|
body_parameters: dict[str, t.Any] | None
|
||||||
if self.parameters:
|
if self.parameters:
|
||||||
path_parameters = {}
|
path_parameters = {}
|
||||||
body_parameters = {}
|
body_parameters = {}
|
||||||
@@ -143,14 +166,14 @@ class RestOperation(object):
|
|||||||
try:
|
try:
|
||||||
if body_parameters:
|
if body_parameters:
|
||||||
body_parameters_json = json.dumps(body_parameters)
|
body_parameters_json = json.dumps(body_parameters)
|
||||||
response = self.session.request.open(method=self.method, url=url, data=body_parameters_json)
|
response = self.session.request.open(
|
||||||
|
method=self.method, url=url, data=body_parameters_json
|
||||||
|
)
|
||||||
else:
|
else:
|
||||||
response = self.session.request.open(method=self.method, url=url)
|
response = self.session.request.open(method=self.method, url=url)
|
||||||
request_error = False
|
|
||||||
except HTTPError as e:
|
except HTTPError as e:
|
||||||
# An HTTPError has the same methods available as a valid response from request.open
|
# An HTTPError has the same methods available as a valid response from request.open
|
||||||
response = e
|
response = e
|
||||||
request_error = True
|
|
||||||
|
|
||||||
# Return the result if JSON and success ({} for empty responses)
|
# Return the result if JSON and success ({} for empty responses)
|
||||||
# Raise an exception if there was a failure.
|
# Raise an exception if there was a failure.
|
||||||
@@ -163,19 +186,20 @@ class RestOperation(object):
|
|||||||
if result or result == {}:
|
if result or result == {}:
|
||||||
if result_code and result_code < 400:
|
if result_code and result_code < 400:
|
||||||
return result
|
return result
|
||||||
else:
|
|
||||||
raise RestOperationException(result)
|
raise RestOperationException(result)
|
||||||
|
|
||||||
# Raise a generic RestOperationException if this fails
|
# Raise a generic RestOperationException if this fails
|
||||||
raise RestOperationException({"status": result_code, "errors": [{"message": "REST Operation Failed"}]})
|
raise RestOperationException(
|
||||||
|
{"status": result_code, "errors": [{"message": "REST Operation Failed"}]}
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
class Resource(object):
|
class Resource:
|
||||||
"""Implement basic CRUD operations against a path."""
|
"""Implement basic CRUD operations against a path."""
|
||||||
|
|
||||||
def __init__(self, session):
|
def __init__(self, session: "ECSSession") -> None:
|
||||||
self.session = session
|
self.session = session
|
||||||
self.parameters = {}
|
self.parameters: dict[str, t.Any] = {}
|
||||||
|
|
||||||
for url in session._spec.get("paths").keys():
|
for url in session._spec.get("paths").keys():
|
||||||
methods = session._spec.get("paths").get(url)
|
methods = session._spec.get("paths").get(url)
|
||||||
@@ -196,13 +220,20 @@ class Resource(object):
|
|||||||
elif method.lower() == "patch":
|
elif method.lower() == "patch":
|
||||||
operation_name = "Patch"
|
operation_name = "Patch"
|
||||||
else:
|
else:
|
||||||
raise SessionConfigurationException(to_native("Invalid REST method type {0}".format(method)))
|
raise SessionConfigurationException(
|
||||||
|
f"Invalid REST method type {method}"
|
||||||
|
)
|
||||||
|
|
||||||
# Get the non-parameter parts of the URL and append to the operation name
|
# Get the non-parameter parts of the URL and append to the operation name
|
||||||
# e.g /application/version -> GetApplicationVersion
|
# e.g /application/version -> GetApplicationVersion
|
||||||
# e.g. /application/{id} -> GetApplication
|
# e.g. /application/{id} -> GetApplication
|
||||||
# This may lead to duplicates, which we must prevent.
|
# This may lead to duplicates, which we must prevent.
|
||||||
operation_name += re.sub(r"{(.*)}", "", url).replace("/", " ").title().replace(" ", "")
|
operation_name += (
|
||||||
|
re.sub(r"{(.*)}", "", url)
|
||||||
|
.replace("/", " ")
|
||||||
|
.title()
|
||||||
|
.replace(" ", "")
|
||||||
|
)
|
||||||
operation_spec["operationId"] = operation_name
|
operation_spec["operationId"] = operation_name
|
||||||
|
|
||||||
op = RestOperation(session, url, method, parameters)
|
op = RestOperation(session, url, method, parameters)
|
||||||
@@ -210,19 +241,19 @@ class Resource(object):
|
|||||||
|
|
||||||
|
|
||||||
# Session to encapsulate the connection parameters of the module_utils Request object, the api spec, etc
|
# Session to encapsulate the connection parameters of the module_utils Request object, the api spec, etc
|
||||||
class ECSSession(object):
|
class ECSSession:
|
||||||
def __init__(self, name, **kwargs):
|
def __init__(self, name: str, **kwargs) -> None:
|
||||||
"""
|
"""
|
||||||
Initialize our session
|
Initialize our session
|
||||||
"""
|
"""
|
||||||
|
|
||||||
self._set_config(name, **kwargs)
|
self._set_config(name, **kwargs)
|
||||||
|
|
||||||
def client(self):
|
def client(self) -> Resource:
|
||||||
resource = Resource(self)
|
resource = Resource(self)
|
||||||
return resource
|
return resource
|
||||||
|
|
||||||
def _set_config(self, name, **kwargs):
|
def _set_config(self, name: str, **kwargs) -> None:
|
||||||
headers = {
|
headers = {
|
||||||
"Content-Type": "application/json",
|
"Content-Type": "application/json",
|
||||||
"Connection": "keep-alive",
|
"Connection": "keep-alive",
|
||||||
@@ -235,100 +266,136 @@ class ECSSession(object):
|
|||||||
if self._config:
|
if self._config:
|
||||||
break
|
break
|
||||||
if self._config is None:
|
if self._config is None:
|
||||||
raise SessionConfigurationException(to_native("No Configuration Found."))
|
raise SessionConfigurationException("No Configuration Found.")
|
||||||
|
|
||||||
# set up auth if passed
|
# set up auth if passed
|
||||||
entrust_api_user = self.get_config("entrust_api_user")
|
entrust_api_user: str | None = self.get_config("entrust_api_user")
|
||||||
entrust_api_key = self.get_config("entrust_api_key")
|
entrust_api_key: str | None = self.get_config("entrust_api_key")
|
||||||
if entrust_api_user and entrust_api_key:
|
if entrust_api_user and entrust_api_key:
|
||||||
self.request.url_username = entrust_api_user
|
self.request.url_username = entrust_api_user
|
||||||
self.request.url_password = entrust_api_key
|
self.request.url_password = entrust_api_key
|
||||||
else:
|
else:
|
||||||
raise SessionConfigurationException(to_native("User and key must be provided."))
|
raise SessionConfigurationException("User and key must be provided.")
|
||||||
|
|
||||||
# set up client certificate if passed (support all-in one or cert + key)
|
# set up client certificate if passed (support all-in one or cert + key)
|
||||||
entrust_api_cert = self.get_config("entrust_api_cert")
|
entrust_api_cert: str | None = self.get_config("entrust_api_cert")
|
||||||
entrust_api_cert_key = self.get_config("entrust_api_cert_key")
|
entrust_api_cert_key: str | None = self.get_config("entrust_api_cert_key")
|
||||||
if entrust_api_cert:
|
if entrust_api_cert:
|
||||||
self.request.client_cert = entrust_api_cert
|
self.request.client_cert = entrust_api_cert
|
||||||
if entrust_api_cert_key:
|
if entrust_api_cert_key:
|
||||||
self.request.client_key = entrust_api_cert_key
|
self.request.client_key = entrust_api_cert_key
|
||||||
else:
|
else:
|
||||||
raise SessionConfigurationException(to_native("Client certificate for authentication to the API must be provided."))
|
raise SessionConfigurationException(
|
||||||
|
"Client certificate for authentication to the API must be provided."
|
||||||
|
)
|
||||||
|
|
||||||
# set up the spec
|
# set up the spec
|
||||||
entrust_api_specification_path = self.get_config("entrust_api_specification_path")
|
entrust_api_specification_path = self.get_config(
|
||||||
|
"entrust_api_specification_path"
|
||||||
|
)
|
||||||
|
if not isinstance(entrust_api_specification_path, str):
|
||||||
|
raise SessionConfigurationException(
|
||||||
|
"entrust_api_specification_path must be a string."
|
||||||
|
)
|
||||||
|
|
||||||
if not entrust_api_specification_path.startswith("http") and not os.path.isfile(entrust_api_specification_path):
|
if not entrust_api_specification_path.startswith("http") and not os.path.isfile(
|
||||||
raise SessionConfigurationException(to_native("OpenAPI specification was not found at location {0}.".format(entrust_api_specification_path)))
|
entrust_api_specification_path
|
||||||
|
):
|
||||||
|
raise SessionConfigurationException(
|
||||||
|
f"OpenAPI specification was not found at location {entrust_api_specification_path}."
|
||||||
|
)
|
||||||
if not valid_file_format.match(entrust_api_specification_path):
|
if not valid_file_format.match(entrust_api_specification_path):
|
||||||
raise SessionConfigurationException(to_native("OpenAPI specification filename must end in .json, .yml or .yaml"))
|
raise SessionConfigurationException(
|
||||||
|
"OpenAPI specification filename must end in .json, .yml or .yaml"
|
||||||
|
)
|
||||||
|
|
||||||
self.verify = True
|
self.verify = True
|
||||||
|
|
||||||
if entrust_api_specification_path.startswith("http"):
|
if entrust_api_specification_path.startswith("http"):
|
||||||
try:
|
try:
|
||||||
http_response = Request().open(method="GET", url=entrust_api_specification_path)
|
http_response = Request().open(
|
||||||
|
method="GET", url=entrust_api_specification_path
|
||||||
|
)
|
||||||
http_response_contents = http_response.read()
|
http_response_contents = http_response.read()
|
||||||
if entrust_api_specification_path.endswith(".json"):
|
if entrust_api_specification_path.endswith(".json"):
|
||||||
self._spec = json.load(http_response_contents)
|
self._spec = json.load(http_response_contents)
|
||||||
elif entrust_api_specification_path.endswith(".yml") or entrust_api_specification_path.endswith(".yaml"):
|
elif entrust_api_specification_path.endswith(
|
||||||
|
".yml"
|
||||||
|
) or entrust_api_specification_path.endswith(".yaml"):
|
||||||
self._spec = yaml.safe_load(http_response_contents)
|
self._spec = yaml.safe_load(http_response_contents)
|
||||||
except HTTPError as e:
|
except HTTPError as e:
|
||||||
raise SessionConfigurationException(to_native("Error downloading specification from address '{0}', received error code '{1}'".format(
|
raise SessionConfigurationException(
|
||||||
entrust_api_specification_path, e.getcode())))
|
f"Error downloading specification from address '{entrust_api_specification_path}', received error code '{e.getcode()}'"
|
||||||
|
) from e
|
||||||
else:
|
else:
|
||||||
with open(entrust_api_specification_path) as f:
|
with open(entrust_api_specification_path, "rb") as f:
|
||||||
if ".json" in entrust_api_specification_path:
|
if ".json" in entrust_api_specification_path:
|
||||||
self._spec = json.load(f)
|
self._spec = json.load(f)
|
||||||
elif ".yml" in entrust_api_specification_path or ".yaml" in entrust_api_specification_path:
|
elif (
|
||||||
|
".yml" in entrust_api_specification_path
|
||||||
|
or ".yaml" in entrust_api_specification_path
|
||||||
|
):
|
||||||
self._spec = yaml.safe_load(f)
|
self._spec = yaml.safe_load(f)
|
||||||
|
|
||||||
def get_config(self, item):
|
def get_config(self, item: str) -> t.Any | None:
|
||||||
return self._config.get(item, None)
|
return self._config.get(item, None)
|
||||||
|
|
||||||
def _read_config_vars(self, name, **kwargs):
|
def _read_config_vars(self, name: str, **kwargs) -> dict[str, t.Any]:
|
||||||
"""Read configuration from variables passed to the module."""
|
"""Read configuration from variables passed to the module."""
|
||||||
config = {}
|
config = {}
|
||||||
|
|
||||||
entrust_api_specification_path = kwargs.get("entrust_api_specification_path")
|
entrust_api_specification_path = kwargs.get("entrust_api_specification_path")
|
||||||
if not entrust_api_specification_path or (not entrust_api_specification_path.startswith("http") and not os.path.isfile(entrust_api_specification_path)):
|
if not entrust_api_specification_path or (
|
||||||
|
not entrust_api_specification_path.startswith("http")
|
||||||
|
and not os.path.isfile(entrust_api_specification_path)
|
||||||
|
):
|
||||||
raise SessionConfigurationException(
|
raise SessionConfigurationException(
|
||||||
to_native(
|
f"Parameter provided for entrust_api_specification_path of value '{entrust_api_specification_path}'"
|
||||||
"Parameter provided for entrust_api_specification_path of value '{0}' was not a valid file path or HTTPS address.".format(
|
" was not a valid file path or HTTPS address."
|
||||||
entrust_api_specification_path
|
|
||||||
)
|
|
||||||
)
|
|
||||||
)
|
)
|
||||||
|
|
||||||
for required_file in ["entrust_api_cert", "entrust_api_cert_key"]:
|
for required_file in ["entrust_api_cert", "entrust_api_cert_key"]:
|
||||||
file_path = kwargs.get(required_file)
|
file_path = kwargs.get(required_file)
|
||||||
if not file_path or not os.path.isfile(file_path):
|
if not file_path or not os.path.isfile(file_path):
|
||||||
raise SessionConfigurationException(
|
raise SessionConfigurationException(
|
||||||
to_native("Parameter provided for {0} of value '{1}' was not a valid file path.".format(required_file, file_path))
|
f"Parameter provided for {required_file} of value '{file_path}' was not a valid file path."
|
||||||
)
|
)
|
||||||
|
|
||||||
for required_var in ["entrust_api_user", "entrust_api_key"]:
|
for required_var in ["entrust_api_user", "entrust_api_key"]:
|
||||||
if not kwargs.get(required_var):
|
if not kwargs.get(required_var):
|
||||||
raise SessionConfigurationException(to_native("Parameter provided for {0} was missing.".format(required_var)))
|
raise SessionConfigurationException(
|
||||||
|
f"Parameter provided for {required_var} was missing."
|
||||||
|
)
|
||||||
|
|
||||||
config["entrust_api_cert"] = kwargs.get("entrust_api_cert")
|
config["entrust_api_cert"] = kwargs.get("entrust_api_cert")
|
||||||
config["entrust_api_cert_key"] = kwargs.get("entrust_api_cert_key")
|
config["entrust_api_cert_key"] = kwargs.get("entrust_api_cert_key")
|
||||||
config["entrust_api_specification_path"] = kwargs.get("entrust_api_specification_path")
|
config["entrust_api_specification_path"] = kwargs.get(
|
||||||
|
"entrust_api_specification_path"
|
||||||
|
)
|
||||||
config["entrust_api_user"] = kwargs.get("entrust_api_user")
|
config["entrust_api_user"] = kwargs.get("entrust_api_user")
|
||||||
config["entrust_api_key"] = kwargs.get("entrust_api_key")
|
config["entrust_api_key"] = kwargs.get("entrust_api_key")
|
||||||
|
|
||||||
return config
|
return config
|
||||||
|
|
||||||
|
|
||||||
def ECSClient(entrust_api_user=None, entrust_api_key=None, entrust_api_cert=None, entrust_api_cert_key=None, entrust_api_specification_path=None):
|
def ECSClient(
|
||||||
|
entrust_api_user: str | None = None,
|
||||||
|
entrust_api_key: str | None = None,
|
||||||
|
entrust_api_cert: str | None = None,
|
||||||
|
entrust_api_cert_key: str | None = None,
|
||||||
|
entrust_api_specification_path: str | None = None,
|
||||||
|
) -> Resource:
|
||||||
"""Create an ECS client"""
|
"""Create an ECS client"""
|
||||||
|
|
||||||
if not YAML_FOUND:
|
if not YAML_FOUND:
|
||||||
raise SessionConfigurationException(missing_required_lib("PyYAML"), exception=YAML_IMP_ERR)
|
raise SessionConfigurationException(
|
||||||
|
missing_required_lib("PyYAML") # TODO: pass `exception=YAML_IMP_ERR`
|
||||||
|
)
|
||||||
|
|
||||||
if entrust_api_specification_path is None:
|
if entrust_api_specification_path is None:
|
||||||
entrust_api_specification_path = "https://cloud.entrust.net/EntrustCloud/documentation/cms-api-2.1.0.yaml"
|
entrust_api_specification_path = (
|
||||||
|
"https://cloud.entrust.net/EntrustCloud/documentation/cms-api-2.1.0.yaml"
|
||||||
|
)
|
||||||
|
|
||||||
# Not functionally necessary with current uses of this module_util, but better to be explicit for future use cases
|
# Not functionally necessary with current uses of this module_util, but better to be explicit for future use cases
|
||||||
entrust_api_user = to_text(entrust_api_user)
|
entrust_api_user = to_text(entrust_api_user)
|
||||||
@@ -344,3 +411,11 @@ def ECSClient(entrust_api_user=None, entrust_api_key=None, entrust_api_cert=None
|
|||||||
entrust_api_cert_key=entrust_api_cert_key,
|
entrust_api_cert_key=entrust_api_cert_key,
|
||||||
entrust_api_specification_path=entrust_api_specification_path,
|
entrust_api_specification_path=entrust_api_specification_path,
|
||||||
).client()
|
).client()
|
||||||
|
|
||||||
|
|
||||||
|
__all__ = (
|
||||||
|
"ecs_client_argument_spec",
|
||||||
|
"SessionConfigurationException",
|
||||||
|
"RestOperationException",
|
||||||
|
"ECSClient",
|
||||||
|
)
|
||||||
88
plugins/module_utils/_gnupg/cli.py
Normal file
88
plugins/module_utils/_gnupg/cli.py
Normal file
@@ -0,0 +1,88 @@
|
|||||||
|
# Copyright (c) 2023, Felix Fontein <felix@fontein.de>
|
||||||
|
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||||
|
# SPDX-License-Identifier: GPL-3.0-or-later
|
||||||
|
|
||||||
|
# Note that this module util is **PRIVATE** to the collection. It can have breaking changes at any time.
|
||||||
|
# Do not use this from other collections or standalone plugins/modules!
|
||||||
|
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import abc
|
||||||
|
import os
|
||||||
|
|
||||||
|
|
||||||
|
class GPGError(Exception):
    """Raised for GnuPG-related failures (command errors, unparsable gpg output)."""

    pass
|
||||||
|
|
||||||
|
|
||||||
|
class GPGRunner(metaclass=abc.ABCMeta):
    """Abstract interface for running gpg commands.

    Concrete subclasses implement ``run_command``; callers rely only on
    the contract documented there.
    """

    @abc.abstractmethod
    def run_command(
        self, command: list[str], *, check_rc: bool = True, data: bytes | None = None
    ) -> tuple[int, str, str]:
        """
        Run ``[gpg] + command`` and return ``(rc, stdout, stderr)``.

        If ``data`` is not ``None``, it will be provided as stdin.
        The code assumes it is a bytes string.

        Returned stdout and stderr are native Python strings.
        Pass ``check_rc=False`` to allow return codes != 0.

        Raises a ``GPGError`` in case of errors.
        """
||||||
|
|
||||||
|
|
||||||
|
def get_fingerprint_from_stdout(*, stdout: str) -> str:
    """Extract a key fingerprint from ``gpg --with-colons`` output.

    Finds the first ``fpr:`` record and returns its 10th colon-separated
    field. Raises ``GPGError`` when no usable ``fpr:`` record is present.
    """
    for line in stdout.splitlines():
        if not line.startswith("fpr:"):
            continue
        fields = line.split(":")
        if len(fields) > 9 and fields[9]:
            return fields[9]
        raise GPGError(
            f'Result line "{line}" does not have fingerprint as 10th component'
        )
    raise GPGError(f'Cannot extract fingerprint from stdout "{stdout}"')
|
||||||
|
|
||||||
|
|
||||||
|
def get_fingerprint_from_file(*, gpg_runner: GPGRunner, path: str) -> str:
    """Return the fingerprint of the key stored in the file at ``path``.

    Raises ``GPGError`` when the file does not exist or the gpg output
    cannot be parsed.
    """
    if not os.path.exists(path):
        raise GPGError(f"{path} does not exist")
    # "--import-options show-only": inspect the key instead of actually importing it.
    gpg_args = [
        "--no-keyring",
        "--with-colons",
        "--import-options",
        "show-only",
        "--import",
        path,
    ]
    dummy_rc, stdout, dummy_stderr = gpg_runner.run_command(gpg_args, check_rc=True)
    return get_fingerprint_from_stdout(stdout=stdout)
|
||||||
|
|
||||||
|
|
||||||
|
def get_fingerprint_from_bytes(*, gpg_runner: GPGRunner, content: bytes) -> str:
    """Return the fingerprint of the key given as a bytes string ``content``.

    The key material is fed to gpg via stdin; raises ``GPGError`` when the
    gpg output cannot be parsed.
    """
    gpg_args = [
        "--no-keyring",
        "--with-colons",
        "--import-options",
        "show-only",
        "--import",
        "/dev/stdin",
    ]
    dummy_rc, stdout, dummy_stderr = gpg_runner.run_command(
        gpg_args,
        data=content,
        check_rc=True,
    )
    return get_fingerprint_from_stdout(stdout=stdout)
|
||||||
|
|
||||||
|
|
||||||
|
__all__ = (
|
||||||
|
"GPGError",
|
||||||
|
"GPGRunner",
|
||||||
|
"get_fingerprint_from_stdout",
|
||||||
|
"get_fingerprint_from_file",
|
||||||
|
"get_fingerprint_from_bytes",
|
||||||
|
)
|
||||||
@@ -1,41 +1,50 @@
|
|||||||
# -*- coding: utf-8 -*-
|
|
||||||
#
|
|
||||||
# Copyright (c) 2016, Yanis Guenane <yanis+ansible@guenane.org>
|
# Copyright (c) 2016, Yanis Guenane <yanis+ansible@guenane.org>
|
||||||
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
|
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||||
# SPDX-License-Identifier: GPL-3.0-or-later
|
# SPDX-License-Identifier: GPL-3.0-or-later
|
||||||
|
|
||||||
from __future__ import absolute_import, division, print_function
|
# Note that this module util is **PRIVATE** to the collection. It can have breaking changes at any time.
|
||||||
__metaclass__ = type
|
# Do not use this from other collections or standalone plugins/modules!
|
||||||
|
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
import errno
|
import errno
|
||||||
import os
|
import os
|
||||||
import tempfile
|
import tempfile
|
||||||
|
import typing as t
|
||||||
|
|
||||||
|
|
||||||
def load_file(path, module=None):
|
if t.TYPE_CHECKING:
|
||||||
'''
|
from ansible.module_utils.basic import AnsibleModule
|
||||||
|
|
||||||
|
|
||||||
|
def load_file(*, path: str | os.PathLike, module: AnsibleModule | None = None) -> bytes:
    """
    Load the file as a bytes string.

    When ``module`` is given, errors are reported through ``module.fail_json``;
    otherwise the original exception propagates to the caller.
    """
    try:
        with open(path, "rb") as source:
            data = source.read()
    except Exception as exc:
        if module is None:
            raise
        module.fail_json(f"Error while loading {path} - {exc}")
    else:
        return data
|
||||||
|
|
||||||
|
|
||||||
def load_file_if_exists(path, module=None, ignore_errors=False):
|
def load_file_if_exists(
|
||||||
'''
|
*,
|
||||||
|
path: str | os.PathLike,
|
||||||
|
module: AnsibleModule | None = None,
|
||||||
|
ignore_errors: bool = False,
|
||||||
|
) -> bytes | None:
|
||||||
|
"""
|
||||||
Load the file as a bytes string. If the file does not exist, ``None`` is returned.
|
Load the file as a bytes string. If the file does not exist, ``None`` is returned.
|
||||||
|
|
||||||
If ``ignore_errors`` is ``True``, will ignore errors. Otherwise, errors are
|
If ``ignore_errors`` is ``True``, will ignore errors. Otherwise, errors are
|
||||||
raised as exceptions if ``module`` is not specified, and result in ``module.fail_json``
|
raised as exceptions if ``module`` is not specified, and result in ``module.fail_json``
|
||||||
being called when ``module`` is specified.
|
being called when ``module`` is specified.
|
||||||
'''
|
"""
|
||||||
try:
|
try:
|
||||||
with open(path, 'rb') as f:
|
with open(path, "rb") as f:
|
||||||
return f.read()
|
return f.read()
|
||||||
except EnvironmentError as exc:
|
except EnvironmentError as exc:
|
||||||
if exc.errno == errno.ENOENT:
|
if exc.errno == errno.ENOENT:
|
||||||
@@ -44,20 +53,26 @@ def load_file_if_exists(path, module=None, ignore_errors=False):
|
|||||||
return None
|
return None
|
||||||
if module is None:
|
if module is None:
|
||||||
raise
|
raise
|
||||||
module.fail_json('Error while loading {0} - {1}'.format(path, str(exc)))
|
module.fail_json(f"Error while loading {path} - {exc}")
|
||||||
except Exception as exc:
|
except Exception as exc:
|
||||||
if ignore_errors:
|
if ignore_errors:
|
||||||
return None
|
return None
|
||||||
if module is None:
|
if module is None:
|
||||||
raise
|
raise
|
||||||
module.fail_json('Error while loading {0} - {1}'.format(path, str(exc)))
|
module.fail_json(f"Error while loading {path} - {exc}")
|
||||||
|
|
||||||
|
|
||||||
def write_file(module, content, default_mode=None, path=None):
|
def write_file(
|
||||||
'''
|
*,
|
||||||
|
module: AnsibleModule,
|
||||||
|
content: bytes,
|
||||||
|
default_mode: str | int | None = None,
|
||||||
|
path: str | os.PathLike | None = None,
|
||||||
|
) -> None:
|
||||||
|
"""
|
||||||
Writes content into destination file as securely as possible.
|
Writes content into destination file as securely as possible.
|
||||||
Uses file arguments from module.
|
Uses file arguments from module.
|
||||||
'''
|
"""
|
||||||
# Find out parameters for file
|
# Find out parameters for file
|
||||||
try:
|
try:
|
||||||
file_args = module.load_file_common_arguments(module.params, path=path)
|
file_args = module.load_file_common_arguments(module.params, path=path)
|
||||||
@@ -66,11 +81,11 @@ def write_file(module, content, default_mode=None, path=None):
|
|||||||
# pre-2.10 behavior of module_utils/crypto.py for older Ansible versions.
|
# pre-2.10 behavior of module_utils/crypto.py for older Ansible versions.
|
||||||
file_args = module.load_file_common_arguments(module.params)
|
file_args = module.load_file_common_arguments(module.params)
|
||||||
if path is not None:
|
if path is not None:
|
||||||
file_args['path'] = path
|
file_args["path"] = path
|
||||||
if file_args['mode'] is None:
|
if file_args["mode"] is None:
|
||||||
file_args['mode'] = default_mode
|
file_args["mode"] = default_mode
|
||||||
# Create tempfile name
|
# Create tempfile name
|
||||||
tmp_fd, tmp_name = tempfile.mkstemp(prefix=b'.ansible_tmp')
|
tmp_fd, tmp_name = tempfile.mkstemp(prefix=b".ansible_tmp")
|
||||||
try:
|
try:
|
||||||
os.close(tmp_fd)
|
os.close(tmp_fd)
|
||||||
except Exception:
|
except Exception:
|
||||||
@@ -87,18 +102,23 @@ def write_file(module, content, default_mode=None, path=None):
|
|||||||
os.remove(tmp_name)
|
os.remove(tmp_name)
|
||||||
except Exception:
|
except Exception:
|
||||||
pass
|
pass
|
||||||
module.fail_json(msg='Error while writing result into temporary file: {0}'.format(e))
|
module.fail_json(msg=f"Error while writing result into temporary file: {e}")
|
||||||
# Update destination to wanted permissions
|
# Update destination to wanted permissions
|
||||||
if os.path.exists(file_args['path']):
|
if os.path.exists(file_args["path"]):
|
||||||
module.set_fs_attributes_if_different(file_args, False)
|
module.set_fs_attributes_if_different(file_args, False)
|
||||||
# Move tempfile to final destination
|
# Move tempfile to final destination
|
||||||
module.atomic_move(os.path.abspath(tmp_name), os.path.abspath(file_args['path']))
|
module.atomic_move(
|
||||||
|
os.path.abspath(tmp_name), os.path.abspath(file_args["path"])
|
||||||
|
)
|
||||||
# Try to update permissions again
|
# Try to update permissions again
|
||||||
if not module.check_file_absent_if_check_mode(file_args['path']):
|
if not module.check_file_absent_if_check_mode(file_args["path"]):
|
||||||
module.set_fs_attributes_if_different(file_args, False)
|
module.set_fs_attributes_if_different(file_args, False)
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
try:
|
try:
|
||||||
os.remove(tmp_name)
|
os.remove(tmp_name)
|
||||||
except Exception:
|
except Exception:
|
||||||
pass
|
pass
|
||||||
module.fail_json(msg='Error while writing result: {0}'.format(e))
|
module.fail_json(msg=f"Error while writing result: {e}")
|
||||||
|
|
||||||
|
|
||||||
|
__all__ = ("load_file", "load_file_if_exists", "write_file")
|
||||||
473
plugins/module_utils/_openssh/backends/common.py
Normal file
473
plugins/module_utils/_openssh/backends/common.py
Normal file
@@ -0,0 +1,473 @@
|
|||||||
|
# Copyright (c) 2021, Andrew Pantuso (@ajpantuso) <ajpantuso@gmail.com>
|
||||||
|
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||||
|
# SPDX-License-Identifier: GPL-3.0-or-later
|
||||||
|
|
||||||
|
# Note that this module util is **PRIVATE** to the collection. It can have breaking changes at any time.
|
||||||
|
# Do not use this from other collections or standalone plugins/modules!
|
||||||
|
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import abc
|
||||||
|
import os
|
||||||
|
import stat
|
||||||
|
import traceback
|
||||||
|
import typing as t
|
||||||
|
|
||||||
|
from ansible_collections.community.crypto.plugins.module_utils._openssh.utils import (
|
||||||
|
parse_openssh_version,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
if t.TYPE_CHECKING:
|
||||||
|
from ansible.module_utils.basic import AnsibleModule
|
||||||
|
from ansible_collections.community.crypto.plugins.module_utils._openssh.certificate import (
|
||||||
|
OpensshCertificateTimeParameters,
|
||||||
|
)
|
||||||
|
from cryptography.hazmat.primitives.asymmetric.types import (
|
||||||
|
CertificateIssuerPrivateKeyTypes,
|
||||||
|
PrivateKeyTypes,
|
||||||
|
)
|
||||||
|
|
||||||
|
Param = t.ParamSpec("Param")
|
||||||
|
|
||||||
|
|
||||||
|
def restore_on_failure(
|
||||||
|
f: t.Callable[t.Concatenate[AnsibleModule, str | os.PathLike, Param], None],
|
||||||
|
) -> t.Callable[t.Concatenate[AnsibleModule, str | os.PathLike, Param], None]:
|
||||||
|
def backup_and_restore(
|
||||||
|
module: AnsibleModule, path: str | os.PathLike, *args, **kwargs
|
||||||
|
) -> None:
|
||||||
|
backup_file = module.backup_local(path) if os.path.exists(path) else None
|
||||||
|
|
||||||
|
try:
|
||||||
|
f(module, path, *args, **kwargs)
|
||||||
|
except Exception:
|
||||||
|
if backup_file is not None:
|
||||||
|
module.atomic_move(os.path.abspath(backup_file), os.path.abspath(path))
|
||||||
|
raise
|
||||||
|
module.add_cleanup_file(backup_file)
|
||||||
|
|
||||||
|
return backup_and_restore
|
||||||
|
|
||||||
|
|
||||||
|
@restore_on_failure
|
||||||
|
def safe_atomic_move(
|
||||||
|
module: AnsibleModule, path: str | os.PathLike, destination: str | os.PathLike
|
||||||
|
) -> None:
|
||||||
|
module.atomic_move(os.path.abspath(path), os.path.abspath(destination))
|
||||||
|
|
||||||
|
|
||||||
|
def _restore_all_on_failure(
|
||||||
|
f: t.Callable[
|
||||||
|
t.Concatenate[
|
||||||
|
OpensshModule, list[tuple[str | os.PathLike, str | os.PathLike]], Param
|
||||||
|
],
|
||||||
|
None,
|
||||||
|
],
|
||||||
|
) -> t.Callable[
|
||||||
|
t.Concatenate[
|
||||||
|
OpensshModule, list[tuple[str | os.PathLike, str | os.PathLike]], Param
|
||||||
|
],
|
||||||
|
None,
|
||||||
|
]:
|
||||||
|
def backup_and_restore(
|
||||||
|
self: OpensshModule,
|
||||||
|
sources_and_destinations: list[tuple[str | os.PathLike, str | os.PathLike]],
|
||||||
|
*args,
|
||||||
|
**kwargs,
|
||||||
|
) -> None:
|
||||||
|
backups = [
|
||||||
|
(d, self.module.backup_local(d))
|
||||||
|
for s, d in sources_and_destinations
|
||||||
|
if os.path.exists(d)
|
||||||
|
]
|
||||||
|
|
||||||
|
try:
|
||||||
|
f(self, sources_and_destinations, *args, **kwargs)
|
||||||
|
except Exception:
|
||||||
|
for destination, backup in backups:
|
||||||
|
self.module.atomic_move(
|
||||||
|
os.path.abspath(backup), os.path.abspath(destination)
|
||||||
|
)
|
||||||
|
raise
|
||||||
|
for destination, backup in backups:
|
||||||
|
self.module.add_cleanup_file(backup)
|
||||||
|
|
||||||
|
return backup_and_restore
|
||||||
|
|
||||||
|
|
||||||
|
class OpensshModule(metaclass=abc.ABCMeta):
|
||||||
|
def __init__(self, *, module: AnsibleModule) -> None:
|
||||||
|
self.module = module
|
||||||
|
|
||||||
|
self.changed: bool = False
|
||||||
|
self.check_mode: bool = self.module.check_mode
|
||||||
|
|
||||||
|
def execute(self) -> t.NoReturn:
|
||||||
|
try:
|
||||||
|
self._execute()
|
||||||
|
except Exception as e:
|
||||||
|
self.module.fail_json(
|
||||||
|
msg=f"unexpected error occurred: {e}",
|
||||||
|
exception=traceback.format_exc(),
|
||||||
|
)
|
||||||
|
|
||||||
|
self.module.exit_json(**self.result)
|
||||||
|
|
||||||
|
@abc.abstractmethod
|
||||||
|
def _execute(self) -> None:
|
||||||
|
pass
|
||||||
|
|
||||||
|
@property
|
||||||
|
def result(self) -> dict[str, t.Any]:
|
||||||
|
result = self._result
|
||||||
|
|
||||||
|
result["changed"] = self.changed
|
||||||
|
|
||||||
|
if self.module._diff: # pylint: disable=protected-access
|
||||||
|
result["diff"] = self.diff
|
||||||
|
|
||||||
|
return result
|
||||||
|
|
||||||
|
@property
|
||||||
|
@abc.abstractmethod
|
||||||
|
def _result(self) -> dict[str, t.Any]:
|
||||||
|
pass
|
||||||
|
|
||||||
|
@property
|
||||||
|
@abc.abstractmethod
|
||||||
|
def diff(self) -> dict[str, t.Any]:
|
||||||
|
pass
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def skip_if_check_mode(f: t.Callable[Param, None]) -> t.Callable[Param, None]:
|
||||||
|
def wrapper(self, *args, **kwargs) -> None:
|
||||||
|
if not self.check_mode:
|
||||||
|
f(self, *args, **kwargs)
|
||||||
|
|
||||||
|
return wrapper # type: ignore
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def trigger_change(f: t.Callable[Param, None]) -> t.Callable[Param, None]:
|
||||||
|
def wrapper(self, *args, **kwargs) -> None:
|
||||||
|
f(self, *args, **kwargs)
|
||||||
|
self.changed = True
|
||||||
|
|
||||||
|
return wrapper # type: ignore
|
||||||
|
|
||||||
|
def _check_if_base_dir(self, path: str | os.PathLike) -> None:
|
||||||
|
base_dir = os.path.dirname(path) or "."
|
||||||
|
if not os.path.isdir(base_dir):
|
||||||
|
self.module.fail_json(
|
||||||
|
name=base_dir,
|
||||||
|
msg=f"The directory {base_dir} does not exist or the file is not a directory",
|
||||||
|
)
|
||||||
|
|
||||||
|
def _get_ssh_version(self) -> str | None:
|
||||||
|
ssh_bin = self.module.get_bin_path("ssh")
|
||||||
|
if not ssh_bin:
|
||||||
|
return None
|
||||||
|
return parse_openssh_version(
|
||||||
|
self.module.run_command([ssh_bin, "-V", "-q"], check_rc=True)[2].strip()
|
||||||
|
)
|
||||||
|
|
||||||
|
@_restore_all_on_failure
|
||||||
|
def _safe_secure_move(
|
||||||
|
self,
|
||||||
|
sources_and_destinations: list[tuple[str | os.PathLike, str | os.PathLike]],
|
||||||
|
) -> None:
|
||||||
|
"""Moves a list of files from 'source' to 'destination' and restores 'destination' from backup upon failure.
|
||||||
|
If 'destination' does not already exist, then 'source' permissions are preserved to prevent
|
||||||
|
exposing protected data ('atomic_move' uses the 'destination' base directory mask for
|
||||||
|
permissions if 'destination' does not already exists).
|
||||||
|
"""
|
||||||
|
for source, destination in sources_and_destinations:
|
||||||
|
if os.path.exists(destination):
|
||||||
|
self.module.atomic_move(
|
||||||
|
os.path.abspath(source), os.path.abspath(destination)
|
||||||
|
)
|
||||||
|
else:
|
||||||
|
self.module.preserved_copy(source, destination)
|
||||||
|
|
||||||
|
def _update_permissions(self, path: str | os.PathLike) -> None:
|
||||||
|
file_args = self.module.load_file_common_arguments(self.module.params)
|
||||||
|
file_args["path"] = path
|
||||||
|
|
||||||
|
if not self.module.check_file_absent_if_check_mode(path):
|
||||||
|
self.changed = self.module.set_fs_attributes_if_different(
|
||||||
|
file_args, self.changed
|
||||||
|
)
|
||||||
|
else:
|
||||||
|
self.changed = True
|
||||||
|
|
||||||
|
|
||||||
|
class KeygenCommand:
|
||||||
|
def __init__(self, module: AnsibleModule) -> None:
|
||||||
|
self._bin_path = module.get_bin_path("ssh-keygen", True)
|
||||||
|
self._run_command = module.run_command
|
||||||
|
|
||||||
|
def generate_certificate(
|
||||||
|
self,
|
||||||
|
*,
|
||||||
|
certificate_path: str,
|
||||||
|
identifier: str,
|
||||||
|
options: list[str] | None,
|
||||||
|
pkcs11_provider: str | None,
|
||||||
|
principals: list[str] | None,
|
||||||
|
serial_number: int | None,
|
||||||
|
signature_algorithm: str | None,
|
||||||
|
signing_key_path: str,
|
||||||
|
cert_type: t.Literal["host", "user"] | None,
|
||||||
|
time_parameters: OpensshCertificateTimeParameters,
|
||||||
|
use_agent: bool,
|
||||||
|
**kwargs,
|
||||||
|
) -> tuple[int, str, str]:
|
||||||
|
args = [self._bin_path, "-s", signing_key_path, "-P", "", "-I", identifier]
|
||||||
|
|
||||||
|
if options:
|
||||||
|
for option in options:
|
||||||
|
args.extend(["-O", option])
|
||||||
|
if pkcs11_provider:
|
||||||
|
args.extend(["-D", pkcs11_provider])
|
||||||
|
if principals:
|
||||||
|
args.extend(["-n", ",".join(principals)])
|
||||||
|
if serial_number is not None:
|
||||||
|
args.extend(["-z", str(serial_number)])
|
||||||
|
if cert_type == "host":
|
||||||
|
args.extend(["-h"])
|
||||||
|
if use_agent:
|
||||||
|
args.extend(["-U"])
|
||||||
|
if time_parameters.validity_string:
|
||||||
|
args.extend(["-V", time_parameters.validity_string])
|
||||||
|
if signature_algorithm:
|
||||||
|
args.extend(["-t", signature_algorithm])
|
||||||
|
args.append(certificate_path)
|
||||||
|
|
||||||
|
return self._run_command(args, **kwargs)
|
||||||
|
|
||||||
|
def generate_keypair(
|
||||||
|
self,
|
||||||
|
*,
|
||||||
|
private_key_path: str,
|
||||||
|
size: int,
|
||||||
|
key_type: str,
|
||||||
|
comment: str | None,
|
||||||
|
**kwargs,
|
||||||
|
) -> tuple[int, str, str]:
|
||||||
|
args = [
|
||||||
|
self._bin_path,
|
||||||
|
"-q",
|
||||||
|
"-N",
|
||||||
|
"",
|
||||||
|
"-b",
|
||||||
|
str(size),
|
||||||
|
"-t",
|
||||||
|
key_type,
|
||||||
|
"-f",
|
||||||
|
private_key_path,
|
||||||
|
"-C",
|
||||||
|
comment or "",
|
||||||
|
]
|
||||||
|
|
||||||
|
# "y" must be entered in response to the "overwrite" prompt
|
||||||
|
data = "y" if os.path.exists(private_key_path) else None
|
||||||
|
|
||||||
|
return self._run_command(args, data=data, **kwargs)
|
||||||
|
|
||||||
|
def get_certificate_info(
|
||||||
|
self, *, certificate_path: str, **kwargs
|
||||||
|
) -> tuple[int, str, str]:
|
||||||
|
return self._run_command(
|
||||||
|
[self._bin_path, "-L", "-f", certificate_path], **kwargs
|
||||||
|
)
|
||||||
|
|
||||||
|
def get_matching_public_key(
|
||||||
|
self, *, private_key_path: str, **kwargs
|
||||||
|
) -> tuple[int, str, str]:
|
||||||
|
return self._run_command(
|
||||||
|
[self._bin_path, "-P", "", "-y", "-f", private_key_path], **kwargs
|
||||||
|
)
|
||||||
|
|
||||||
|
def get_private_key(
|
||||||
|
self, *, private_key_path: str, **kwargs
|
||||||
|
) -> tuple[int, str, str]:
|
||||||
|
return self._run_command(
|
||||||
|
[self._bin_path, "-l", "-f", private_key_path], **kwargs
|
||||||
|
)
|
||||||
|
|
||||||
|
def update_comment(
|
||||||
|
self,
|
||||||
|
*,
|
||||||
|
private_key_path: str,
|
||||||
|
comment: str,
|
||||||
|
force_new_format: bool = True,
|
||||||
|
**kwargs,
|
||||||
|
) -> tuple[int, str, str]:
|
||||||
|
if os.path.exists(private_key_path) and not os.access(
|
||||||
|
private_key_path, os.W_OK
|
||||||
|
):
|
||||||
|
try:
|
||||||
|
os.chmod(private_key_path, stat.S_IWUSR + stat.S_IRUSR)
|
||||||
|
except (IOError, OSError) as e:
|
||||||
|
raise ValueError(
|
||||||
|
f"The private key at {private_key_path} is not writeable preventing a comment update ({e})"
|
||||||
|
) from e
|
||||||
|
|
||||||
|
command = [self._bin_path, "-q"]
|
||||||
|
if force_new_format:
|
||||||
|
command.append("-o")
|
||||||
|
command.extend(["-c", "-C", comment, "-f", private_key_path])
|
||||||
|
return self._run_command(command, **kwargs)
|
||||||
|
|
||||||
|
|
||||||
|
_PrivateKey = t.TypeVar("_PrivateKey", bound="PrivateKey")
|
||||||
|
|
||||||
|
|
||||||
|
class PrivateKey:
|
||||||
|
def __init__(
|
||||||
|
self, *, size: int, key_type: str, fingerprint: str, key_format: str = ""
|
||||||
|
) -> None:
|
||||||
|
self._size = size
|
||||||
|
self._type = key_type
|
||||||
|
self._fingerprint = fingerprint
|
||||||
|
self._format = key_format
|
||||||
|
|
||||||
|
@property
|
||||||
|
def size(self) -> int:
|
||||||
|
return self._size
|
||||||
|
|
||||||
|
@property
|
||||||
|
def type(self) -> str:
|
||||||
|
return self._type
|
||||||
|
|
||||||
|
@property
|
||||||
|
def fingerprint(self) -> str:
|
||||||
|
return self._fingerprint
|
||||||
|
|
||||||
|
@property
|
||||||
|
def format(self) -> str:
|
||||||
|
return self._format
|
||||||
|
|
||||||
|
@classmethod
|
||||||
|
def from_string(cls: t.Type[_PrivateKey], string: str) -> _PrivateKey:
|
||||||
|
properties = string.split()
|
||||||
|
|
||||||
|
return cls(
|
||||||
|
size=int(properties[0]),
|
||||||
|
key_type=properties[-1][1:-1].lower(),
|
||||||
|
fingerprint=properties[1],
|
||||||
|
)
|
||||||
|
|
||||||
|
def to_dict(self) -> dict[str, t.Any]:
|
||||||
|
return {
|
||||||
|
"size": self._size,
|
||||||
|
"type": self._type,
|
||||||
|
"fingerprint": self._fingerprint,
|
||||||
|
"format": self._format,
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
_PublicKey = t.TypeVar("_PublicKey", bound="PublicKey")
|
||||||
|
|
||||||
|
|
||||||
|
class PublicKey:
|
||||||
|
def __init__(self, *, type_string: str, data: str, comment: str | None) -> None:
|
||||||
|
self._type_string = type_string
|
||||||
|
self._data = data
|
||||||
|
self._comment = comment
|
||||||
|
|
||||||
|
def __eq__(self, other: object) -> bool:
|
||||||
|
if not isinstance(other, type(self)):
|
||||||
|
return NotImplemented
|
||||||
|
|
||||||
|
return all(
|
||||||
|
[
|
||||||
|
self._type_string == other._type_string,
|
||||||
|
self._data == other._data,
|
||||||
|
(
|
||||||
|
(self._comment == other._comment)
|
||||||
|
if self._comment is not None and other._comment is not None
|
||||||
|
else True
|
||||||
|
),
|
||||||
|
]
|
||||||
|
)
|
||||||
|
|
||||||
|
def __ne__(self, other: object) -> bool:
|
||||||
|
return not self == other
|
||||||
|
|
||||||
|
def __str__(self) -> str:
|
||||||
|
return f"{self._type_string} {self._data}"
|
||||||
|
|
||||||
|
@property
|
||||||
|
def comment(self) -> str | None:
|
||||||
|
return self._comment
|
||||||
|
|
||||||
|
@comment.setter
|
||||||
|
def comment(self, value: str | None) -> None:
|
||||||
|
self._comment = value
|
||||||
|
|
||||||
|
@property
|
||||||
|
def data(self) -> str:
|
||||||
|
return self._data
|
||||||
|
|
||||||
|
@property
|
||||||
|
def type_string(self) -> str:
|
||||||
|
return self._type_string
|
||||||
|
|
||||||
|
@classmethod
|
||||||
|
def from_string(cls: t.Type[_PublicKey], string: str) -> _PublicKey:
|
||||||
|
properties = string.strip("\n").split(" ", 2)
|
||||||
|
|
||||||
|
return cls(
|
||||||
|
type_string=properties[0],
|
||||||
|
data=properties[1],
|
||||||
|
comment=properties[2] if len(properties) > 2 else "",
|
||||||
|
)
|
||||||
|
|
||||||
|
@classmethod
|
||||||
|
def load(cls: t.Type[_PublicKey], path: str | os.PathLike) -> _PublicKey | None:
|
||||||
|
with open(path, "r", encoding="utf-8") as f:
|
||||||
|
properties = f.read().strip(" \n").split(" ", 2)
|
||||||
|
|
||||||
|
if len(properties) < 2:
|
||||||
|
return None
|
||||||
|
|
||||||
|
return cls(
|
||||||
|
type_string=properties[0],
|
||||||
|
data=properties[1],
|
||||||
|
comment="" if len(properties) <= 2 else properties[2],
|
||||||
|
)
|
||||||
|
|
||||||
|
def to_dict(self) -> dict[str, t.Any]:
|
||||||
|
return {
|
||||||
|
"comment": self._comment,
|
||||||
|
"public_key": self._data,
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
def parse_private_key_format(
|
||||||
|
*,
|
||||||
|
path: str | os.PathLike,
|
||||||
|
) -> t.Literal["SSH", "PKCS8", "PKCS1", ""]:
|
||||||
|
with open(path, "r", encoding="utf-8") as file:
|
||||||
|
header = file.readline().strip()
|
||||||
|
|
||||||
|
if header == "-----BEGIN OPENSSH PRIVATE KEY-----":
|
||||||
|
return "SSH"
|
||||||
|
if header == "-----BEGIN PRIVATE KEY-----":
|
||||||
|
return "PKCS8"
|
||||||
|
if header == "-----BEGIN RSA PRIVATE KEY-----":
|
||||||
|
return "PKCS1"
|
||||||
|
|
||||||
|
return ""
|
||||||
|
|
||||||
|
|
||||||
|
__all__ = (
|
||||||
|
"restore_on_failure",
|
||||||
|
"safe_atomic_move",
|
||||||
|
"OpensshModule",
|
||||||
|
"KeygenCommand",
|
||||||
|
"PrivateKey",
|
||||||
|
"PublicKey",
|
||||||
|
"parse_private_key_format",
|
||||||
|
)
|
||||||
590
plugins/module_utils/_openssh/backends/keypair_backend.py
Normal file
590
plugins/module_utils/_openssh/backends/keypair_backend.py
Normal file
@@ -0,0 +1,590 @@
|
|||||||
|
# Copyright (c) 2018, David Kainz <dkainz@mgit.at> <dave.jokain@gmx.at>
|
||||||
|
# Copyright (c) 2021, Andrew Pantuso (@ajpantuso) <ajpantuso@gmail.com>
|
||||||
|
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||||
|
# SPDX-License-Identifier: GPL-3.0-or-later
|
||||||
|
|
||||||
|
# Note that this module util is **PRIVATE** to the collection. It can have breaking changes at any time.
|
||||||
|
# Do not use this from other collections or standalone plugins/modules!
|
||||||
|
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import abc
|
||||||
|
import os
|
||||||
|
import typing as t
|
||||||
|
|
||||||
|
from ansible.module_utils.basic import missing_required_lib
|
||||||
|
from ansible.module_utils.common.text.converters import to_bytes, to_text
|
||||||
|
from ansible_collections.community.crypto.plugins.module_utils._cryptography_dep import (
|
||||||
|
COLLECTION_MINIMUM_CRYPTOGRAPHY_VERSION,
|
||||||
|
)
|
||||||
|
from ansible_collections.community.crypto.plugins.module_utils._openssh.backends.common import (
|
||||||
|
KeygenCommand,
|
||||||
|
OpensshModule,
|
||||||
|
PrivateKey,
|
||||||
|
PublicKey,
|
||||||
|
parse_private_key_format,
|
||||||
|
)
|
||||||
|
from ansible_collections.community.crypto.plugins.module_utils._openssh.cryptography import (
|
||||||
|
CRYPTOGRAPHY_VERSION,
|
||||||
|
HAS_OPENSSH_SUPPORT,
|
||||||
|
InvalidCommentError,
|
||||||
|
InvalidPassphraseError,
|
||||||
|
InvalidPrivateKeyFileError,
|
||||||
|
OpenSSHError,
|
||||||
|
OpensshKeypair,
|
||||||
|
)
|
||||||
|
from ansible_collections.community.crypto.plugins.module_utils._openssh.utils import (
|
||||||
|
any_in,
|
||||||
|
file_mode,
|
||||||
|
secure_write,
|
||||||
|
)
|
||||||
|
from ansible_collections.community.crypto.plugins.module_utils._version import (
|
||||||
|
LooseVersion,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
if t.TYPE_CHECKING:
|
||||||
|
from ansible.module_utils.basic import AnsibleModule
|
||||||
|
from cryptography.hazmat.primitives.asymmetric.types import (
|
||||||
|
CertificateIssuerPrivateKeyTypes,
|
||||||
|
PrivateKeyTypes,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
class KeypairBackend(OpensshModule, metaclass=abc.ABCMeta):
|
||||||
|
|
||||||
|
def __init__(self, *, module: AnsibleModule) -> None:
|
||||||
|
super().__init__(module=module)
|
||||||
|
|
||||||
|
self.comment: str | None = self.module.params["comment"]
|
||||||
|
self.private_key_path: str = self.module.params["path"]
|
||||||
|
self.public_key_path = self.private_key_path + ".pub"
|
||||||
|
self.regenerate: t.Literal[
|
||||||
|
"never", "fail", "partial_idempotence", "full_idempotence", "always"
|
||||||
|
] = (
|
||||||
|
self.module.params["regenerate"]
|
||||||
|
if not self.module.params["force"]
|
||||||
|
else "always"
|
||||||
|
)
|
||||||
|
self.state: t.Literal["present", "absent"] = self.module.params["state"]
|
||||||
|
self.type: t.Literal["rsa", "dsa", "rsa1", "ecdsa", "ed25519"] = (
|
||||||
|
self.module.params["type"]
|
||||||
|
)
|
||||||
|
|
||||||
|
self.size: int = self._get_size(self.module.params["size"])
|
||||||
|
self._validate_path()
|
||||||
|
|
||||||
|
self.original_private_key: PrivateKey | None = None
|
||||||
|
self.original_public_key: PublicKey | None = None
|
||||||
|
self.private_key: PrivateKey | None = None
|
||||||
|
self.public_key: PublicKey | None = None
|
||||||
|
|
||||||
|
def _get_size(self, size: int | None) -> int:
|
||||||
|
if self.type in ("rsa", "rsa1"):
|
||||||
|
result = 4096 if size is None else size
|
||||||
|
if result < 1024:
|
||||||
|
return self.module.fail_json(
|
||||||
|
msg="For RSA keys, the minimum size is 1024 bits and the default is 4096 bits. "
|
||||||
|
+ "Attempting to use bit lengths under 1024 will cause the module to fail."
|
||||||
|
)
|
||||||
|
elif self.type == "dsa":
|
||||||
|
result = 1024 if size is None else size
|
||||||
|
if result != 1024:
|
||||||
|
return self.module.fail_json(
|
||||||
|
msg="DSA keys must be exactly 1024 bits as specified by FIPS 186-2."
|
||||||
|
)
|
||||||
|
elif self.type == "ecdsa":
|
||||||
|
result = 256 if size is None else size
|
||||||
|
if result not in (256, 384, 521):
|
||||||
|
return self.module.fail_json(
|
||||||
|
msg="For ECDSA keys, size determines the key length by selecting from one of "
|
||||||
|
+ "three elliptic curve sizes: 256, 384 or 521 bits. "
|
||||||
|
+ "Attempting to use bit lengths other than these three values for ECDSA keys will "
|
||||||
|
+ "cause this module to fail."
|
||||||
|
)
|
||||||
|
elif self.type == "ed25519":
|
||||||
|
# User input is ignored for `key size` when `key type` is ed25519
|
||||||
|
result = 256
|
||||||
|
else:
|
||||||
|
return self.module.fail_json(
|
||||||
|
msg=f"{self.type} is not a valid value for key type"
|
||||||
|
)
|
||||||
|
|
||||||
|
return result
|
||||||
|
|
||||||
|
def _validate_path(self) -> None:
|
||||||
|
self._check_if_base_dir(self.private_key_path)
|
||||||
|
|
||||||
|
if os.path.isdir(self.private_key_path):
|
||||||
|
self.module.fail_json(
|
||||||
|
msg=f"{self.private_key_path} is a directory. Please specify a path to a file."
|
||||||
|
)
|
||||||
|
|
||||||
|
def _execute(self) -> None:
|
||||||
|
self.original_private_key = self._load_private_key()
|
||||||
|
self.original_public_key = self._load_public_key()
|
||||||
|
|
||||||
|
if self.state == "present":
|
||||||
|
self._validate_key_load()
|
||||||
|
|
||||||
|
if self._should_generate():
|
||||||
|
self._generate()
|
||||||
|
elif not self._public_key_valid():
|
||||||
|
self._restore_public_key()
|
||||||
|
|
||||||
|
self.private_key = self._load_private_key()
|
||||||
|
self.public_key = self._load_public_key()
|
||||||
|
|
||||||
|
for path in (self.private_key_path, self.public_key_path):
|
||||||
|
self._update_permissions(path)
|
||||||
|
else:
|
||||||
|
if self._should_remove():
|
||||||
|
self._remove()
|
||||||
|
|
||||||
|
def _load_private_key(self) -> PrivateKey | None:
|
||||||
|
result = None
|
||||||
|
if self._private_key_exists():
|
||||||
|
try:
|
||||||
|
result = self._get_private_key()
|
||||||
|
except Exception:
|
||||||
|
pass
|
||||||
|
|
||||||
|
return result
|
||||||
|
|
||||||
|
def _private_key_exists(self) -> bool:
|
||||||
|
return os.path.exists(self.private_key_path)
|
||||||
|
|
||||||
|
@abc.abstractmethod
|
||||||
|
def _get_private_key(self) -> PrivateKey:
|
||||||
|
pass
|
||||||
|
|
||||||
|
def _load_public_key(self) -> PublicKey | None:
|
||||||
|
result = None
|
||||||
|
if self._public_key_exists():
|
||||||
|
try:
|
||||||
|
result = PublicKey.load(self.public_key_path)
|
||||||
|
except (IOError, OSError):
|
||||||
|
pass
|
||||||
|
return result
|
||||||
|
|
||||||
|
def _public_key_exists(self) -> bool:
|
||||||
|
return os.path.exists(self.public_key_path)
|
||||||
|
|
||||||
|
def _validate_key_load(self) -> None:
|
||||||
|
if (
|
||||||
|
self._private_key_exists()
|
||||||
|
and self.regenerate in ("never", "fail", "partial_idempotence")
|
||||||
|
and (self.original_private_key is None or not self._private_key_readable())
|
||||||
|
):
|
||||||
|
self.module.fail_json(
|
||||||
|
msg="Unable to read the key. The key is protected with a passphrase or broken. "
|
||||||
|
+ "Will not proceed. To force regeneration, call the module with `generate` "
|
||||||
|
+ "set to `full_idempotence` or `always`, or with `force=true`."
|
||||||
|
)
|
||||||
|
|
||||||
|
@abc.abstractmethod
|
||||||
|
def _private_key_readable(self) -> bool:
|
||||||
|
pass
|
||||||
|
|
||||||
|
def _should_generate(self) -> bool:
|
||||||
|
if self.original_private_key is None:
|
||||||
|
return True
|
||||||
|
if self.regenerate == "never":
|
||||||
|
return False
|
||||||
|
if self.regenerate == "fail":
|
||||||
|
if not self._private_key_valid():
|
||||||
|
self.module.fail_json(
|
||||||
|
msg="Key has wrong type and/or size. Will not proceed. "
|
||||||
|
+ "To force regeneration, call the module with `generate` set to "
|
||||||
|
+ "`partial_idempotence`, `full_idempotence` or `always`, or with `force=true`."
|
||||||
|
)
|
||||||
|
return False
|
||||||
|
if self.regenerate in ("partial_idempotence", "full_idempotence"):
|
||||||
|
return not self._private_key_valid()
|
||||||
|
return True
|
||||||
|
|
||||||
|
def _private_key_valid(self) -> bool:
|
||||||
|
if self.original_private_key is None:
|
||||||
|
return False
|
||||||
|
|
||||||
|
return all(
|
||||||
|
[
|
||||||
|
self.size == self.original_private_key.size,
|
||||||
|
self.type == self.original_private_key.type,
|
||||||
|
self._private_key_valid_backend(self.original_private_key),
|
||||||
|
]
|
||||||
|
)
|
||||||
|
|
||||||
|
@abc.abstractmethod
|
||||||
|
def _private_key_valid_backend(self, original_private_key: PrivateKey) -> bool:
|
||||||
|
pass
|
||||||
|
|
||||||
|
@OpensshModule.trigger_change
|
||||||
|
@OpensshModule.skip_if_check_mode
|
||||||
|
def _generate(self) -> None:
|
||||||
|
temp_private_key, temp_public_key = self._generate_temp_keypair()
|
||||||
|
|
||||||
|
try:
|
||||||
|
self._safe_secure_move(
|
||||||
|
[
|
||||||
|
(temp_private_key, self.private_key_path),
|
||||||
|
(temp_public_key, self.public_key_path),
|
||||||
|
]
|
||||||
|
)
|
||||||
|
except OSError as e:
|
||||||
|
self.module.fail_json(msg=str(e))
|
||||||
|
|
||||||
|
def _generate_temp_keypair(self) -> tuple[str, str]:
|
||||||
|
temp_private_key = os.path.join(
|
||||||
|
self.module.tmpdir, os.path.basename(self.private_key_path)
|
||||||
|
)
|
||||||
|
temp_public_key = temp_private_key + ".pub"
|
||||||
|
|
||||||
|
try:
|
||||||
|
self._generate_keypair(temp_private_key)
|
||||||
|
except (IOError, OSError) as e:
|
||||||
|
self.module.fail_json(msg=str(e))
|
||||||
|
|
||||||
|
for f in (temp_private_key, temp_public_key):
|
||||||
|
self.module.add_cleanup_file(f)
|
||||||
|
|
||||||
|
return temp_private_key, temp_public_key
|
||||||
|
|
||||||
|
@abc.abstractmethod
|
||||||
|
def _generate_keypair(self, private_key_path: str) -> None:
|
||||||
|
pass
|
||||||
|
|
||||||
|
def _public_key_valid(self) -> bool:
|
||||||
|
if self.original_public_key is None:
|
||||||
|
return False
|
||||||
|
|
||||||
|
valid_public_key = self._get_public_key()
|
||||||
|
if valid_public_key:
|
||||||
|
valid_public_key.comment = self.comment
|
||||||
|
|
||||||
|
return self.original_public_key == valid_public_key
|
||||||
|
|
||||||
|
@abc.abstractmethod
|
||||||
|
def _get_public_key(self) -> PublicKey | t.Literal[""]:
|
||||||
|
pass
|
||||||
|
|
||||||
|
@OpensshModule.trigger_change
|
||||||
|
@OpensshModule.skip_if_check_mode
|
||||||
|
def _restore_public_key(self) -> None:
|
||||||
|
try:
|
||||||
|
temp_public_key = self._create_temp_public_key(
|
||||||
|
str(self._get_public_key()) + "\n"
|
||||||
|
)
|
||||||
|
self._safe_secure_move([(temp_public_key, self.public_key_path)])
|
||||||
|
except (IOError, OSError):
|
||||||
|
self.module.fail_json(
|
||||||
|
msg="The public key is missing or does not match the private key. "
|
||||||
|
+ "Unable to regenerate the public key."
|
||||||
|
)
|
||||||
|
|
||||||
|
if self.comment:
|
||||||
|
self._update_comment()
|
||||||
|
|
||||||
|
def _create_temp_public_key(self, content: str | bytes) -> str:
|
||||||
|
temp_public_key = os.path.join(
|
||||||
|
self.module.tmpdir, os.path.basename(self.public_key_path)
|
||||||
|
)
|
||||||
|
|
||||||
|
default_permissions = 0o644
|
||||||
|
existing_permissions = file_mode(self.public_key_path)
|
||||||
|
|
||||||
|
try:
|
||||||
|
secure_write(
|
||||||
|
path=temp_public_key,
|
||||||
|
mode=existing_permissions or default_permissions,
|
||||||
|
content=to_bytes(content),
|
||||||
|
)
|
||||||
|
except (IOError, OSError) as e:
|
||||||
|
self.module.fail_json(msg=str(e))
|
||||||
|
self.module.add_cleanup_file(temp_public_key)
|
||||||
|
|
||||||
|
return temp_public_key
|
||||||
|
|
||||||
|
@abc.abstractmethod
|
||||||
|
def _update_comment(self) -> None:
|
||||||
|
pass
|
||||||
|
|
||||||
|
def _should_remove(self) -> bool:
|
||||||
|
return self._private_key_exists() or self._public_key_exists()
|
||||||
|
|
||||||
|
@OpensshModule.trigger_change
|
||||||
|
@OpensshModule.skip_if_check_mode
|
||||||
|
def _remove(self) -> None:
|
||||||
|
try:
|
||||||
|
if self._private_key_exists():
|
||||||
|
os.remove(self.private_key_path)
|
||||||
|
if self._public_key_exists():
|
||||||
|
os.remove(self.public_key_path)
|
||||||
|
except (IOError, OSError) as e:
|
||||||
|
self.module.fail_json(msg=str(e))
|
||||||
|
|
||||||
|
@property
|
||||||
|
def _result(self) -> dict[str, t.Any]:
|
||||||
|
private_key = self.private_key or self.original_private_key
|
||||||
|
public_key = self.public_key or self.original_public_key
|
||||||
|
|
||||||
|
return {
|
||||||
|
"size": self.size,
|
||||||
|
"type": self.type,
|
||||||
|
"filename": self.private_key_path,
|
||||||
|
"fingerprint": private_key.fingerprint if private_key else "",
|
||||||
|
"public_key": str(public_key) if public_key else "",
|
||||||
|
"comment": public_key.comment if public_key else "",
|
||||||
|
}
|
||||||
|
|
||||||
|
@property
|
||||||
|
def diff(self) -> dict[str, t.Any]:
|
||||||
|
before = (
|
||||||
|
self.original_private_key.to_dict() if self.original_private_key else {}
|
||||||
|
)
|
||||||
|
before.update(
|
||||||
|
self.original_public_key.to_dict() if self.original_public_key else {}
|
||||||
|
)
|
||||||
|
|
||||||
|
after = self.private_key.to_dict() if self.private_key else {}
|
||||||
|
after.update(self.public_key.to_dict() if self.public_key else {})
|
||||||
|
|
||||||
|
return {
|
||||||
|
"before": before,
|
||||||
|
"after": after,
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
class KeypairBackendOpensshBin(KeypairBackend):
|
||||||
|
def __init__(self, *, module: AnsibleModule) -> None:
|
||||||
|
super().__init__(module=module)
|
||||||
|
|
||||||
|
if self.module.params["private_key_format"] != "auto":
|
||||||
|
self.module.fail_json(
|
||||||
|
msg="'auto' is the only valid option for 'private_key_format' when 'backend' is not 'cryptography'"
|
||||||
|
)
|
||||||
|
|
||||||
|
self.ssh_keygen = KeygenCommand(self.module)
|
||||||
|
|
||||||
|
def _generate_keypair(self, private_key_path: str) -> None:
|
||||||
|
self.ssh_keygen.generate_keypair(
|
||||||
|
private_key_path=private_key_path,
|
||||||
|
size=self.size,
|
||||||
|
key_type=self.type,
|
||||||
|
comment=self.comment,
|
||||||
|
check_rc=True,
|
||||||
|
)
|
||||||
|
|
||||||
|
def _get_private_key(self) -> PrivateKey:
|
||||||
|
rc, private_key_content, err = self.ssh_keygen.get_private_key(
|
||||||
|
private_key_path=self.private_key_path, check_rc=False
|
||||||
|
)
|
||||||
|
if rc != 0:
|
||||||
|
raise ValueError(err)
|
||||||
|
return PrivateKey.from_string(private_key_content)
|
||||||
|
|
||||||
|
def _get_public_key(self) -> PublicKey | t.Literal[""]:
|
||||||
|
public_key_content = self.ssh_keygen.get_matching_public_key(
|
||||||
|
private_key_path=self.private_key_path, check_rc=True
|
||||||
|
)[1]
|
||||||
|
return PublicKey.from_string(public_key_content)
|
||||||
|
|
||||||
|
def _private_key_readable(self) -> bool:
|
||||||
|
rc, _stdout, stderr = self.ssh_keygen.get_matching_public_key(
|
||||||
|
private_key_path=self.private_key_path, check_rc=False
|
||||||
|
)
|
||||||
|
return not (
|
||||||
|
rc == 255
|
||||||
|
or any_in(
|
||||||
|
stderr,
|
||||||
|
"is not a public key file",
|
||||||
|
"incorrect passphrase",
|
||||||
|
"load failed",
|
||||||
|
)
|
||||||
|
)
|
||||||
|
|
||||||
|
def _update_comment(self) -> None:
|
||||||
|
try:
|
||||||
|
ssh_version = self._get_ssh_version() or "7.8"
|
||||||
|
force_new_format = (
|
||||||
|
LooseVersion("6.5") <= LooseVersion(ssh_version) < LooseVersion("7.8")
|
||||||
|
)
|
||||||
|
self.ssh_keygen.update_comment(
|
||||||
|
private_key_path=self.private_key_path,
|
||||||
|
comment=self.comment or "",
|
||||||
|
force_new_format=force_new_format,
|
||||||
|
check_rc=True,
|
||||||
|
)
|
||||||
|
except (IOError, OSError) as e:
|
||||||
|
self.module.fail_json(msg=str(e))
|
||||||
|
|
||||||
|
def _private_key_valid_backend(self, original_private_key: PrivateKey) -> bool:
|
||||||
|
return True
|
||||||
|
|
||||||
|
|
||||||
|
class KeypairBackendCryptography(KeypairBackend):
|
||||||
|
def __init__(self, *, module: AnsibleModule) -> None:
|
||||||
|
super().__init__(module=module)
|
||||||
|
|
||||||
|
if self.type == "rsa1":
|
||||||
|
self.module.fail_json(
|
||||||
|
msg="RSA1 keys are not supported by the cryptography backend"
|
||||||
|
)
|
||||||
|
|
||||||
|
self.passphrase = (
|
||||||
|
to_bytes(module.params["passphrase"])
|
||||||
|
if module.params["passphrase"]
|
||||||
|
else None
|
||||||
|
)
|
||||||
|
key_format: t.Literal["auto", "pkcs1", "pkcs8", "ssh"] = module.params[
|
||||||
|
"private_key_format"
|
||||||
|
]
|
||||||
|
self.private_key_format = self._get_key_format(key_format)
|
||||||
|
|
||||||
|
def _get_key_format(
|
||||||
|
self, key_format: t.Literal["auto", "pkcs1", "pkcs8", "ssh"]
|
||||||
|
) -> t.Literal["SSH", "PKCS1", "PKCS8"]:
|
||||||
|
result: t.Literal["SSH", "PKCS1", "PKCS8"] = "SSH"
|
||||||
|
|
||||||
|
if key_format == "auto":
|
||||||
|
# Default to OpenSSH 7.8 compatibility when OpenSSH is not installed
|
||||||
|
ssh_version = self._get_ssh_version() or "7.8"
|
||||||
|
|
||||||
|
if (
|
||||||
|
LooseVersion(ssh_version) < LooseVersion("7.8")
|
||||||
|
and self.type != "ed25519"
|
||||||
|
):
|
||||||
|
# OpenSSH made SSH formatted private keys available in version 6.5,
|
||||||
|
# but still defaulted to PKCS1 format with the exception of ed25519 keys
|
||||||
|
result = "PKCS1"
|
||||||
|
else:
|
||||||
|
result = key_format.upper() # type: ignore
|
||||||
|
|
||||||
|
return result
|
||||||
|
|
||||||
|
def _generate_keypair(self, private_key_path: str) -> None:
|
||||||
|
assert self.type != "rsa1"
|
||||||
|
keypair = OpensshKeypair.generate(
|
||||||
|
keytype=self.type,
|
||||||
|
size=self.size,
|
||||||
|
passphrase=self.passphrase,
|
||||||
|
comment=self.comment or "",
|
||||||
|
)
|
||||||
|
|
||||||
|
encoded_private_key = OpensshKeypair.encode_openssh_privatekey(
|
||||||
|
asym_keypair=keypair.asymmetric_keypair, key_format=self.private_key_format
|
||||||
|
)
|
||||||
|
secure_write(path=private_key_path, mode=0o600, content=encoded_private_key)
|
||||||
|
|
||||||
|
public_key_path = private_key_path + ".pub"
|
||||||
|
secure_write(path=public_key_path, mode=0o644, content=keypair.public_key)
|
||||||
|
|
||||||
|
def _get_private_key(self) -> PrivateKey:
|
||||||
|
keypair = OpensshKeypair.load(
|
||||||
|
path=self.private_key_path, passphrase=self.passphrase, no_public_key=True
|
||||||
|
)
|
||||||
|
|
||||||
|
return PrivateKey(
|
||||||
|
size=keypair.size,
|
||||||
|
key_type=keypair.key_type,
|
||||||
|
fingerprint=keypair.fingerprint,
|
||||||
|
key_format=parse_private_key_format(path=self.private_key_path),
|
||||||
|
)
|
||||||
|
|
||||||
|
def _get_public_key(self) -> PublicKey | t.Literal[""]:
|
||||||
|
try:
|
||||||
|
keypair = OpensshKeypair.load(
|
||||||
|
path=self.private_key_path,
|
||||||
|
passphrase=self.passphrase,
|
||||||
|
no_public_key=True,
|
||||||
|
)
|
||||||
|
except OpenSSHError:
|
||||||
|
# Simulates the null output of ssh-keygen
|
||||||
|
return ""
|
||||||
|
|
||||||
|
return PublicKey.from_string(to_text(keypair.public_key))
|
||||||
|
|
||||||
|
def _private_key_readable(self) -> bool:
|
||||||
|
try:
|
||||||
|
OpensshKeypair.load(
|
||||||
|
path=self.private_key_path,
|
||||||
|
passphrase=self.passphrase,
|
||||||
|
no_public_key=True,
|
||||||
|
)
|
||||||
|
except (InvalidPrivateKeyFileError, InvalidPassphraseError):
|
||||||
|
return False
|
||||||
|
|
||||||
|
# Cryptography >= 3.0 uses a SSH key loader which does not raise an exception when a passphrase is provided
|
||||||
|
# when loading an unencrypted key
|
||||||
|
if self.passphrase:
|
||||||
|
try:
|
||||||
|
OpensshKeypair.load(
|
||||||
|
path=self.private_key_path, passphrase=None, no_public_key=True
|
||||||
|
)
|
||||||
|
return False
|
||||||
|
except (InvalidPrivateKeyFileError, InvalidPassphraseError):
|
||||||
|
return True
|
||||||
|
|
||||||
|
return True
|
||||||
|
|
||||||
|
def _update_comment(self) -> None:
|
||||||
|
keypair = OpensshKeypair.load(
|
||||||
|
path=self.private_key_path, passphrase=self.passphrase, no_public_key=True
|
||||||
|
)
|
||||||
|
try:
|
||||||
|
keypair.comment = self.comment
|
||||||
|
except InvalidCommentError as e:
|
||||||
|
self.module.fail_json(msg=str(e))
|
||||||
|
|
||||||
|
try:
|
||||||
|
temp_public_key = self._create_temp_public_key(keypair.public_key + b"\n")
|
||||||
|
self._safe_secure_move([(temp_public_key, self.public_key_path)])
|
||||||
|
except (IOError, OSError) as e:
|
||||||
|
self.module.fail_json(msg=str(e))
|
||||||
|
|
||||||
|
def _private_key_valid_backend(self, original_private_key: PrivateKey) -> bool:
|
||||||
|
# avoids breaking behavior and prevents
|
||||||
|
# automatic conversions with OpenSSH upgrades
|
||||||
|
if self.module.params["private_key_format"] == "auto":
|
||||||
|
return True
|
||||||
|
|
||||||
|
return self.private_key_format == original_private_key.format
|
||||||
|
|
||||||
|
|
||||||
|
def select_backend(
|
||||||
|
*, module: AnsibleModule, backend: t.Literal["auto", "opensshbin", "cryptography"]
|
||||||
|
) -> KeypairBackend:
|
||||||
|
can_use_cryptography = HAS_OPENSSH_SUPPORT and LooseVersion(
|
||||||
|
CRYPTOGRAPHY_VERSION
|
||||||
|
) >= LooseVersion(COLLECTION_MINIMUM_CRYPTOGRAPHY_VERSION)
|
||||||
|
can_use_opensshbin = bool(module.get_bin_path("ssh-keygen"))
|
||||||
|
|
||||||
|
if backend == "auto":
|
||||||
|
if can_use_opensshbin and not module.params["passphrase"]:
|
||||||
|
backend = "opensshbin"
|
||||||
|
elif can_use_cryptography:
|
||||||
|
backend = "cryptography"
|
||||||
|
else:
|
||||||
|
module.fail_json(
|
||||||
|
msg=(
|
||||||
|
"Cannot find either the OpenSSH binary in the PATH "
|
||||||
|
f"or cryptography >= {COLLECTION_MINIMUM_CRYPTOGRAPHY_VERSION} installed on this system"
|
||||||
|
)
|
||||||
|
)
|
||||||
|
|
||||||
|
if backend == "opensshbin":
|
||||||
|
if not can_use_opensshbin:
|
||||||
|
module.fail_json(msg="Cannot find the OpenSSH binary in the PATH")
|
||||||
|
return KeypairBackendOpensshBin(module=module)
|
||||||
|
if backend == "cryptography":
|
||||||
|
if not can_use_cryptography:
|
||||||
|
module.fail_json(
|
||||||
|
msg=missing_required_lib(
|
||||||
|
f"cryptography >= {COLLECTION_MINIMUM_CRYPTOGRAPHY_VERSION}"
|
||||||
|
)
|
||||||
|
)
|
||||||
|
return KeypairBackendCryptography(module=module)
|
||||||
|
raise ValueError(f"Unsupported value for backend: {backend}")
|
||||||
|
|
||||||
|
|
||||||
|
__all__ = ("KeypairBackend", "select_backend")
|
||||||
827
plugins/module_utils/_openssh/certificate.py
Normal file
827
plugins/module_utils/_openssh/certificate.py
Normal file
@@ -0,0 +1,827 @@
|
|||||||
|
# Copyright (c) 2021, Andrew Pantuso (@ajpantuso) <ajpantuso@gmail.com>
|
||||||
|
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||||
|
# SPDX-License-Identifier: GPL-3.0-or-later
|
||||||
|
|
||||||
|
# Note that this module util is **PRIVATE** to the collection. It can have breaking changes at any time.
|
||||||
|
# Do not use this from other collections or standalone plugins/modules!
|
||||||
|
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import abc
|
||||||
|
import binascii
|
||||||
|
import datetime as _datetime
|
||||||
|
import os
|
||||||
|
import typing as t
|
||||||
|
from base64 import b64encode
|
||||||
|
from datetime import datetime
|
||||||
|
from hashlib import sha256
|
||||||
|
|
||||||
|
from ansible.module_utils.common.text.converters import to_text
|
||||||
|
from ansible_collections.community.crypto.plugins.module_utils._openssh.utils import (
|
||||||
|
OpensshParser,
|
||||||
|
_OpensshWriter,
|
||||||
|
)
|
||||||
|
from ansible_collections.community.crypto.plugins.module_utils._time import UTC as _UTC
|
||||||
|
from ansible_collections.community.crypto.plugins.module_utils._time import (
|
||||||
|
add_or_remove_timezone as _add_or_remove_timezone,
|
||||||
|
)
|
||||||
|
from ansible_collections.community.crypto.plugins.module_utils._time import (
|
||||||
|
convert_relative_to_datetime,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
if t.TYPE_CHECKING:
|
||||||
|
from ansible_collections.community.crypto.plugins.module_utils._openssh.cryptography import (
|
||||||
|
KeyType,
|
||||||
|
)
|
||||||
|
|
||||||
|
DateFormat = t.Literal["human_readable", "openssh", "timestamp"]
|
||||||
|
DateFormatStr = t.Literal["human_readable", "openssh"]
|
||||||
|
DateFormatInt = t.Literal["timestamp"]
|
||||||
|
else:
|
||||||
|
KeyType = None
|
||||||
|
|
||||||
|
|
||||||
|
# Protocol References
|
||||||
|
# -------------------
|
||||||
|
# https://datatracker.ietf.org/doc/html/rfc4251
|
||||||
|
# https://datatracker.ietf.org/doc/html/rfc4253
|
||||||
|
# https://datatracker.ietf.org/doc/html/rfc5656
|
||||||
|
# https://datatracker.ietf.org/doc/html/rfc8032
|
||||||
|
# https://cvsweb.openbsd.org/src/usr.bin/ssh/PROTOCOL.certkeys?annotate=HEAD
|
||||||
|
#
|
||||||
|
# Inspired by:
|
||||||
|
# ------------
|
||||||
|
# https://github.com/pyca/cryptography/blob/main/src/cryptography/hazmat/primitives/serialization/ssh.py
|
||||||
|
# https://github.com/paramiko/paramiko/blob/master/paramiko/message.py
|
||||||
|
|
||||||
|
|
||||||
|
# See https://cvsweb.openbsd.org/src/usr.bin/ssh/PROTOCOL.certkeys?annotate=HEAD
|
||||||
|
_USER_TYPE = 1
|
||||||
|
_HOST_TYPE = 2
|
||||||
|
|
||||||
|
_SSH_TYPE_STRINGS: dict[KeyType | str, bytes] = {
|
||||||
|
"rsa": b"ssh-rsa",
|
||||||
|
"dsa": b"ssh-dss",
|
||||||
|
"ecdsa-nistp256": b"ecdsa-sha2-nistp256",
|
||||||
|
"ecdsa-nistp384": b"ecdsa-sha2-nistp384",
|
||||||
|
"ecdsa-nistp521": b"ecdsa-sha2-nistp521",
|
||||||
|
"ed25519": b"ssh-ed25519",
|
||||||
|
}
|
||||||
|
_CERT_SUFFIX_V01 = b"-cert-v01@openssh.com"
|
||||||
|
|
||||||
|
# See https://datatracker.ietf.org/doc/html/rfc5656#section-6.1
|
||||||
|
_ECDSA_CURVE_IDENTIFIERS = {
|
||||||
|
"ecdsa-nistp256": b"nistp256",
|
||||||
|
"ecdsa-nistp384": b"nistp384",
|
||||||
|
"ecdsa-nistp521": b"nistp521",
|
||||||
|
}
|
||||||
|
_ECDSA_CURVE_IDENTIFIERS_LOOKUP = {
|
||||||
|
b"nistp256": "ecdsa-nistp256",
|
||||||
|
b"nistp384": "ecdsa-nistp384",
|
||||||
|
b"nistp521": "ecdsa-nistp521",
|
||||||
|
}
|
||||||
|
|
||||||
|
_ALWAYS = _add_or_remove_timezone(datetime(1970, 1, 1), with_timezone=True)
|
||||||
|
_FOREVER = datetime(9999, 12, 31, 23, 59, 59, 999999, _UTC)
|
||||||
|
|
||||||
|
_CRITICAL_OPTIONS = (
|
||||||
|
"force-command",
|
||||||
|
"source-address",
|
||||||
|
"verify-required",
|
||||||
|
)
|
||||||
|
|
||||||
|
_DIRECTIVES = (
|
||||||
|
"clear",
|
||||||
|
"no-x11-forwarding",
|
||||||
|
"no-agent-forwarding",
|
||||||
|
"no-port-forwarding",
|
||||||
|
"no-pty",
|
||||||
|
"no-user-rc",
|
||||||
|
)
|
||||||
|
|
||||||
|
_EXTENSIONS = (
|
||||||
|
"permit-x11-forwarding",
|
||||||
|
"permit-agent-forwarding",
|
||||||
|
"permit-port-forwarding",
|
||||||
|
"permit-pty",
|
||||||
|
"permit-user-rc",
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
class OpensshCertificateTimeParameters:
|
||||||
|
def __init__(
|
||||||
|
self, *, valid_from: str | bytes | int, valid_to: str | bytes | int
|
||||||
|
) -> None:
|
||||||
|
self._valid_from = self.to_datetime(valid_from)
|
||||||
|
self._valid_to = self.to_datetime(valid_to)
|
||||||
|
|
||||||
|
if self._valid_from > self._valid_to:
|
||||||
|
raise ValueError(
|
||||||
|
f"Valid from: {valid_from!r} must not be greater than Valid to: {valid_to!r}"
|
||||||
|
)
|
||||||
|
|
||||||
|
def __eq__(self, other: object) -> bool:
|
||||||
|
if not isinstance(other, type(self)):
|
||||||
|
return NotImplemented
|
||||||
|
return (
|
||||||
|
self._valid_from == other._valid_from and self._valid_to == other._valid_to
|
||||||
|
)
|
||||||
|
|
||||||
|
def __ne__(self, other: object) -> bool:
|
||||||
|
return not self == other
|
||||||
|
|
||||||
|
@property
|
||||||
|
def validity_string(self) -> str:
|
||||||
|
if not (self._valid_from == _ALWAYS and self._valid_to == _FOREVER):
|
||||||
|
return f"{self.valid_from(date_format='openssh')}:{self.valid_to(date_format='openssh')}"
|
||||||
|
return ""
|
||||||
|
|
||||||
|
@t.overload
|
||||||
|
def valid_from(self, date_format: DateFormatStr) -> str: ...
|
||||||
|
|
||||||
|
@t.overload
|
||||||
|
def valid_from(self, date_format: DateFormatInt) -> int: ...
|
||||||
|
|
||||||
|
@t.overload
|
||||||
|
def valid_from(self, date_format: DateFormat) -> str | int: ...
|
||||||
|
|
||||||
|
def valid_from(self, date_format: DateFormat) -> str | int:
|
||||||
|
return self.format_datetime(self._valid_from, date_format=date_format)
|
||||||
|
|
||||||
|
@t.overload
|
||||||
|
def valid_to(self, date_format: DateFormatStr) -> str: ...
|
||||||
|
|
||||||
|
@t.overload
|
||||||
|
def valid_to(self, date_format: DateFormatInt) -> int: ...
|
||||||
|
|
||||||
|
@t.overload
|
||||||
|
def valid_to(self, date_format: DateFormat) -> str | int: ...
|
||||||
|
|
||||||
|
def valid_to(self, date_format: DateFormat) -> str | int:
|
||||||
|
return self.format_datetime(self._valid_to, date_format=date_format)
|
||||||
|
|
||||||
|
def within_range(self, valid_at: str | bytes | int | None) -> bool:
|
||||||
|
if valid_at is not None:
|
||||||
|
valid_at_datetime = self.to_datetime(valid_at)
|
||||||
|
return self._valid_from <= valid_at_datetime <= self._valid_to
|
||||||
|
return True
|
||||||
|
|
||||||
|
@t.overload
|
||||||
|
@staticmethod
|
||||||
|
def format_datetime(dt: datetime, *, date_format: DateFormatStr) -> str: ...
|
||||||
|
|
||||||
|
@t.overload
|
||||||
|
@staticmethod
|
||||||
|
def format_datetime(dt: datetime, *, date_format: DateFormatInt) -> int: ...
|
||||||
|
|
||||||
|
@t.overload
|
||||||
|
@staticmethod
|
||||||
|
def format_datetime(dt: datetime, *, date_format: DateFormat) -> str | int: ...
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def format_datetime(dt: datetime, *, date_format: DateFormat) -> str | int:
|
||||||
|
if date_format in ("human_readable", "openssh"):
|
||||||
|
if dt == _ALWAYS:
|
||||||
|
return "always"
|
||||||
|
if dt == _FOREVER:
|
||||||
|
return "forever"
|
||||||
|
return (
|
||||||
|
dt.isoformat().replace("+00:00", "")
|
||||||
|
if date_format == "human_readable"
|
||||||
|
else dt.strftime("%Y%m%d%H%M%S")
|
||||||
|
)
|
||||||
|
if date_format == "timestamp":
|
||||||
|
td = dt - _ALWAYS
|
||||||
|
return int(
|
||||||
|
(td.microseconds + (td.seconds + td.days * 24 * 3600) * 10**6) / 10**6
|
||||||
|
)
|
||||||
|
raise ValueError(f"{date_format} is not a valid format")
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def to_datetime(time_string_or_timestamp: str | bytes | int) -> datetime:
|
||||||
|
if isinstance(time_string_or_timestamp, (str, bytes)):
|
||||||
|
return OpensshCertificateTimeParameters._time_string_to_datetime(
|
||||||
|
to_text(time_string_or_timestamp.strip())
|
||||||
|
)
|
||||||
|
if isinstance(time_string_or_timestamp, int):
|
||||||
|
return OpensshCertificateTimeParameters._timestamp_to_datetime(
|
||||||
|
time_string_or_timestamp
|
||||||
|
)
|
||||||
|
raise ValueError(
|
||||||
|
f"Value must be of type (str, unicode, int) not {type(time_string_or_timestamp)}"
|
||||||
|
)
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def _timestamp_to_datetime(timestamp: int) -> datetime:
|
||||||
|
if timestamp == 0x0:
|
||||||
|
return _ALWAYS
|
||||||
|
if timestamp == 0xFFFFFFFFFFFFFFFF:
|
||||||
|
return _FOREVER
|
||||||
|
try:
|
||||||
|
return datetime.fromtimestamp(timestamp, tz=_datetime.timezone.utc)
|
||||||
|
except OverflowError as e:
|
||||||
|
raise ValueError from e
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def _time_string_to_datetime(time_string: str) -> datetime:
|
||||||
|
if time_string == "always":
|
||||||
|
return _ALWAYS
|
||||||
|
if time_string == "forever":
|
||||||
|
return _FOREVER
|
||||||
|
if is_relative_time_string(time_string):
|
||||||
|
result = convert_relative_to_datetime(time_string, with_timezone=True)
|
||||||
|
if result is None:
|
||||||
|
raise ValueError
|
||||||
|
return result
|
||||||
|
result = None
|
||||||
|
for time_format in ("%Y-%m-%d", "%Y-%m-%d %H:%M:%S", "%Y-%m-%dT%H:%M:%S"):
|
||||||
|
try:
|
||||||
|
result = _add_or_remove_timezone(
|
||||||
|
datetime.strptime(time_string, time_format),
|
||||||
|
with_timezone=True,
|
||||||
|
)
|
||||||
|
except ValueError:
|
||||||
|
pass
|
||||||
|
if result is None:
|
||||||
|
raise ValueError
|
||||||
|
return result
|
||||||
|
|
||||||
|
|
||||||
|
_OpensshCertificateOption = t.TypeVar(
|
||||||
|
"_OpensshCertificateOption", bound="OpensshCertificateOption"
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
class OpensshCertificateOption:
|
||||||
|
def __init__(
|
||||||
|
self,
|
||||||
|
*,
|
||||||
|
option_type: t.Literal["critical", "extension"],
|
||||||
|
name: str | bytes,
|
||||||
|
data: str | bytes,
|
||||||
|
):
|
||||||
|
if option_type not in ("critical", "extension"):
|
||||||
|
raise ValueError("type must be either 'critical' or 'extension'")
|
||||||
|
|
||||||
|
if not isinstance(name, (str, bytes)):
|
||||||
|
raise TypeError(f"name must be a string not {type(name)}")
|
||||||
|
|
||||||
|
if not isinstance(data, (str, bytes)):
|
||||||
|
raise TypeError(f"data must be a string not {type(data)}")
|
||||||
|
|
||||||
|
self._option_type = option_type
|
||||||
|
self._name = name.lower()
|
||||||
|
self._data = data
|
||||||
|
|
||||||
|
def __eq__(self, other: object) -> bool:
|
||||||
|
if not isinstance(other, type(self)):
|
||||||
|
return NotImplemented
|
||||||
|
|
||||||
|
return all(
|
||||||
|
[
|
||||||
|
self._option_type == other._option_type,
|
||||||
|
self._name == other._name,
|
||||||
|
self._data == other._data,
|
||||||
|
]
|
||||||
|
)
|
||||||
|
|
||||||
|
def __hash__(self) -> int:
|
||||||
|
return hash((self._option_type, self._name, self._data))
|
||||||
|
|
||||||
|
def __ne__(self, other: object) -> bool:
|
||||||
|
return not self == other
|
||||||
|
|
||||||
|
def __str__(self) -> str:
|
||||||
|
if self._data:
|
||||||
|
return f"{self._name!r}={self._data!r}"
|
||||||
|
return f"{self._name!r}"
|
||||||
|
|
||||||
|
@property
|
||||||
|
def data(self) -> str | bytes:
|
||||||
|
return self._data
|
||||||
|
|
||||||
|
@property
|
||||||
|
def name(self) -> str | bytes:
|
||||||
|
return self._name
|
||||||
|
|
||||||
|
@property
|
||||||
|
def type(self) -> t.Literal["critical", "extension"]:
|
||||||
|
return self._option_type
|
||||||
|
|
||||||
|
@classmethod
|
||||||
|
def from_string(
|
||||||
|
cls: t.Type[_OpensshCertificateOption], option_string: str
|
||||||
|
) -> _OpensshCertificateOption:
|
||||||
|
if not isinstance(option_string, str):
|
||||||
|
raise ValueError(
|
||||||
|
f"option_string must be a string not {type(option_string)}"
|
||||||
|
)
|
||||||
|
option_type = None
|
||||||
|
|
||||||
|
if ":" in option_string:
|
||||||
|
option_type, value = option_string.strip().split(":", 1)
|
||||||
|
if "=" in value:
|
||||||
|
name, data = value.split("=", 1)
|
||||||
|
else:
|
||||||
|
name, data = value, ""
|
||||||
|
elif "=" in option_string:
|
||||||
|
name, data = option_string.strip().split("=", 1)
|
||||||
|
else:
|
||||||
|
name, data = option_string.strip(), ""
|
||||||
|
|
||||||
|
return cls(
|
||||||
|
# We have str, but we're expecting a specific literal:
|
||||||
|
option_type=option_type or get_option_type(name.lower()), # type: ignore
|
||||||
|
name=name,
|
||||||
|
data=data,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
class OpensshCertificateInfo(metaclass=abc.ABCMeta):
|
||||||
|
"""Encapsulates all certificate information which is signed by a CA key"""
|
||||||
|
|
||||||
|
def __init__(
|
||||||
|
self,
|
||||||
|
*,
|
||||||
|
nonce: bytes | None = None,
|
||||||
|
serial: int | None = None,
|
||||||
|
cert_type: int | None = None,
|
||||||
|
key_id: bytes | None = None,
|
||||||
|
principals: list[bytes] | None = None,
|
||||||
|
valid_after: int | None = None,
|
||||||
|
valid_before: int | None = None,
|
||||||
|
critical_options: list[tuple[bytes, bytes]] | None = None,
|
||||||
|
extensions: list[tuple[bytes, bytes]] | None = None,
|
||||||
|
reserved: bytes | None = None,
|
||||||
|
signing_key: bytes | None = None,
|
||||||
|
):
|
||||||
|
self.nonce = nonce
|
||||||
|
self.serial = serial
|
||||||
|
self._cert_type: int | None = cert_type
|
||||||
|
self.key_id = key_id
|
||||||
|
self.principals = principals
|
||||||
|
self.valid_after = valid_after
|
||||||
|
self.valid_before = valid_before
|
||||||
|
self.critical_options = critical_options
|
||||||
|
self.extensions = extensions
|
||||||
|
self.reserved = reserved
|
||||||
|
self.signing_key = signing_key
|
||||||
|
|
||||||
|
self.type_string: bytes | None = None
|
||||||
|
|
||||||
|
@property
|
||||||
|
def cert_type(self) -> t.Literal["user", "host", ""]:
|
||||||
|
if self._cert_type == _USER_TYPE:
|
||||||
|
return "user"
|
||||||
|
if self._cert_type == _HOST_TYPE:
|
||||||
|
return "host"
|
||||||
|
return ""
|
||||||
|
|
||||||
|
@cert_type.setter
|
||||||
|
def cert_type(self, cert_type: t.Literal["user", "host"] | int) -> None:
|
||||||
|
if cert_type in ("user", _USER_TYPE):
|
||||||
|
self._cert_type = _USER_TYPE
|
||||||
|
elif cert_type in ("host", _HOST_TYPE):
|
||||||
|
self._cert_type = _HOST_TYPE
|
||||||
|
else:
|
||||||
|
raise ValueError(f"{cert_type} is not a valid certificate type")
|
||||||
|
|
||||||
|
def signing_key_fingerprint(self) -> bytes:
|
||||||
|
if self.signing_key is None:
|
||||||
|
raise ValueError("signing_key not present")
|
||||||
|
return fingerprint(self.signing_key)
|
||||||
|
|
||||||
|
@abc.abstractmethod
|
||||||
|
def public_key_fingerprint(self) -> bytes:
|
||||||
|
pass
|
||||||
|
|
||||||
|
@abc.abstractmethod
|
||||||
|
def parse_public_numbers(self, parser: OpensshParser) -> None:
|
||||||
|
pass
|
||||||
|
|
||||||
|
|
||||||
|
class OpensshRSACertificateInfo(OpensshCertificateInfo):
|
||||||
|
def __init__(self, *, e: int | None = None, n: int | None = None, **kwargs) -> None:
|
||||||
|
super().__init__(**kwargs)
|
||||||
|
self.type_string = _SSH_TYPE_STRINGS["rsa"] + _CERT_SUFFIX_V01
|
||||||
|
self.e = e
|
||||||
|
self.n = n
|
||||||
|
|
||||||
|
# See https://datatracker.ietf.org/doc/html/rfc4253#section-6.6
|
||||||
|
def public_key_fingerprint(self) -> bytes:
|
||||||
|
if self.e is None or self.n is None:
|
||||||
|
return b""
|
||||||
|
|
||||||
|
writer = _OpensshWriter()
|
||||||
|
writer.string(_SSH_TYPE_STRINGS["rsa"])
|
||||||
|
writer.mpint(self.e)
|
||||||
|
writer.mpint(self.n)
|
||||||
|
|
||||||
|
return fingerprint(writer.bytes())
|
||||||
|
|
||||||
|
def parse_public_numbers(self, parser: OpensshParser) -> None:
|
||||||
|
self.e = parser.mpint()
|
||||||
|
self.n = parser.mpint()
|
||||||
|
|
||||||
|
|
||||||
|
class OpensshDSACertificateInfo(OpensshCertificateInfo):
|
||||||
|
def __init__(
|
||||||
|
self,
|
||||||
|
*,
|
||||||
|
p: int | None = None,
|
||||||
|
q: int | None = None,
|
||||||
|
g: int | None = None,
|
||||||
|
y: int | None = None,
|
||||||
|
**kwargs,
|
||||||
|
) -> None:
|
||||||
|
super().__init__(**kwargs)
|
||||||
|
self.type_string = _SSH_TYPE_STRINGS["dsa"] + _CERT_SUFFIX_V01
|
||||||
|
self.p = p
|
||||||
|
self.q = q
|
||||||
|
self.g = g
|
||||||
|
self.y = y
|
||||||
|
|
||||||
|
# See https://datatracker.ietf.org/doc/html/rfc4253#section-6.6
|
||||||
|
def public_key_fingerprint(self) -> bytes:
|
||||||
|
if self.p is None or self.q is None or self.g is None or self.y is None:
|
||||||
|
return b""
|
||||||
|
|
||||||
|
writer = _OpensshWriter()
|
||||||
|
writer.string(_SSH_TYPE_STRINGS["dsa"])
|
||||||
|
writer.mpint(self.p)
|
||||||
|
writer.mpint(self.q)
|
||||||
|
writer.mpint(self.g)
|
||||||
|
writer.mpint(self.y)
|
||||||
|
|
||||||
|
return fingerprint(writer.bytes())
|
||||||
|
|
||||||
|
def parse_public_numbers(self, parser: OpensshParser) -> None:
|
||||||
|
self.p = parser.mpint()
|
||||||
|
self.q = parser.mpint()
|
||||||
|
self.g = parser.mpint()
|
||||||
|
self.y = parser.mpint()
|
||||||
|
|
||||||
|
|
||||||
|
class OpensshECDSACertificateInfo(OpensshCertificateInfo):
|
||||||
|
def __init__(
|
||||||
|
self, *, curve: bytes | None = None, public_key: bytes | None = None, **kwargs
|
||||||
|
):
|
||||||
|
super().__init__(**kwargs)
|
||||||
|
self._curve = None
|
||||||
|
if curve is not None:
|
||||||
|
self.curve = curve
|
||||||
|
|
||||||
|
self.public_key = public_key
|
||||||
|
|
||||||
|
@property
|
||||||
|
def curve(self) -> bytes | None:
|
||||||
|
return self._curve
|
||||||
|
|
||||||
|
@curve.setter
|
||||||
|
def curve(self, curve: bytes) -> None:
|
||||||
|
if curve in _ECDSA_CURVE_IDENTIFIERS.values():
|
||||||
|
self._curve = curve
|
||||||
|
self.type_string = (
|
||||||
|
_SSH_TYPE_STRINGS[_ECDSA_CURVE_IDENTIFIERS_LOOKUP[curve]]
|
||||||
|
+ _CERT_SUFFIX_V01
|
||||||
|
)
|
||||||
|
else:
|
||||||
|
raise ValueError(
|
||||||
|
"Curve must be one of {(b','.join(_ECDSA_CURVE_IDENTIFIERS.values())).decode('UTF-8')}"
|
||||||
|
)
|
||||||
|
|
||||||
|
# See https://datatracker.ietf.org/doc/html/rfc4253#section-6.6
|
||||||
|
def public_key_fingerprint(self) -> bytes:
|
||||||
|
if self.curve is None or self.public_key is None:
|
||||||
|
return b""
|
||||||
|
|
||||||
|
writer = _OpensshWriter()
|
||||||
|
writer.string(_SSH_TYPE_STRINGS[_ECDSA_CURVE_IDENTIFIERS_LOOKUP[self.curve]])
|
||||||
|
writer.string(self.curve)
|
||||||
|
writer.string(self.public_key)
|
||||||
|
|
||||||
|
return fingerprint(writer.bytes())
|
||||||
|
|
||||||
|
def parse_public_numbers(self, parser: OpensshParser) -> None:
|
||||||
|
self.curve = parser.string()
|
||||||
|
self.public_key = parser.string()
|
||||||
|
|
||||||
|
|
||||||
|
class OpensshED25519CertificateInfo(OpensshCertificateInfo):
|
||||||
|
def __init__(self, *, pk: bytes | None = None, **kwargs) -> None:
|
||||||
|
super().__init__(**kwargs)
|
||||||
|
self.type_string = _SSH_TYPE_STRINGS["ed25519"] + _CERT_SUFFIX_V01
|
||||||
|
self.pk = pk
|
||||||
|
|
||||||
|
def public_key_fingerprint(self) -> bytes:
|
||||||
|
if self.pk is None:
|
||||||
|
return b""
|
||||||
|
|
||||||
|
writer = _OpensshWriter()
|
||||||
|
writer.string(_SSH_TYPE_STRINGS["ed25519"])
|
||||||
|
writer.string(self.pk)
|
||||||
|
|
||||||
|
return fingerprint(writer.bytes())
|
||||||
|
|
||||||
|
def parse_public_numbers(self, parser: OpensshParser) -> None:
|
||||||
|
self.pk = parser.string()
|
||||||
|
|
||||||
|
|
||||||
|
_OpensshCertificate = t.TypeVar("_OpensshCertificate", bound="OpensshCertificate")
|
||||||
|
|
||||||
|
|
||||||
|
# See https://cvsweb.openbsd.org/src/usr.bin/ssh/PROTOCOL.certkeys?annotate=HEAD
|
||||||
|
class OpensshCertificate:
    """Encapsulates a formatted OpenSSH certificate including signature and signing key.

    Instances are normally created via :meth:`load`, which parses the
    base64-encoded certificate blob produced by ``ssh-keygen``.  Properties
    that return parsed fields raise ``ValueError`` when the underlying field
    was never populated (i.e. the certificate was not fully parsed).
    """

    def __init__(self, *, cert_info: OpensshCertificateInfo, signature: bytes):
        # cert_info: parsed per-key-type certificate fields; signature: raw
        # signature blob as read from the certificate.
        self._cert_info = cert_info
        self.signature = signature

    @classmethod
    def load(
        cls: t.Type[_OpensshCertificate], path: str | os.PathLike
    ) -> _OpensshCertificate:
        """Load and parse an OpenSSH certificate file from *path*.

        :raises ValueError: if the path does not exist, cannot be read, is not
            in OpenSSH format, or contains malformed/trailing data.
        """
        if not os.path.exists(path):
            raise ValueError(f"{path} is not a valid path.")

        try:
            with open(path, "rb") as cert_file:
                data = cert_file.read()
        except (IOError, OSError) as e:
            raise ValueError(f"{path} cannot be opened for reading: {e}") from e

        # Certificate files are "<format-identifier> <base64-blob> [comment]".
        try:
            format_identifier, b64_cert = data.split(b" ")[:2]
            cert = binascii.a2b_base64(b64_cert)
        except (binascii.Error, ValueError) as e:
            raise ValueError("Certificate not in OpenSSH format") from e

        # Map the format identifier (e.g. b"ssh-rsa-cert-v01@openssh.com")
        # back to the key type it was derived from.
        for key_type, string in _SSH_TYPE_STRINGS.items():
            if format_identifier == string + _CERT_SUFFIX_V01:
                pub_key_type = t.cast(KeyType, key_type)
                break
        else:
            raise ValueError(
                f"Invalid certificate format identifier: {format_identifier!r}"
            )

        parser = OpensshParser(data=cert)

        # The first string inside the blob repeats the outer identifier.
        if format_identifier != parser.string():
            raise ValueError("Certificate formats do not match")

        try:
            cert_info = cls._parse_cert_info(pub_key_type, parser)
            signature = parser.string()
        except (TypeError, ValueError) as e:
            raise ValueError(f"Invalid certificate data: {e}") from e

        # A well-formed certificate is consumed completely by the parser.
        if parser.remaining_bytes():
            raise ValueError(
                f"{parser.remaining_bytes()} bytes of additional data was not parsed while loading {path}"
            )

        return cls(
            cert_info=cert_info,
            signature=signature,
        )

    @property
    def type_string(self) -> str:
        """The certificate format identifier as text."""
        return to_text(self._cert_info.type_string)

    @property
    def nonce(self) -> bytes:
        """The CA-provided nonce; raises ValueError if not parsed."""
        if self._cert_info.nonce is None:
            raise ValueError
        return self._cert_info.nonce

    @property
    def public_key(self) -> str:
        """SHA256 fingerprint of the certified public key, as text."""
        return to_text(self._cert_info.public_key_fingerprint())

    @property
    def serial(self) -> int:
        """The certificate serial number; raises ValueError if not parsed."""
        if self._cert_info.serial is None:
            raise ValueError
        return self._cert_info.serial

    @property
    def type(self) -> t.Literal["user", "host"]:
        """The certificate type; raises ValueError if not parsed."""
        result = self._cert_info.cert_type
        if result == "":
            raise ValueError
        return result

    @property
    def key_id(self) -> str:
        """The key identity string embedded by the CA."""
        return to_text(self._cert_info.key_id)

    @property
    def principals(self) -> list[str]:
        """Principals (users/hosts) the certificate is valid for."""
        if self._cert_info.principals is None:
            raise ValueError
        return [to_text(p) for p in self._cert_info.principals]

    @property
    def valid_after(self) -> int:
        """Start of validity as a Unix timestamp; raises ValueError if not parsed."""
        if self._cert_info.valid_after is None:
            raise ValueError
        return self._cert_info.valid_after

    @property
    def valid_before(self) -> int:
        """End of validity as a Unix timestamp; raises ValueError if not parsed."""
        if self._cert_info.valid_before is None:
            raise ValueError
        return self._cert_info.valid_before

    @property
    def critical_options(self) -> list[OpensshCertificateOption]:
        """Critical options as option objects; raises ValueError if not parsed."""
        if self._cert_info.critical_options is None:
            raise ValueError
        return [
            OpensshCertificateOption(
                option_type="critical", name=to_text(n), data=to_text(d)
            )
            for n, d in self._cert_info.critical_options
        ]

    @property
    def extensions(self) -> list[OpensshCertificateOption]:
        """Extensions as option objects; raises ValueError if not parsed."""
        if self._cert_info.extensions is None:
            raise ValueError
        return [
            OpensshCertificateOption(
                option_type="extension", name=to_text(n), data=to_text(d)
            )
            for n, d in self._cert_info.extensions
        ]

    @property
    def reserved(self) -> bytes:
        """The reserved field (currently unused by OpenSSH); raises ValueError if not parsed."""
        if self._cert_info.reserved is None:
            raise ValueError
        return self._cert_info.reserved

    @property
    def signing_key(self) -> str:
        """SHA256 fingerprint of the CA key that signed this certificate."""
        return to_text(self._cert_info.signing_key_fingerprint())

    @property
    def signature_type(self) -> str:
        """The algorithm identifier embedded in the signature blob."""
        signature_data = OpensshParser.signature_data(signature_string=self.signature)
        return to_text(signature_data["signature_type"])

    @staticmethod
    def _parse_cert_info(
        pub_key_type: KeyType, parser: OpensshParser
    ) -> OpensshCertificateInfo:
        """Parse the certificate body in the exact field order defined by
        PROTOCOL.certkeys and return a populated info object."""
        cert_info = get_cert_info_object(pub_key_type)
        cert_info.nonce = parser.string()
        cert_info.parse_public_numbers(parser)
        cert_info.serial = parser.uint64()
        # mypy doesn't understand that the setter accepts other types than the getter:
        cert_info.cert_type = parser.uint32()  # type: ignore
        cert_info.key_id = parser.string()
        cert_info.principals = parser.string_list()
        cert_info.valid_after = parser.uint64()
        cert_info.valid_before = parser.uint64()
        cert_info.critical_options = parser.option_list()
        cert_info.extensions = parser.option_list()
        cert_info.reserved = parser.string()
        cert_info.signing_key = parser.string()

        return cert_info

    def to_dict(self) -> dict[str, t.Any]:
        """Return a dict of all certificate fields with human-readable validity dates."""
        time_parameters = OpensshCertificateTimeParameters(
            valid_from=self.valid_after, valid_to=self.valid_before
        )
        return {
            "type_string": self.type_string,
            "nonce": self.nonce,
            "serial": self.serial,
            "cert_type": self.type,
            "identifier": self.key_id,
            "principals": self.principals,
            "valid_after": time_parameters.valid_from(date_format="human_readable"),
            "valid_before": time_parameters.valid_to(date_format="human_readable"),
            "critical_options": [
                str(critical_option) for critical_option in self.critical_options
            ],
            "extensions": [str(extension) for extension in self.extensions],
            "reserved": self.reserved,
            "public_key": self.public_key,
            "signing_key": self.signing_key,
        }
|
||||||
|
|
||||||
|
|
||||||
|
def apply_directives(directives: t.Iterable[str]) -> list[OpensshCertificateOption]:
    """Translate ssh-keygen style directives into the surviving default extensions.

    Each ``no-*`` directive removes the matching ``permit-*`` extension from
    the default extension set; the ``clear`` directive removes all of them.

    :raises ValueError: if any directive is not a known directive.
    """
    invalid = [d for d in directives if d not in _DIRECTIVES]
    if invalid:
        raise ValueError(f"directives must be one of {', '.join(_DIRECTIVES)}")

    # Every "no-xyz" directive cancels the "permit-xyz" default extension.
    option_for_directive = {
        directive: OpensshCertificateOption(
            option_type="extension", name=f"permit-{directive[3:]}", data=""
        )
        for directive in (
            "no-x11-forwarding",
            "no-agent-forwarding",
            "no-port-forwarding",
            "no-pty",
            "no-user-rc",
        )
    }

    if "clear" in directives:
        return []

    removed = {option_for_directive[d] for d in directives}
    return list(set(default_options()) - removed)
|
||||||
|
|
||||||
|
|
||||||
|
def default_options() -> list[OpensshCertificateOption]:
    """Return one empty-data extension option for every default extension name."""
    options = []
    for extension_name in _EXTENSIONS:
        options.append(
            OpensshCertificateOption(
                option_type="extension", name=extension_name, data=""
            )
        )
    return options
|
||||||
|
|
||||||
|
|
||||||
|
def fingerprint(public_key: bytes) -> bytes:
    """Generates a SHA256 hash and formats output to resemble ``ssh-keygen``"""
    digest = sha256(public_key).digest()
    # ssh-keygen prints unpadded base64, so strip the trailing '=' padding.
    return b"SHA256:" + b64encode(digest).rstrip(b"=")
|
||||||
|
|
||||||
|
|
||||||
|
def get_cert_info_object(key_type: KeyType) -> OpensshCertificateInfo:
    """Return a fresh, empty certificate-info object for *key_type*.

    :raises ValueError: if *key_type* is not a supported key type.
    """
    # Dispatch table instead of an if/elif chain; all ECDSA curve variants
    # share the same info class.
    factories = {
        "rsa": OpensshRSACertificateInfo,
        "dsa": OpensshDSACertificateInfo,
        "ecdsa-nistp256": OpensshECDSACertificateInfo,
        "ecdsa-nistp384": OpensshECDSACertificateInfo,
        "ecdsa-nistp521": OpensshECDSACertificateInfo,
        "ed25519": OpensshED25519CertificateInfo,
    }
    factory = factories.get(key_type)
    if factory is None:
        raise ValueError(f"{key_type} is not a valid key type")
    return factory()
|
||||||
|
|
||||||
|
|
||||||
|
def get_option_type(name: str) -> t.Literal["critical", "extension"]:
    """Classify a known certificate option *name* as critical or extension.

    :raises ValueError: if *name* is neither a known critical option nor a
        known extension.
    """
    # Critical options are checked first, matching the original precedence.
    if name in _CRITICAL_OPTIONS:
        return "critical"
    if name not in _EXTENSIONS:
        raise ValueError(
            f"{name} is not a valid option. "
            "Custom options must start with 'critical:' or 'extension:' to indicate type"
        )
    return "extension"
|
||||||
|
|
||||||
|
|
||||||
|
def is_relative_time_string(time_string: str) -> bool:
    """Return ``True`` if *time_string* is an ssh-keygen style relative time
    specification, i.e. it starts with ``+`` or ``-``."""
    # str.startswith accepts a tuple of prefixes; one call replaces the
    # chained `or` of two startswith calls.
    return time_string.startswith(("+", "-"))
|
||||||
|
|
||||||
|
|
||||||
|
def parse_option_list(
    option_list: t.Iterable[str],
) -> tuple[list[OpensshCertificateOption], list[OpensshCertificateOption]]:
    """Split raw option strings into ``(critical_options, extensions)``.

    Directive strings (``clear``, ``no-pty``, ...) are folded into the
    extension list via :func:`apply_directives`; duplicate extensions are
    removed.
    """
    critical: list[OpensshCertificateOption] = []
    extensions: list[OpensshCertificateOption] = []
    directives: list[str] = []

    for raw_option in option_list:
        lowered = raw_option.lower()
        if lowered in _DIRECTIVES:
            directives.append(lowered)
            continue
        parsed = OpensshCertificateOption.from_string(raw_option)
        target = critical if parsed.type == "critical" else extensions
        target.append(parsed)

    # Union with the directive-derived defaults, deduplicated via set.
    merged = set(extensions) | set(apply_directives(directives))
    return critical, list(merged)
|
||||||
|
|
||||||
|
|
||||||
|
# Explicit public API of this module util.
__all__ = (
    "OpensshCertificateTimeParameters",
    "OpensshCertificateOption",
    "OpensshCertificateInfo",
    "OpensshRSACertificateInfo",
    "OpensshDSACertificateInfo",
    "OpensshECDSACertificateInfo",
    "OpensshED25519CertificateInfo",
    "OpensshCertificate",
    "apply_directives",
    "default_options",
    "fingerprint",
    "get_cert_info_object",
    "get_option_type",
    "is_relative_time_string",
    "parse_option_list",
)
|
||||||
841
plugins/module_utils/_openssh/cryptography.py
Normal file
841
plugins/module_utils/_openssh/cryptography.py
Normal file
@@ -0,0 +1,841 @@
|
|||||||
|
# Copyright (c) 2021, Andrew Pantuso (@ajpantuso) <ajpantuso@gmail.com>
|
||||||
|
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||||
|
# SPDX-License-Identifier: GPL-3.0-or-later
|
||||||
|
|
||||||
|
# Note that this module util is **PRIVATE** to the collection. It can have breaking changes at any time.
|
||||||
|
# Do not use this from other collections or standalone plugins/modules!
|
||||||
|
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import os
|
||||||
|
import typing as t
|
||||||
|
from base64 import b64decode, b64encode
|
||||||
|
from getpass import getuser
|
||||||
|
from socket import gethostname
|
||||||
|
|
||||||
|
|
||||||
|
# Optional dependency: the `cryptography` package. When it is missing we
# still import cleanly, but HAS_OPENSSH_SUPPORT is False and the algorithm
# table is empty so callers can report a friendly error.
try:
    from cryptography import __version__ as CRYPTOGRAPHY_VERSION
    from cryptography.exceptions import InvalidSignature, UnsupportedAlgorithm
    from cryptography.hazmat.primitives import hashes, serialization
    from cryptography.hazmat.primitives.asymmetric import dsa, ec, padding, rsa
    from cryptography.hazmat.primitives.asymmetric.ed448 import Ed448PrivateKey
    from cryptography.hazmat.primitives.asymmetric.ed25519 import (
        Ed25519PrivateKey,
        Ed25519PublicKey,
    )

    HAS_OPENSSH_SUPPORT = True

    # Per-key-type generation/validation parameters:
    #   default_size  - key size used when the caller does not specify one
    #   valid_sizes   - accepted key sizes for generation
    #   signer_params - keyword arguments passed to key.sign()/verify()
    _ALGORITHM_PARAMETERS = {
        "rsa": {
            "default_size": 2048,
            # NOTE(review): range() excludes its stop value, so 16384-bit RSA
            # keys are rejected — confirm whether the bound should be inclusive.
            "valid_sizes": range(1024, 16384),
            "signer_params": {
                "padding": padding.PSS(
                    mgf=padding.MGF1(hashes.SHA256()),
                    salt_length=padding.PSS.MAX_LENGTH,
                ),
                "algorithm": hashes.SHA256(),
            },
        },
        "dsa": {
            "default_size": 1024,
            "valid_sizes": [1024],
            "signer_params": {
                "algorithm": hashes.SHA256(),
            },
        },
        "ed25519": {
            # Ed25519 has a fixed key size; no signer parameters are needed.
            "default_size": 256,
            "valid_sizes": [256],
            "signer_params": {},
        },
        "ecdsa": {
            "default_size": 256,
            "valid_sizes": [256, 384, 521],
            "signer_params": {
                "signature_algorithm": ec.ECDSA(hashes.SHA256()),
            },
            # Maps the requested size to the corresponding NIST curve.
            "curves": {
                256: ec.SECP256R1(),
                384: ec.SECP384R1(),
                521: ec.SECP521R1(),
            },
        },
    }
except ImportError:
    HAS_OPENSSH_SUPPORT = False
    CRYPTOGRAPHY_VERSION = "0.0"
    _ALGORITHM_PARAMETERS = {}
|
||||||
|
|
||||||
|
from ansible_collections.community.crypto.plugins.module_utils._crypto.cryptography_support import (
|
||||||
|
is_potential_certificate_issuer_private_key,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
if t.TYPE_CHECKING:
|
||||||
|
KeyFormat = t.Literal["SSH", "PKCS8", "PKCS1"]
|
||||||
|
KeySerializationFormat = t.Literal["PEM", "DER", "SSH"]
|
||||||
|
KeyType = t.Literal["rsa", "dsa", "ed25519", "ecdsa"]
|
||||||
|
|
||||||
|
PrivateKeyTypes = t.Union[
|
||||||
|
rsa.RSAPrivateKey,
|
||||||
|
dsa.DSAPrivateKey,
|
||||||
|
ec.EllipticCurvePrivateKey,
|
||||||
|
Ed25519PrivateKey,
|
||||||
|
]
|
||||||
|
PublicKeyTypes = t.Union[
|
||||||
|
rsa.RSAPublicKey, dsa.DSAPublicKey, ec.EllipticCurvePublicKey, Ed25519PublicKey
|
||||||
|
]
|
||||||
|
|
||||||
|
from cryptography.hazmat.primitives.asymmetric.types import (
|
||||||
|
PublicKeyTypes as AllPublicKeyTypes,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
_TEXT_ENCODING = "UTF-8"
|
||||||
|
|
||||||
|
|
||||||
|
class OpenSSHError(Exception):
    """Base class for all OpenSSH key handling errors raised by this module util."""

    pass


class InvalidAlgorithmError(OpenSSHError):
    """Raised when an unsupported or unknown algorithm is requested."""

    pass


class InvalidCommentError(OpenSSHError):
    """Raised when a public key comment fails validation."""

    pass


class InvalidDataError(OpenSSHError):
    """Raised when data passed for signing is not byteslike."""

    pass


class InvalidPrivateKeyFileError(OpenSSHError):
    """Raised when a private key file cannot be read or parsed."""

    pass


class InvalidPublicKeyFileError(OpenSSHError):
    """Raised when a public key file is unreadable or does not match its private key."""

    pass


class InvalidKeyFormatError(OpenSSHError):
    """Raised when an unsupported key serialization format is requested."""

    pass


class InvalidKeySizeError(OpenSSHError):
    """Raised when the requested key size is invalid for the key type."""

    pass


class InvalidKeyTypeError(OpenSSHError):
    """Raised when the key type is unknown or mismatched."""

    pass


class InvalidPassphraseError(OpenSSHError):
    """Raised when a passphrase fails to decrypt a private key."""

    pass


class InvalidSignatureError(OpenSSHError):
    """Raised when signature verification fails."""

    pass
|
||||||
|
|
||||||
|
|
||||||
|
_AsymmetricKeypair = t.TypeVar("_AsymmetricKeypair", bound="AsymmetricKeypair")
|
||||||
|
|
||||||
|
|
||||||
|
class AsymmetricKeypair:
    """Container for newly generated asymmetric key pairs or those loaded from existing files"""

    @classmethod
    def generate(
        cls: t.Type[_AsymmetricKeypair],
        *,
        keytype: KeyType = "rsa",
        size: int | None = None,
        passphrase: bytes | None = None,
    ) -> _AsymmetricKeypair:
        """Returns an Asymmetric_Keypair object generated with the supplied parameters
        or defaults to an unencrypted RSA-2048 key

        :keytype: One of rsa, dsa, ecdsa, ed25519
        :size: The key length for newly generated keys
        :passphrase: Secret of type Bytes used to encrypt the private key being generated
        :raises InvalidKeyTypeError: for unknown key types
        :raises InvalidKeySizeError: for sizes not valid for the key type
        """

        if keytype not in _ALGORITHM_PARAMETERS:
            raise InvalidKeyTypeError(
                f"{keytype} is not a valid keytype. Valid keytypes are {', '.join(_ALGORITHM_PARAMETERS)}"
            )

        # Fall back to the per-algorithm default size; otherwise validate the
        # caller-supplied size against the algorithm's accepted sizes.
        if not size:
            size = _ALGORITHM_PARAMETERS[keytype]["default_size"]  # type: ignore
        else:
            if size not in _ALGORITHM_PARAMETERS[keytype]["valid_sizes"]:  # type: ignore
                raise InvalidKeySizeError(
                    f"{size} is not a valid key size for {keytype} keys"
                )
        size = t.cast(int, size)

        privatekey: PrivateKeyTypes
        if passphrase:
            encryption_algorithm = get_encryption_algorithm(passphrase)
        else:
            encryption_algorithm = serialization.NoEncryption()

        if keytype == "rsa":
            privatekey = rsa.generate_private_key(
                # Public exponent should always be 65537 to prevent issues
                # if improper padding is used during signing
                public_exponent=65537,
                key_size=size,
            )
        elif keytype == "dsa":
            privatekey = dsa.generate_private_key(
                key_size=size,
            )
        elif keytype == "ed25519":
            privatekey = Ed25519PrivateKey.generate()
        elif keytype == "ecdsa":
            # `size` selects the NIST curve (256/384/521) from the table.
            privatekey = ec.generate_private_key(
                _ALGORITHM_PARAMETERS["ecdsa"]["curves"][size],  # type: ignore
            )

        publickey = privatekey.public_key()

        return cls(
            keytype=keytype,
            size=size,
            privatekey=privatekey,
            publickey=publickey,
            encryption_algorithm=encryption_algorithm,
        )

    @classmethod
    def load(
        cls: t.Type[_AsymmetricKeypair],
        *,
        path: str | os.PathLike,
        passphrase: bytes | None = None,
        private_key_format: KeySerializationFormat = "PEM",
        public_key_format: KeySerializationFormat = "PEM",
        no_public_key: bool = False,
    ) -> _AsymmetricKeypair:
        """Returns an Asymmetric_Keypair object loaded from the supplied file path

        :path: A path to an existing private key to be loaded
        :passphrase: Secret of type bytes used to decrypt the private key being loaded
        :private_key_format: Format of private key to be loaded
        :public_key_format: Format of public key to be loaded
        :no_public_key: Set 'True' to only load a private key and automatically populate the matching public key
        :raises InvalidKeyTypeError: if private and public key types do not match
        """

        if passphrase:
            encryption_algorithm = get_encryption_algorithm(passphrase)
        else:
            encryption_algorithm = serialization.NoEncryption()

        privatekey = load_privatekey(
            path=path, passphrase=passphrase, key_format=private_key_format
        )
        publickey: AllPublicKeyTypes
        if no_public_key:
            # Derive the public key from the private key instead of reading
            # the companion "<path>.pub" file.
            publickey = privatekey.public_key()
        else:
            # TODO: Maybe we should check whether the public key actually fits the private key?
            publickey = load_publickey(
                path=str(path) + ".pub", key_format=public_key_format
            )

        # Ed25519 keys are always of size 256 and do not have a key_size attribute
        if isinstance(privatekey, Ed25519PrivateKey):
            size: int = _ALGORITHM_PARAMETERS["ed25519"]["default_size"]  # type: ignore
        else:
            size = privatekey.key_size

        # Determine the key type from the private key object and make sure
        # the loaded public key is of the matching type.
        keytype: KeyType
        if isinstance(privatekey, rsa.RSAPrivateKey):
            keytype = "rsa"
            if not isinstance(publickey, rsa.RSAPublicKey):
                raise InvalidKeyTypeError(
                    f"Private key is an RSA key, but public key is of type '{type(publickey)}'"
                )
        elif isinstance(privatekey, dsa.DSAPrivateKey):
            keytype = "dsa"
            if not isinstance(publickey, dsa.DSAPublicKey):
                raise InvalidKeyTypeError(
                    f"Private key is a DSA key, but public key is of type '{type(publickey)}'"
                )
        elif isinstance(privatekey, ec.EllipticCurvePrivateKey):
            keytype = "ecdsa"
            if not isinstance(publickey, ec.EllipticCurvePublicKey):
                raise InvalidKeyTypeError(
                    f"Private key is an Elliptic Curve key, but public key is of type '{type(publickey)}'"
                )
        elif isinstance(privatekey, Ed25519PrivateKey):
            keytype = "ed25519"
            if not isinstance(publickey, Ed25519PublicKey):
                raise InvalidKeyTypeError(
                    f"Private key is an Ed25519 key, but public key is of type '{type(publickey)}'"
                )
        else:
            raise InvalidKeyTypeError(f"Key type '{type(privatekey)}' is not supported")

        return cls(
            keytype=keytype,
            size=size,
            privatekey=privatekey,
            publickey=publickey,
            encryption_algorithm=encryption_algorithm,
        )

    def __init__(
        self,
        *,
        keytype: KeyType,
        size: int,
        privatekey: PrivateKeyTypes,
        publickey: PublicKeyTypes,
        encryption_algorithm: serialization.KeySerializationEncryption,
    ) -> None:
        """
        :keytype: One of rsa, dsa, ecdsa, ed25519
        :size: The key length for the private key of this key pair
        :privatekey: Private key object of this key pair
        :publickey: Public key object of this key pair
        :encryption_algorithm: Hashed secret used to encrypt the private key of this key pair
        :raises InvalidPublicKeyFileError: if the keys do not form a matching pair
        """

        self.__size = size
        self.__keytype = keytype
        self.__privatekey = privatekey
        self.__publickey = publickey
        self.__encryption_algorithm = encryption_algorithm

        # Sanity check: sign and verify a fixed message to prove the private
        # and public key actually belong together.
        try:
            self.verify(signature=self.sign(b"message"), data=b"message")
        except InvalidSignatureError as e:
            raise InvalidPublicKeyFileError(
                "The private key and public key of this keypair do not match"
            ) from e

    def __eq__(self, other: object) -> bool:
        """Keypairs are equal when public keys and encryption algorithms match."""
        if not isinstance(other, AsymmetricKeypair):
            return NotImplemented

        return compare_publickeys(
            self.public_key, other.public_key
        ) and compare_encryption_algorithms(
            self.encryption_algorithm, other.encryption_algorithm
        )

    def __ne__(self, other: object) -> bool:
        return not self == other

    @property
    def private_key(self) -> PrivateKeyTypes:
        """Returns the private key of this key pair"""

        return self.__privatekey

    @property
    def public_key(self) -> PublicKeyTypes:
        """Returns the public key of this key pair"""

        return self.__publickey

    @property
    def size(self) -> int:
        """Returns the size of the private key of this key pair"""

        return self.__size

    @property
    def key_type(self) -> KeyType:
        """Returns the key type of this key pair"""

        return self.__keytype

    @property
    def encryption_algorithm(self) -> serialization.KeySerializationEncryption:
        """Returns the key encryption algorithm of this key pair"""

        return self.__encryption_algorithm

    def sign(self, data: bytes) -> bytes:
        """Returns signature of data signed with the private key of this key pair

        :data: byteslike data to sign
        :raises InvalidDataError: if *data* is not byteslike
        """

        try:
            return self.__privatekey.sign(
                data, **_ALGORITHM_PARAMETERS[self.__keytype]["signer_params"]  # type: ignore
            )
        except TypeError as e:
            raise InvalidDataError(e) from e

    def verify(self, *, signature: bytes, data: bytes) -> None:
        """Verifies that the signature associated with the provided data was signed
        by the private key of this key pair.

        :signature: signature to verify
        :data: byteslike data signed by the provided signature
        :raises InvalidSignatureError: if the signature does not match
        """
        try:
            self.__publickey.verify(
                signature,
                data,
                **_ALGORITHM_PARAMETERS[self.__keytype]["signer_params"],  # type: ignore
            )
        except InvalidSignature as e:
            raise InvalidSignatureError from e

    def update_passphrase(self, passphrase: bytes | None = None) -> None:
        """Updates the encryption algorithm of this key pair

        :passphrase: Byte secret used to encrypt this key pair; None removes encryption
        """

        if passphrase:
            self.__encryption_algorithm = get_encryption_algorithm(passphrase)
        else:
            self.__encryption_algorithm = serialization.NoEncryption()
|
||||||
|
|
||||||
|
|
||||||
|
_OpensshKeypair = t.TypeVar("_OpensshKeypair", bound="OpensshKeypair")
|
||||||
|
|
||||||
|
|
||||||
|
class OpensshKeypair:
|
||||||
|
"""Container for OpenSSH encoded asymmetric key pairs"""
|
||||||
|
|
||||||
|
    @classmethod
    def generate(
        cls: t.Type[_OpensshKeypair],
        *,
        keytype: KeyType = "rsa",
        size: int | None = None,
        passphrase: bytes | None = None,
        comment: str | None = None,
    ) -> _OpensshKeypair:
        """Returns an Openssh_Keypair object generated using the supplied parameters or defaults to a RSA-2048 key

        :keytype: One of rsa, dsa, ecdsa, ed25519
        :size: The key length for newly generated keys
        :passphrase: Secret of type Bytes used to encrypt the newly generated private key
        :comment: Comment for a newly generated OpenSSH public key
        """

        # Default comment mirrors ssh-keygen's "user@host" convention.
        if comment is None:
            comment = f"{getuser()}@{gethostname()}"

        asym_keypair = AsymmetricKeypair.generate(
            keytype=keytype, size=size, passphrase=passphrase
        )
        openssh_privatekey = cls.encode_openssh_privatekey(
            asym_keypair=asym_keypair, key_format="SSH"
        )
        openssh_publickey = cls.encode_openssh_publickey(
            asym_keypair=asym_keypair, comment=comment
        )
        fingerprint = calculate_fingerprint(openssh_publickey)

        return cls(
            asym_keypair=asym_keypair,
            openssh_privatekey=openssh_privatekey,
            openssh_publickey=openssh_publickey,
            fingerprint=fingerprint,
            comment=comment,
        )
|
||||||
|
|
||||||
|
    @classmethod
    def load(
        cls: t.Type[_OpensshKeypair],
        *,
        path: str | os.PathLike,
        passphrase: bytes | None = None,
        no_public_key: bool = False,
    ) -> _OpensshKeypair:
        """Returns an Openssh_Keypair object loaded from the supplied file path

        :path: A path to an existing private key to be loaded
        :passphrase: Secret used to decrypt the private key being loaded
        :no_public_key: Set 'True' to only load a private key and automatically populate the matching public key
        """

        # Without a "<path>.pub" file there is no stored comment to recover.
        if no_public_key:
            comment = ""
        else:
            comment = extract_comment(str(path) + ".pub")

        asym_keypair = AsymmetricKeypair.load(
            path=path,
            passphrase=passphrase,
            private_key_format="SSH",
            public_key_format="SSH",
            no_public_key=no_public_key,
        )
        openssh_privatekey = cls.encode_openssh_privatekey(
            asym_keypair=asym_keypair, key_format="SSH"
        )
        openssh_publickey = cls.encode_openssh_publickey(
            asym_keypair=asym_keypair, comment=comment
        )
        fingerprint = calculate_fingerprint(openssh_publickey)

        return cls(
            asym_keypair=asym_keypair,
            openssh_privatekey=openssh_privatekey,
            openssh_publickey=openssh_publickey,
            fingerprint=fingerprint,
            comment=comment,
        )
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def encode_openssh_privatekey(
|
||||||
|
*, asym_keypair: AsymmetricKeypair, key_format: KeyFormat
|
||||||
|
) -> bytes:
|
||||||
|
"""Returns an OpenSSH encoded private key for a given keypair
|
||||||
|
|
||||||
|
:asym_keypair: Asymmetric_Keypair from the private key is extracted
|
||||||
|
:key_format: Format of the encoded private key.
|
||||||
|
"""
|
||||||
|
|
||||||
|
if key_format == "SSH":
|
||||||
|
privatekey_format = serialization.PrivateFormat.OpenSSH
|
||||||
|
elif key_format == "PKCS8":
|
||||||
|
privatekey_format = serialization.PrivateFormat.PKCS8
|
||||||
|
elif key_format == "PKCS1":
|
||||||
|
if asym_keypair.key_type == "ed25519":
|
||||||
|
raise InvalidKeyFormatError(
|
||||||
|
"ed25519 keys cannot be represented in PKCS1 format"
|
||||||
|
)
|
||||||
|
privatekey_format = serialization.PrivateFormat.TraditionalOpenSSL
|
||||||
|
else:
|
||||||
|
raise InvalidKeyFormatError(
|
||||||
|
"The accepted private key formats are SSH, PKCS8, and PKCS1"
|
||||||
|
)
|
||||||
|
|
||||||
|
encoded_privatekey = asym_keypair.private_key.private_bytes(
|
||||||
|
encoding=serialization.Encoding.PEM,
|
||||||
|
format=privatekey_format,
|
||||||
|
encryption_algorithm=asym_keypair.encryption_algorithm,
|
||||||
|
)
|
||||||
|
|
||||||
|
return encoded_privatekey
|
||||||
|
|
||||||
|
    @staticmethod
    def encode_openssh_publickey(
        *, asym_keypair: AsymmetricKeypair, comment: str
    ) -> bytes:
        """Returns an OpenSSH encoded public key for a given keypair

        :asym_keypair: Asymmetric_Keypair from which the public key is extracted
        :comment: Comment to apply to the end of the returned OpenSSH encoded public key
        :raises InvalidCommentError: if the comment fails validation
        """
        encoded_publickey = asym_keypair.public_key.public_bytes(
            encoding=serialization.Encoding.OpenSSH,
            format=serialization.PublicFormat.OpenSSH,
        )

        validate_comment(comment)

        # Append " <comment>" only when a non-empty comment was given.
        encoded_publickey += (
            (b" " + comment.encode(encoding=_TEXT_ENCODING)) if comment else b""
        )

        return encoded_publickey
|
||||||
|
|
||||||
|
    def __init__(
        self,
        *,
        asym_keypair: AsymmetricKeypair,
        openssh_privatekey: bytes,
        openssh_publickey: bytes,
        fingerprint: str,
        comment: str | None,
    ) -> None:
        """
        :asym_keypair: An Asymmetric_Keypair object from which the OpenSSH encoded keypair is derived
        :openssh_privatekey: An OpenSSH encoded private key
        :openssh_publickey: An OpenSSH encoded public key
        :fingerprint: The fingerprint of the OpenSSH encoded public key of this keypair
        :comment: Comment applied to the OpenSSH public key of this keypair
        """

        self.__asym_keypair = asym_keypair
        self.__openssh_privatekey = openssh_privatekey
        self.__openssh_publickey = openssh_publickey
        self.__fingerprint = fingerprint
        self.__comment = comment
|
||||||
|
|
||||||
|
def __eq__(self, other: object) -> bool:
|
||||||
|
if not isinstance(other, OpensshKeypair):
|
||||||
|
return NotImplemented
|
||||||
|
|
||||||
|
return (
|
||||||
|
self.asymmetric_keypair == other.asymmetric_keypair
|
||||||
|
and self.comment == other.comment
|
||||||
|
)
|
||||||
|
|
||||||
|
@property
|
||||||
|
def asymmetric_keypair(self) -> AsymmetricKeypair:
|
||||||
|
"""Returns the underlying asymmetric key pair of this OpenSSH encoded key pair"""
|
||||||
|
|
||||||
|
return self.__asym_keypair
|
||||||
|
|
||||||
|
@property
|
||||||
|
def private_key(self) -> bytes:
|
||||||
|
"""Returns the OpenSSH formatted private key of this key pair"""
|
||||||
|
|
||||||
|
return self.__openssh_privatekey
|
||||||
|
|
||||||
|
@property
|
||||||
|
def public_key(self) -> bytes:
|
||||||
|
"""Returns the OpenSSH formatted public key of this key pair"""
|
||||||
|
|
||||||
|
return self.__openssh_publickey
|
||||||
|
|
||||||
|
@property
|
||||||
|
def size(self) -> int:
|
||||||
|
"""Returns the size of the private key of this key pair"""
|
||||||
|
|
||||||
|
return self.__asym_keypair.size
|
||||||
|
|
||||||
|
@property
|
||||||
|
def key_type(self) -> KeyType:
|
||||||
|
"""Returns the key type of this key pair"""
|
||||||
|
|
||||||
|
return self.__asym_keypair.key_type
|
||||||
|
|
||||||
|
@property
|
||||||
|
def fingerprint(self) -> str:
|
||||||
|
"""Returns the fingerprint (SHA256 Hash) of the public key of this key pair"""
|
||||||
|
|
||||||
|
return self.__fingerprint
|
||||||
|
|
||||||
|
@property
|
||||||
|
def comment(self) -> str | None:
|
||||||
|
"""Returns the comment applied to the OpenSSH formatted public key of this key pair"""
|
||||||
|
|
||||||
|
return self.__comment
|
||||||
|
|
||||||
|
@comment.setter
|
||||||
|
def comment(self, comment: str) -> bytes:
|
||||||
|
"""Updates the comment applied to the OpenSSH formatted public key of this key pair
|
||||||
|
|
||||||
|
:comment: Text to update the OpenSSH public key comment
|
||||||
|
"""
|
||||||
|
|
||||||
|
validate_comment(comment)
|
||||||
|
|
||||||
|
self.__comment = comment
|
||||||
|
encoded_comment = (
|
||||||
|
f" {self.__comment}".encode(encoding=_TEXT_ENCODING)
|
||||||
|
if self.__comment
|
||||||
|
else b""
|
||||||
|
)
|
||||||
|
self.__openssh_publickey = (
|
||||||
|
b" ".join(self.__openssh_publickey.split(b" ", 2)[:2]) + encoded_comment
|
||||||
|
)
|
||||||
|
return self.__openssh_publickey
|
||||||
|
|
||||||
|
def update_passphrase(self, passphrase: bytes | None) -> None:
|
||||||
|
"""Updates the passphrase used to encrypt the private key of this keypair
|
||||||
|
|
||||||
|
:passphrase: Text secret used for encryption
|
||||||
|
"""
|
||||||
|
|
||||||
|
self.__asym_keypair.update_passphrase(passphrase)
|
||||||
|
self.__openssh_privatekey = OpensshKeypair.encode_openssh_privatekey(
|
||||||
|
asym_keypair=self.__asym_keypair, key_format="SSH"
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
def load_privatekey(
|
||||||
|
*,
|
||||||
|
path: str | os.PathLike,
|
||||||
|
passphrase: bytes | None,
|
||||||
|
key_format: KeySerializationFormat,
|
||||||
|
) -> PrivateKeyTypes:
|
||||||
|
privatekey_loaders = {
|
||||||
|
"PEM": serialization.load_pem_private_key,
|
||||||
|
"DER": serialization.load_der_private_key,
|
||||||
|
"SSH": serialization.load_ssh_private_key,
|
||||||
|
}
|
||||||
|
|
||||||
|
try:
|
||||||
|
privatekey_loader = privatekey_loaders[key_format]
|
||||||
|
except KeyError as e:
|
||||||
|
raise InvalidKeyFormatError(
|
||||||
|
f"{key_format} is not a valid key format ({','.join(privatekey_loaders)})"
|
||||||
|
) from e
|
||||||
|
|
||||||
|
if not os.path.exists(path):
|
||||||
|
raise InvalidPrivateKeyFileError(f"No file was found at {path}")
|
||||||
|
|
||||||
|
try:
|
||||||
|
with open(path, "rb") as f:
|
||||||
|
content = f.read()
|
||||||
|
|
||||||
|
try:
|
||||||
|
privatekey = privatekey_loader(
|
||||||
|
data=content,
|
||||||
|
password=passphrase,
|
||||||
|
)
|
||||||
|
except ValueError as exc:
|
||||||
|
# Revert to PEM if key could not be loaded in SSH format
|
||||||
|
if key_format == "SSH":
|
||||||
|
privatekey = privatekey_loaders["PEM"](
|
||||||
|
data=content,
|
||||||
|
password=passphrase,
|
||||||
|
)
|
||||||
|
else:
|
||||||
|
raise InvalidPrivateKeyFileError(exc) from exc
|
||||||
|
except ValueError as e:
|
||||||
|
raise InvalidPrivateKeyFileError(e) from e
|
||||||
|
except TypeError as e:
|
||||||
|
raise InvalidPassphraseError(e) from e
|
||||||
|
except UnsupportedAlgorithm as e:
|
||||||
|
raise InvalidAlgorithmError(e) from e
|
||||||
|
|
||||||
|
if not is_potential_certificate_issuer_private_key(privatekey) or isinstance(
|
||||||
|
privatekey, Ed448PrivateKey
|
||||||
|
):
|
||||||
|
raise InvalidPrivateKeyFileError(
|
||||||
|
f"{privatekey} is not a supported private key type"
|
||||||
|
)
|
||||||
|
return privatekey
|
||||||
|
|
||||||
|
|
||||||
|
def load_publickey(
|
||||||
|
*, path: str | os.PathLike, key_format: KeySerializationFormat
|
||||||
|
) -> AllPublicKeyTypes:
|
||||||
|
publickey_loaders = {
|
||||||
|
"PEM": serialization.load_pem_public_key,
|
||||||
|
"DER": serialization.load_der_public_key,
|
||||||
|
"SSH": serialization.load_ssh_public_key,
|
||||||
|
}
|
||||||
|
|
||||||
|
try:
|
||||||
|
publickey_loader = publickey_loaders[key_format]
|
||||||
|
except KeyError as e:
|
||||||
|
raise InvalidKeyFormatError(
|
||||||
|
f"{key_format} is not a valid key format ({','.join(publickey_loaders)})"
|
||||||
|
) from e
|
||||||
|
|
||||||
|
if not os.path.exists(path):
|
||||||
|
raise InvalidPublicKeyFileError(f"No file was found at {path}")
|
||||||
|
|
||||||
|
try:
|
||||||
|
with open(path, "rb") as f:
|
||||||
|
content = f.read()
|
||||||
|
|
||||||
|
publickey = publickey_loader(
|
||||||
|
data=content,
|
||||||
|
)
|
||||||
|
except ValueError as e:
|
||||||
|
raise InvalidPublicKeyFileError(e) from e
|
||||||
|
except UnsupportedAlgorithm as e:
|
||||||
|
raise InvalidAlgorithmError(e) from e
|
||||||
|
|
||||||
|
return publickey
|
||||||
|
|
||||||
|
|
||||||
|
def compare_publickeys(pk1: PublicKeyTypes, pk2: PublicKeyTypes) -> bool:
|
||||||
|
a = isinstance(pk1, Ed25519PublicKey)
|
||||||
|
b = isinstance(pk2, Ed25519PublicKey)
|
||||||
|
if a or b:
|
||||||
|
if not a or not b:
|
||||||
|
return False
|
||||||
|
a_bytes = pk1.public_bytes(
|
||||||
|
serialization.Encoding.Raw, serialization.PublicFormat.Raw
|
||||||
|
)
|
||||||
|
b_bytes = pk2.public_bytes(
|
||||||
|
serialization.Encoding.Raw, serialization.PublicFormat.Raw
|
||||||
|
)
|
||||||
|
return a_bytes == b_bytes
|
||||||
|
return pk1.public_numbers() == pk2.public_numbers() # type: ignore
|
||||||
|
|
||||||
|
|
||||||
|
def compare_encryption_algorithms(
|
||||||
|
ea1: serialization.KeySerializationEncryption,
|
||||||
|
ea2: serialization.KeySerializationEncryption,
|
||||||
|
) -> bool:
|
||||||
|
if isinstance(ea1, serialization.NoEncryption) and isinstance(
|
||||||
|
ea2, serialization.NoEncryption
|
||||||
|
):
|
||||||
|
return True
|
||||||
|
if isinstance(ea1, serialization.BestAvailableEncryption) and isinstance(
|
||||||
|
ea2, serialization.BestAvailableEncryption
|
||||||
|
):
|
||||||
|
return ea1.password == ea2.password
|
||||||
|
return False
|
||||||
|
|
||||||
|
|
||||||
|
def get_encryption_algorithm(
|
||||||
|
passphrase: bytes,
|
||||||
|
) -> serialization.KeySerializationEncryption:
|
||||||
|
try:
|
||||||
|
return serialization.BestAvailableEncryption(passphrase)
|
||||||
|
except ValueError as e:
|
||||||
|
raise InvalidPassphraseError(e) from e
|
||||||
|
|
||||||
|
|
||||||
|
def validate_comment(comment: str) -> None:
|
||||||
|
if not hasattr(comment, "encode"):
|
||||||
|
raise InvalidCommentError(f"{comment} cannot be encoded to text")
|
||||||
|
|
||||||
|
|
||||||
|
def extract_comment(path: str | os.PathLike) -> str:
|
||||||
|
|
||||||
|
if not os.path.exists(path):
|
||||||
|
raise InvalidPublicKeyFileError(f"No file was found at {path}")
|
||||||
|
|
||||||
|
try:
|
||||||
|
with open(path, "rb") as f:
|
||||||
|
fields = f.read().split(b" ", 2)
|
||||||
|
if len(fields) == 3:
|
||||||
|
comment = fields[2].decode(_TEXT_ENCODING)
|
||||||
|
else:
|
||||||
|
comment = ""
|
||||||
|
except (IOError, OSError) as e:
|
||||||
|
raise InvalidPublicKeyFileError(e) from e
|
||||||
|
|
||||||
|
return comment
|
||||||
|
|
||||||
|
|
||||||
|
def calculate_fingerprint(openssh_publickey: bytes) -> str:
|
||||||
|
digest = hashes.Hash(hashes.SHA256())
|
||||||
|
decoded_pubkey = b64decode(openssh_publickey.split(b" ")[1])
|
||||||
|
digest.update(decoded_pubkey)
|
||||||
|
|
||||||
|
value = b64encode(digest.finalize()).decode(encoding=_TEXT_ENCODING).rstrip("=")
|
||||||
|
return f"SHA256:{value}"
|
||||||
|
|
||||||
|
|
||||||
|
__all__ = (
|
||||||
|
"HAS_OPENSSH_SUPPORT",
|
||||||
|
"CRYPTOGRAPHY_VERSION",
|
||||||
|
"OpenSSHError",
|
||||||
|
"InvalidAlgorithmError",
|
||||||
|
"InvalidCommentError",
|
||||||
|
"InvalidDataError",
|
||||||
|
"InvalidPrivateKeyFileError",
|
||||||
|
"InvalidPublicKeyFileError",
|
||||||
|
"InvalidKeyFormatError",
|
||||||
|
"InvalidKeySizeError",
|
||||||
|
"InvalidKeyTypeError",
|
||||||
|
"InvalidPassphraseError",
|
||||||
|
"InvalidSignatureError",
|
||||||
|
"AsymmetricKeypair",
|
||||||
|
"OpensshKeypair",
|
||||||
|
"load_privatekey",
|
||||||
|
"load_publickey",
|
||||||
|
"compare_publickeys",
|
||||||
|
"compare_encryption_algorithms",
|
||||||
|
"get_encryption_algorithm",
|
||||||
|
"validate_comment",
|
||||||
|
"extract_comment",
|
||||||
|
"calculate_fingerprint",
|
||||||
|
)
|
||||||
360
plugins/module_utils/_openssh/utils.py
Normal file
360
plugins/module_utils/_openssh/utils.py
Normal file
@@ -0,0 +1,360 @@
|
|||||||
|
# Copyright (c) 2020, Doug Stanley <doug+ansible@technologixllc.com>
|
||||||
|
# Copyright (c) 2021, Andrew Pantuso (@ajpantuso) <ajpantuso@gmail.com>
|
||||||
|
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||||
|
# SPDX-License-Identifier: GPL-3.0-or-later
|
||||||
|
|
||||||
|
# Note that this module util is **PRIVATE** to the collection. It can have breaking changes at any time.
|
||||||
|
# Do not use this from other collections or standalone plugins/modules!
|
||||||
|
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import os
|
||||||
|
import re
|
||||||
|
import typing as t
|
||||||
|
from contextlib import contextmanager
|
||||||
|
from struct import Struct
|
||||||
|
|
||||||
|
|
||||||
|
# Protocol References
|
||||||
|
# -------------------
|
||||||
|
# https://datatracker.ietf.org/doc/html/rfc4251
|
||||||
|
# https://datatracker.ietf.org/doc/html/rfc4253
|
||||||
|
# https://datatracker.ietf.org/doc/html/rfc5656
|
||||||
|
# https://datatracker.ietf.org/doc/html/rfc8032
|
||||||
|
#
|
||||||
|
# Inspired by:
|
||||||
|
# ------------
|
||||||
|
# https://github.com/pyca/cryptography/blob/main/src/cryptography/hazmat/primitives/serialization/ssh.py
|
||||||
|
# https://github.com/paramiko/paramiko/blob/master/paramiko/message.py
|
||||||
|
|
||||||
|
# 0 (False) or 1 (True) encoded as a single byte
|
||||||
|
_BOOLEAN = Struct(b"?")
|
||||||
|
# Unsigned 8-bit integer in network-byte-order
|
||||||
|
_UBYTE = Struct(b"!B")
|
||||||
|
_UBYTE_MAX = 0xFF
|
||||||
|
# Unsigned 32-bit integer in network-byte-order
|
||||||
|
_UINT32 = Struct(b"!I")
|
||||||
|
# Unsigned 32-bit little endian integer
|
||||||
|
_UINT32_LE = Struct(b"<I")
|
||||||
|
_UINT32_MAX = 0xFFFFFFFF
|
||||||
|
# Unsigned 64-bit integer in network-byte-order
|
||||||
|
_UINT64 = Struct(b"!Q")
|
||||||
|
_UINT64_MAX = 0xFFFFFFFFFFFFFFFF
|
||||||
|
|
||||||
|
|
||||||
|
_T = t.TypeVar("_T")
|
||||||
|
|
||||||
|
|
||||||
|
def any_in(sequence: t.Iterable[_T], *elements: _T) -> bool:
|
||||||
|
return any(e in sequence for e in elements)
|
||||||
|
|
||||||
|
|
||||||
|
def file_mode(path: str | os.PathLike) -> int:
|
||||||
|
if not os.path.exists(path):
|
||||||
|
return 0o000
|
||||||
|
return os.stat(path).st_mode & 0o777
|
||||||
|
|
||||||
|
|
||||||
|
def parse_openssh_version(version_string: str) -> str | None:
|
||||||
|
"""Parse the version output of ssh -V and return version numbers that can be compared"""
|
||||||
|
|
||||||
|
parsed_result = re.match(
|
||||||
|
r"^.*openssh_(?P<version>[0-9.]+)(p?[0-9]+)[^0-9]*.*$", version_string.lower()
|
||||||
|
)
|
||||||
|
if parsed_result is not None:
|
||||||
|
version = parsed_result.group("version").strip()
|
||||||
|
else:
|
||||||
|
version = None
|
||||||
|
|
||||||
|
return version
|
||||||
|
|
||||||
|
|
||||||
|
@contextmanager
|
||||||
|
def secure_open(*, path: str | os.PathLike, mode: int) -> t.Iterator[int]:
|
||||||
|
fd = os.open(path, os.O_WRONLY | os.O_CREAT | os.O_TRUNC, mode)
|
||||||
|
try:
|
||||||
|
yield fd
|
||||||
|
finally:
|
||||||
|
os.close(fd)
|
||||||
|
|
||||||
|
|
||||||
|
def secure_write(*, path: str | os.PathLike, mode: int, content: bytes) -> None:
|
||||||
|
with secure_open(path=path, mode=mode) as fd:
|
||||||
|
os.write(fd, content)
|
||||||
|
|
||||||
|
|
||||||
|
# See https://datatracker.ietf.org/doc/html/rfc4251#section-5 for SSH data types
|
||||||
|
class OpensshParser:
|
||||||
|
"""Parser for OpenSSH encoded objects"""
|
||||||
|
|
||||||
|
BOOLEAN_OFFSET = 1
|
||||||
|
UINT32_OFFSET = 4
|
||||||
|
UINT64_OFFSET = 8
|
||||||
|
|
||||||
|
def __init__(self, *, data: bytes | bytearray) -> None:
|
||||||
|
if not isinstance(data, (bytes, bytearray)):
|
||||||
|
raise TypeError(f"Data must be bytes-like not {type(data)}")
|
||||||
|
|
||||||
|
self._data = memoryview(data)
|
||||||
|
self._pos = 0
|
||||||
|
|
||||||
|
def boolean(self) -> bool:
|
||||||
|
next_pos = self._check_position(self.BOOLEAN_OFFSET)
|
||||||
|
|
||||||
|
value = _BOOLEAN.unpack(self._data[self._pos : next_pos])[0]
|
||||||
|
self._pos = next_pos
|
||||||
|
return value
|
||||||
|
|
||||||
|
def uint32(self) -> int:
|
||||||
|
next_pos = self._check_position(self.UINT32_OFFSET)
|
||||||
|
|
||||||
|
value = _UINT32.unpack(self._data[self._pos : next_pos])[0]
|
||||||
|
self._pos = next_pos
|
||||||
|
return value
|
||||||
|
|
||||||
|
def uint64(self) -> int:
|
||||||
|
next_pos = self._check_position(self.UINT64_OFFSET)
|
||||||
|
|
||||||
|
value = _UINT64.unpack(self._data[self._pos : next_pos])[0]
|
||||||
|
self._pos = next_pos
|
||||||
|
return value
|
||||||
|
|
||||||
|
def string(self) -> bytes:
|
||||||
|
length = self.uint32()
|
||||||
|
|
||||||
|
next_pos = self._check_position(length)
|
||||||
|
|
||||||
|
value = self._data[self._pos : next_pos]
|
||||||
|
self._pos = next_pos
|
||||||
|
# Cast to bytes is required as a memoryview slice is itself a memoryview
|
||||||
|
return bytes(value)
|
||||||
|
|
||||||
|
def mpint(self) -> int:
|
||||||
|
return self._big_int(self.string(), "big", signed=True)
|
||||||
|
|
||||||
|
def name_list(self) -> list[str]:
|
||||||
|
raw_string = self.string()
|
||||||
|
return raw_string.decode("ASCII").split(",")
|
||||||
|
|
||||||
|
# Convenience function, but not an official data type from SSH
|
||||||
|
def string_list(self) -> list[bytes]:
|
||||||
|
result = []
|
||||||
|
raw_string = self.string()
|
||||||
|
|
||||||
|
if raw_string:
|
||||||
|
parser = OpensshParser(data=raw_string)
|
||||||
|
while parser.remaining_bytes():
|
||||||
|
result.append(parser.string())
|
||||||
|
|
||||||
|
return result
|
||||||
|
|
||||||
|
# Convenience function, but not an official data type from SSH
|
||||||
|
def option_list(self) -> list[tuple[bytes, bytes]]:
|
||||||
|
result = []
|
||||||
|
raw_string = self.string()
|
||||||
|
|
||||||
|
if raw_string:
|
||||||
|
parser = OpensshParser(data=raw_string)
|
||||||
|
|
||||||
|
while parser.remaining_bytes():
|
||||||
|
name = parser.string()
|
||||||
|
data = parser.string()
|
||||||
|
if data:
|
||||||
|
# data is doubly-encoded
|
||||||
|
data = OpensshParser(data=data).string()
|
||||||
|
result.append((name, data))
|
||||||
|
|
||||||
|
return result
|
||||||
|
|
||||||
|
def seek(self, offset: int) -> int:
|
||||||
|
self._pos = self._check_position(offset)
|
||||||
|
|
||||||
|
return self._pos
|
||||||
|
|
||||||
|
def remaining_bytes(self) -> int:
|
||||||
|
return len(self._data) - self._pos
|
||||||
|
|
||||||
|
def _check_position(self, offset: int) -> int:
|
||||||
|
if self._pos + offset > len(self._data):
|
||||||
|
raise ValueError(f"Insufficient data remaining at position: {self._pos}")
|
||||||
|
if self._pos + offset < 0:
|
||||||
|
raise ValueError("Position cannot be less than zero.")
|
||||||
|
return self._pos + offset
|
||||||
|
|
||||||
|
@classmethod
|
||||||
|
def signature_data(cls, *, signature_string: bytes) -> dict[str, bytes | int]:
|
||||||
|
signature_data: dict[str, bytes | int] = {}
|
||||||
|
|
||||||
|
parser = cls(data=signature_string)
|
||||||
|
signature_type = parser.string()
|
||||||
|
signature_blob = parser.string()
|
||||||
|
|
||||||
|
blob_parser = cls(data=signature_blob)
|
||||||
|
if signature_type in (b"ssh-rsa", b"rsa-sha2-256", b"rsa-sha2-512"):
|
||||||
|
# https://datatracker.ietf.org/doc/html/rfc4253#section-6.6
|
||||||
|
# https://datatracker.ietf.org/doc/html/rfc8332#section-3
|
||||||
|
signature_data["s"] = cls._big_int(signature_blob, "big")
|
||||||
|
elif signature_type == b"ssh-dss":
|
||||||
|
# https://datatracker.ietf.org/doc/html/rfc4253#section-6.6
|
||||||
|
signature_data["r"] = cls._big_int(signature_blob[:20], "big")
|
||||||
|
signature_data["s"] = cls._big_int(signature_blob[20:], "big")
|
||||||
|
elif signature_type in (
|
||||||
|
b"ecdsa-sha2-nistp256",
|
||||||
|
b"ecdsa-sha2-nistp384",
|
||||||
|
b"ecdsa-sha2-nistp521",
|
||||||
|
):
|
||||||
|
# https://datatracker.ietf.org/doc/html/rfc5656#section-3.1.2
|
||||||
|
signature_data["r"] = blob_parser.mpint()
|
||||||
|
signature_data["s"] = blob_parser.mpint()
|
||||||
|
elif signature_type == b"ssh-ed25519":
|
||||||
|
# https://datatracker.ietf.org/doc/html/rfc8032#section-5.1.2
|
||||||
|
signature_data["R"] = cls._big_int(signature_blob[:32], "little")
|
||||||
|
signature_data["S"] = cls._big_int(signature_blob[32:], "little")
|
||||||
|
else:
|
||||||
|
raise ValueError(f"{signature_type!r} is not a valid signature type")
|
||||||
|
|
||||||
|
signature_data["signature_type"] = signature_type
|
||||||
|
|
||||||
|
return signature_data
|
||||||
|
|
||||||
|
@classmethod
|
||||||
|
def _big_int(
|
||||||
|
cls,
|
||||||
|
raw_string: bytes,
|
||||||
|
byte_order: t.Literal["big", "little"],
|
||||||
|
signed: bool = False,
|
||||||
|
) -> int:
|
||||||
|
if byte_order not in ("big", "little"):
|
||||||
|
raise ValueError(
|
||||||
|
f"Byte_order must be one of (big, little) not {byte_order}"
|
||||||
|
)
|
||||||
|
|
||||||
|
return int.from_bytes(raw_string, byte_order, signed=signed)
|
||||||
|
|
||||||
|
|
||||||
|
class _OpensshWriter:
|
||||||
|
"""Writes SSH encoded values to a bytes-like buffer
|
||||||
|
|
||||||
|
.. warning::
|
||||||
|
This class is a private API and must not be exported outside of the openssh module_utils.
|
||||||
|
It is not to be used to construct Openssh objects, but rather as a utility to assist
|
||||||
|
in validating parsed material.
|
||||||
|
"""
|
||||||
|
|
||||||
|
def __init__(self, *, buffer: bytearray | None = None):
|
||||||
|
if buffer is not None:
|
||||||
|
if not isinstance(buffer, bytearray):
|
||||||
|
raise TypeError(f"Buffer must be a bytearray, not {type(buffer)}")
|
||||||
|
else:
|
||||||
|
buffer = bytearray()
|
||||||
|
|
||||||
|
self._buff: bytearray = buffer
|
||||||
|
|
||||||
|
def boolean(self, value: bool) -> t.Self:
|
||||||
|
if not isinstance(value, bool):
|
||||||
|
raise TypeError(f"Value must be of type bool not {type(value)}")
|
||||||
|
|
||||||
|
self._buff.extend(_BOOLEAN.pack(value))
|
||||||
|
|
||||||
|
return self
|
||||||
|
|
||||||
|
def uint32(self, value: int) -> t.Self:
|
||||||
|
if not isinstance(value, int):
|
||||||
|
raise TypeError(f"Value must be of type int not {type(value)}")
|
||||||
|
if value < 0 or value > _UINT32_MAX:
|
||||||
|
raise ValueError(
|
||||||
|
f"Value must be a positive integer less than {_UINT32_MAX}"
|
||||||
|
)
|
||||||
|
|
||||||
|
self._buff.extend(_UINT32.pack(value))
|
||||||
|
|
||||||
|
return self
|
||||||
|
|
||||||
|
def uint64(self, value: int) -> t.Self:
|
||||||
|
if not isinstance(value, int):
|
||||||
|
raise TypeError(f"Value must be of type int not {type(value)}")
|
||||||
|
if value < 0 or value > _UINT64_MAX:
|
||||||
|
raise ValueError(
|
||||||
|
f"Value must be a positive integer less than {_UINT64_MAX}"
|
||||||
|
)
|
||||||
|
|
||||||
|
self._buff.extend(_UINT64.pack(value))
|
||||||
|
|
||||||
|
return self
|
||||||
|
|
||||||
|
def string(self, value: bytes | bytearray) -> t.Self:
|
||||||
|
if not isinstance(value, (bytes, bytearray)):
|
||||||
|
raise TypeError(f"Value must be bytes-like not {type(value)}")
|
||||||
|
self.uint32(len(value))
|
||||||
|
self._buff.extend(value)
|
||||||
|
|
||||||
|
return self
|
||||||
|
|
||||||
|
def mpint(self, value: int) -> t.Self:
|
||||||
|
if not isinstance(value, int):
|
||||||
|
raise TypeError(f"Value must be of type int not {type(value)}")
|
||||||
|
|
||||||
|
self.string(self._int_to_mpint(value))
|
||||||
|
|
||||||
|
return self
|
||||||
|
|
||||||
|
def name_list(self, value: list[str]) -> t.Self:
|
||||||
|
if not isinstance(value, list):
|
||||||
|
raise TypeError(f"Value must be a list of byte strings not {type(value)}")
|
||||||
|
|
||||||
|
try:
|
||||||
|
self.string(",".join(value).encode("ASCII"))
|
||||||
|
except UnicodeEncodeError as e:
|
||||||
|
raise ValueError(
|
||||||
|
f"Name-list's must consist of US-ASCII characters: {e}"
|
||||||
|
) from e
|
||||||
|
|
||||||
|
return self
|
||||||
|
|
||||||
|
def string_list(self, value: list[bytes]) -> t.Self:
|
||||||
|
if not isinstance(value, list):
|
||||||
|
raise TypeError(f"Value must be a list of byte string not {type(value)}")
|
||||||
|
|
||||||
|
writer = _OpensshWriter()
|
||||||
|
for s in value:
|
||||||
|
writer.string(s)
|
||||||
|
|
||||||
|
self.string(writer.bytes())
|
||||||
|
|
||||||
|
return self
|
||||||
|
|
||||||
|
def option_list(self, value: list[tuple[bytes, bytes]]) -> t.Self:
|
||||||
|
if not isinstance(value, list) or (value and not isinstance(value[0], tuple)):
|
||||||
|
raise TypeError("Value must be a list of tuples")
|
||||||
|
|
||||||
|
writer = _OpensshWriter()
|
||||||
|
for name, data in value:
|
||||||
|
writer.string(name)
|
||||||
|
# SSH option data is encoded twice though this behavior is not documented
|
||||||
|
writer.string(_OpensshWriter().string(data).bytes() if data else bytes())
|
||||||
|
|
||||||
|
self.string(writer.bytes())
|
||||||
|
|
||||||
|
return self
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def _int_to_mpint(num: int) -> bytes:
|
||||||
|
byte_length = (num.bit_length() + 7) // 8
|
||||||
|
try:
|
||||||
|
return num.to_bytes(byte_length, "big", signed=True)
|
||||||
|
# Handles values which require \x00 or \xFF to pad sign-bit
|
||||||
|
except OverflowError:
|
||||||
|
return num.to_bytes(byte_length + 1, "big", signed=True)
|
||||||
|
|
||||||
|
def bytes(self) -> bytes:
|
||||||
|
return bytes(self._buff)
|
||||||
|
|
||||||
|
|
||||||
|
__all__ = (
|
||||||
|
"any_in",
|
||||||
|
"file_mode",
|
||||||
|
"parse_openssh_version",
|
||||||
|
"secure_open",
|
||||||
|
"secure_write",
|
||||||
|
"OpensshParser",
|
||||||
|
)
|
||||||
59
plugins/module_utils/_serial.py
Normal file
59
plugins/module_utils/_serial.py
Normal file
@@ -0,0 +1,59 @@
|
|||||||
|
# Copyright (c) 2024, Felix Fontein <felix@fontein.de>
|
||||||
|
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||||
|
# SPDX-License-Identifier: GPL-3.0-or-later
|
||||||
|
|
||||||
|
# Note that this module util is **PRIVATE** to the collection. It can have breaking changes at any time.
|
||||||
|
# Do not use this from other collections or standalone plugins/modules!
|
||||||
|
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
from ansible.module_utils.common.text.converters import to_text
|
||||||
|
from ansible_collections.community.crypto.plugins.module_utils._crypto.math import (
|
||||||
|
convert_int_to_hex,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
def th(number: int) -> str:
|
||||||
|
abs_number = abs(number)
|
||||||
|
mod_10 = abs_number % 10
|
||||||
|
mod_100 = abs_number % 100
|
||||||
|
if mod_100 not in (11, 12, 13):
|
||||||
|
if mod_10 == 1:
|
||||||
|
return "st"
|
||||||
|
if mod_10 == 2:
|
||||||
|
return "nd"
|
||||||
|
if mod_10 == 3:
|
||||||
|
return "rd"
|
||||||
|
return "th"
|
||||||
|
|
||||||
|
|
||||||
|
def parse_serial(value: str | bytes) -> int:
|
||||||
|
"""
|
||||||
|
Given a colon-separated string of hexadecimal byte values, converts it to an integer.
|
||||||
|
"""
|
||||||
|
value_str = to_text(value)
|
||||||
|
result = 0
|
||||||
|
for i, part in enumerate(value_str.split(":")):
|
||||||
|
try:
|
||||||
|
part_value = int(part, 16)
|
||||||
|
if part_value < 0 or part_value > 255:
|
||||||
|
raise ValueError("the value is not in range [0, 255]")
|
||||||
|
except ValueError as exc:
|
||||||
|
raise ValueError(
|
||||||
|
f"The {i + 1}{th(i + 1)} part {part!r} is not a hexadecimal number in range [0, 255]: {exc}"
|
||||||
|
) from exc
|
||||||
|
result = (result << 8) | part_value
|
||||||
|
return result
|
||||||
|
|
||||||
|
|
||||||
|
def to_serial(value: int) -> str:
|
||||||
|
"""
|
||||||
|
Given an integer, converts its absolute value to a colon-separated string of hexadecimal byte values.
|
||||||
|
"""
|
||||||
|
value_str = convert_int_to_hex(value).upper()
|
||||||
|
if len(value_str) % 2 != 0:
|
||||||
|
value_str = f"0{value_str}"
|
||||||
|
return ":".join(value_str[i : i + 2] for i in range(0, len(value_str), 2))
|
||||||
|
|
||||||
|
|
||||||
|
__all__ = ("parse_serial", "to_serial")
|
||||||
170
plugins/module_utils/_time.py
Normal file
170
plugins/module_utils/_time.py
Normal file
@@ -0,0 +1,170 @@
|
|||||||
|
# Copyright (c) 2024, Felix Fontein <felix@fontein.de>
|
||||||
|
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||||
|
# SPDX-License-Identifier: GPL-3.0-or-later
|
||||||
|
|
||||||
|
# Note that this module util is **PRIVATE** to the collection. It can have breaking changes at any time.
|
||||||
|
# Do not use this from other collections or standalone plugins/modules!
|
||||||
|
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import datetime
|
||||||
|
import re
|
||||||
|
|
||||||
|
from ansible.module_utils.common.text.converters import to_text
|
||||||
|
from ansible_collections.community.crypto.plugins.module_utils._crypto.basic import (
|
||||||
|
OpenSSLObjectError,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
UTC = datetime.timezone.utc
|
||||||
|
|
||||||
|
|
||||||
|
def get_now_datetime(*, with_timezone: bool) -> datetime.datetime:
|
||||||
|
if with_timezone:
|
||||||
|
return datetime.datetime.now(tz=UTC)
|
||||||
|
return datetime.datetime.utcnow()
|
||||||
|
|
||||||
|
|
||||||
|
def ensure_utc_timezone(timestamp: datetime.datetime) -> datetime.datetime:
|
||||||
|
if timestamp.tzinfo is UTC:
|
||||||
|
return timestamp
|
||||||
|
if timestamp.tzinfo is None:
|
||||||
|
# We assume that naive datetime objects use timezone UTC!
|
||||||
|
return timestamp.replace(tzinfo=UTC)
|
||||||
|
return timestamp.astimezone(UTC)
|
||||||
|
|
||||||
|
|
||||||
|
def remove_timezone(timestamp: datetime.datetime) -> datetime.datetime:
|
||||||
|
# Convert to native datetime object
|
||||||
|
if timestamp.tzinfo is None:
|
||||||
|
return timestamp
|
||||||
|
if timestamp.tzinfo is not UTC:
|
||||||
|
timestamp = timestamp.astimezone(UTC)
|
||||||
|
return timestamp.replace(tzinfo=None)
|
||||||
|
|
||||||
|
|
||||||
|
def add_or_remove_timezone(
|
||||||
|
timestamp: datetime.datetime, *, with_timezone: bool
|
||||||
|
) -> datetime.datetime:
|
||||||
|
return (
|
||||||
|
ensure_utc_timezone(timestamp) if with_timezone else remove_timezone(timestamp)
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
def get_epoch_seconds(timestamp: datetime.datetime) -> float:
|
||||||
|
if timestamp.tzinfo is None:
|
||||||
|
# timestamp.timestamp() is offset by the local timezone if timestamp has no timezone
|
||||||
|
timestamp = ensure_utc_timezone(timestamp)
|
||||||
|
return timestamp.timestamp()
|
||||||
|
|
||||||
|
|
||||||
|
def from_epoch_seconds(
|
||||||
|
timestamp: int | float, *, with_timezone: bool
|
||||||
|
) -> datetime.datetime:
|
||||||
|
if with_timezone:
|
||||||
|
return datetime.datetime.fromtimestamp(timestamp, UTC)
|
||||||
|
return datetime.datetime.utcfromtimestamp(timestamp)
|
||||||
|
|
||||||
|
|
||||||
|
def convert_relative_to_datetime(
|
||||||
|
relative_time_string: str,
|
||||||
|
*,
|
||||||
|
with_timezone: bool = False,
|
||||||
|
now: datetime.datetime | None = None,
|
||||||
|
) -> datetime.datetime | None:
|
||||||
|
"""Get a datetime.datetime or None from a string in the time format described in sshd_config(5)"""
|
||||||
|
|
||||||
|
parsed_result = re.match(
|
||||||
|
r"^(?P<prefix>[+-])((?P<weeks>\d+)[wW])?((?P<days>\d+)[dD])?((?P<hours>\d+)[hH])?((?P<minutes>\d+)[mM])?((?P<seconds>\d+)[sS]?)?$",
|
||||||
|
relative_time_string,
|
||||||
|
)
|
||||||
|
|
||||||
|
if parsed_result is None or len(relative_time_string) == 1:
|
||||||
|
# not matched or only a single "+" or "-"
|
||||||
|
return None
|
||||||
|
|
||||||
|
offset = datetime.timedelta(0)
|
||||||
|
if parsed_result.group("weeks") is not None:
|
||||||
|
offset += datetime.timedelta(weeks=int(parsed_result.group("weeks")))
|
||||||
|
if parsed_result.group("days") is not None:
|
||||||
|
offset += datetime.timedelta(days=int(parsed_result.group("days")))
|
||||||
|
if parsed_result.group("hours") is not None:
|
||||||
|
offset += datetime.timedelta(hours=int(parsed_result.group("hours")))
|
||||||
|
if parsed_result.group("minutes") is not None:
|
||||||
|
offset += datetime.timedelta(minutes=int(parsed_result.group("minutes")))
|
||||||
|
if parsed_result.group("seconds") is not None:
|
||||||
|
offset += datetime.timedelta(seconds=int(parsed_result.group("seconds")))
|
||||||
|
|
||||||
|
if now is None:
|
||||||
|
now = get_now_datetime(with_timezone=with_timezone)
|
||||||
|
else:
|
||||||
|
now = add_or_remove_timezone(now, with_timezone=with_timezone)
|
||||||
|
|
||||||
|
if parsed_result.group("prefix") == "+":
|
||||||
|
return now + offset
|
||||||
|
return now - offset
|
||||||
|
|
||||||
|
|
||||||
|
def get_relative_time_option(
|
||||||
|
input_string: str,
|
||||||
|
*,
|
||||||
|
input_name: str,
|
||||||
|
with_timezone: bool = False,
|
||||||
|
now: datetime.datetime | None = None,
|
||||||
|
) -> datetime.datetime:
|
||||||
|
"""
|
||||||
|
Return an absolute timespec if a relative timespec or an ASN1 formatted
|
||||||
|
string is provided.
|
||||||
|
|
||||||
|
The return value will be a datetime object.
|
||||||
|
"""
|
||||||
|
result = to_text(input_string)
|
||||||
|
if result is None:
|
||||||
|
raise OpenSSLObjectError(
|
||||||
|
f'The timespec "{input_string}" for {input_name} is not valid'
|
||||||
|
)
|
||||||
|
# Relative time
|
||||||
|
if result.startswith("+") or result.startswith("-"):
|
||||||
|
res = convert_relative_to_datetime(result, with_timezone=with_timezone, now=now)
|
||||||
|
if res is None:
|
||||||
|
raise OpenSSLObjectError(
|
||||||
|
f'The timespec "{input_string}" for {input_name} is invalid'
|
||||||
|
)
|
||||||
|
return res
|
||||||
|
# Absolute time
|
||||||
|
for date_fmt, length in [
|
||||||
|
(
|
||||||
|
"%Y%m%d%H%M%SZ",
|
||||||
|
15,
|
||||||
|
), # this also parses '202401020304Z', but as datetime(2024, 1, 2, 3, 0, 4)
|
||||||
|
("%Y%m%d%H%MZ", 13),
|
||||||
|
(
|
||||||
|
"%Y%m%d%H%M%S%z",
|
||||||
|
14 + 5,
|
||||||
|
), # this also parses '202401020304+0000', but as datetime(2024, 1, 2, 3, 0, 4, tzinfo=...)
|
||||||
|
("%Y%m%d%H%M%z", 12 + 5),
|
||||||
|
]:
|
||||||
|
if len(result) != length:
|
||||||
|
continue
|
||||||
|
try:
|
||||||
|
res = datetime.datetime.strptime(result, date_fmt)
|
||||||
|
except ValueError:
|
||||||
|
pass
|
||||||
|
else:
|
||||||
|
return add_or_remove_timezone(res, with_timezone=with_timezone)
|
||||||
|
|
||||||
|
raise OpenSSLObjectError(
|
||||||
|
f'The time spec "{input_string}" for {input_name} is invalid'
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
__all__ = (
|
||||||
|
"get_now_datetime",
|
||||||
|
"ensure_utc_timezone",
|
||||||
|
"remove_timezone",
|
||||||
|
"add_or_remove_timezone",
|
||||||
|
"get_epoch_seconds",
|
||||||
|
"from_epoch_seconds",
|
||||||
|
"convert_relative_to_datetime",
|
||||||
|
"get_relative_time_option",
|
||||||
|
)
|
||||||
@@ -1,345 +1,15 @@
|
|||||||
# Vendored copy of distutils/version.py from CPython 3.9.5
|
# Copyright (c) 2021, Felix Fontein <felix@fontein.de>
|
||||||
#
|
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||||
# Implements multiple version numbering conventions for the
|
# SPDX-License-Identifier: GPL-3.0-or-later
|
||||||
# Python Module Distribution Utilities.
|
|
||||||
#
|
|
||||||
# Copyright (c) 2001-2022 Python Software Foundation. All rights reserved.
|
|
||||||
# PSF License (see LICENSES/PSF-2.0.txt or https://opensource.org/licenses/Python-2.0)
|
|
||||||
# SPDX-License-Identifier: PSF-2.0
|
|
||||||
#
|
|
||||||
|
|
||||||
"""Provides classes to represent module version numbers (one class for
|
# Note that this module util is **PRIVATE** to the collection. It can have breaking changes at any time.
|
||||||
each style of version numbering). There are currently two such classes
|
# Do not use this from other collections or standalone plugins/modules!
|
||||||
implemented: StrictVersion and LooseVersion.
|
|
||||||
|
|
||||||
Every version number class implements the following interface:
|
"""Provide version object to compare version numbers."""
|
||||||
* the 'parse' method takes a string and parses it to some internal
|
|
||||||
representation; if the string is an invalid version number,
|
|
||||||
'parse' raises a ValueError exception
|
|
||||||
* the class constructor takes an optional string argument which,
|
|
||||||
if supplied, is passed to 'parse'
|
|
||||||
* __str__ reconstructs the string that was passed to 'parse' (or
|
|
||||||
an equivalent string -- ie. one that will generate an equivalent
|
|
||||||
version number instance)
|
|
||||||
* __repr__ generates Python code to recreate the version number instance
|
|
||||||
* _cmp compares the current instance with either another instance
|
|
||||||
of the same class or a string (which will be parsed to an instance
|
|
||||||
of the same class, thus must follow the same rules)
|
|
||||||
"""
|
|
||||||
|
|
||||||
from __future__ import (absolute_import, division, print_function)
|
from __future__ import annotations
|
||||||
__metaclass__ = type
|
|
||||||
|
|
||||||
import re
|
from ansible.module_utils.compat.version import LooseVersion
|
||||||
|
|
||||||
try:
|
|
||||||
RE_FLAGS = re.VERBOSE | re.ASCII
|
|
||||||
except AttributeError:
|
|
||||||
RE_FLAGS = re.VERBOSE
|
|
||||||
|
|
||||||
|
|
||||||
class Version:
|
__all__ = ("LooseVersion",)
|
||||||
"""Abstract base class for version numbering classes. Just provides
|
|
||||||
constructor (__init__) and reproducer (__repr__), because those
|
|
||||||
seem to be the same for all version numbering classes; and route
|
|
||||||
rich comparisons to _cmp.
|
|
||||||
"""
|
|
||||||
|
|
||||||
def __init__(self, vstring=None):
|
|
||||||
if vstring:
|
|
||||||
self.parse(vstring)
|
|
||||||
|
|
||||||
def __repr__(self):
|
|
||||||
return "%s ('%s')" % (self.__class__.__name__, str(self))
|
|
||||||
|
|
||||||
def __eq__(self, other):
|
|
||||||
c = self._cmp(other)
|
|
||||||
if c is NotImplemented:
|
|
||||||
return c
|
|
||||||
return c == 0
|
|
||||||
|
|
||||||
def __lt__(self, other):
|
|
||||||
c = self._cmp(other)
|
|
||||||
if c is NotImplemented:
|
|
||||||
return c
|
|
||||||
return c < 0
|
|
||||||
|
|
||||||
def __le__(self, other):
|
|
||||||
c = self._cmp(other)
|
|
||||||
if c is NotImplemented:
|
|
||||||
return c
|
|
||||||
return c <= 0
|
|
||||||
|
|
||||||
def __gt__(self, other):
|
|
||||||
c = self._cmp(other)
|
|
||||||
if c is NotImplemented:
|
|
||||||
return c
|
|
||||||
return c > 0
|
|
||||||
|
|
||||||
def __ge__(self, other):
|
|
||||||
c = self._cmp(other)
|
|
||||||
if c is NotImplemented:
|
|
||||||
return c
|
|
||||||
return c >= 0
|
|
||||||
|
|
||||||
|
|
||||||
# Interface for version-number classes -- must be implemented
|
|
||||||
# by the following classes (the concrete ones -- Version should
|
|
||||||
# be treated as an abstract class).
|
|
||||||
# __init__ (string) - create and take same action as 'parse'
|
|
||||||
# (string parameter is optional)
|
|
||||||
# parse (string) - convert a string representation to whatever
|
|
||||||
# internal representation is appropriate for
|
|
||||||
# this style of version numbering
|
|
||||||
# __str__ (self) - convert back to a string; should be very similar
|
|
||||||
# (if not identical to) the string supplied to parse
|
|
||||||
# __repr__ (self) - generate Python code to recreate
|
|
||||||
# the instance
|
|
||||||
# _cmp (self, other) - compare two version numbers ('other' may
|
|
||||||
# be an unparsed version string, or another
|
|
||||||
# instance of your version class)
|
|
||||||
|
|
||||||
|
|
||||||
class StrictVersion(Version):
|
|
||||||
"""Version numbering for anal retentives and software idealists.
|
|
||||||
Implements the standard interface for version number classes as
|
|
||||||
described above. A version number consists of two or three
|
|
||||||
dot-separated numeric components, with an optional "pre-release" tag
|
|
||||||
on the end. The pre-release tag consists of the letter 'a' or 'b'
|
|
||||||
followed by a number. If the numeric components of two version
|
|
||||||
numbers are equal, then one with a pre-release tag will always
|
|
||||||
be deemed earlier (lesser) than one without.
|
|
||||||
|
|
||||||
The following are valid version numbers (shown in the order that
|
|
||||||
would be obtained by sorting according to the supplied cmp function):
|
|
||||||
|
|
||||||
0.4 0.4.0 (these two are equivalent)
|
|
||||||
0.4.1
|
|
||||||
0.5a1
|
|
||||||
0.5b3
|
|
||||||
0.5
|
|
||||||
0.9.6
|
|
||||||
1.0
|
|
||||||
1.0.4a3
|
|
||||||
1.0.4b1
|
|
||||||
1.0.4
|
|
||||||
|
|
||||||
The following are examples of invalid version numbers:
|
|
||||||
|
|
||||||
1
|
|
||||||
2.7.2.2
|
|
||||||
1.3.a4
|
|
||||||
1.3pl1
|
|
||||||
1.3c4
|
|
||||||
|
|
||||||
The rationale for this version numbering system will be explained
|
|
||||||
in the distutils documentation.
|
|
||||||
"""
|
|
||||||
|
|
||||||
version_re = re.compile(r'^(\d+) \. (\d+) (\. (\d+))? ([ab](\d+))?$',
|
|
||||||
RE_FLAGS)
|
|
||||||
|
|
||||||
def parse(self, vstring):
|
|
||||||
match = self.version_re.match(vstring)
|
|
||||||
if not match:
|
|
||||||
raise ValueError("invalid version number '%s'" % vstring)
|
|
||||||
|
|
||||||
(major, minor, patch, prerelease, prerelease_num) = \
|
|
||||||
match.group(1, 2, 4, 5, 6)
|
|
||||||
|
|
||||||
if patch:
|
|
||||||
self.version = tuple(map(int, [major, minor, patch]))
|
|
||||||
else:
|
|
||||||
self.version = tuple(map(int, [major, minor])) + (0,)
|
|
||||||
|
|
||||||
if prerelease:
|
|
||||||
self.prerelease = (prerelease[0], int(prerelease_num))
|
|
||||||
else:
|
|
||||||
self.prerelease = None
|
|
||||||
|
|
||||||
def __str__(self):
|
|
||||||
if self.version[2] == 0:
|
|
||||||
vstring = '.'.join(map(str, self.version[0:2]))
|
|
||||||
else:
|
|
||||||
vstring = '.'.join(map(str, self.version))
|
|
||||||
|
|
||||||
if self.prerelease:
|
|
||||||
vstring = vstring + self.prerelease[0] + str(self.prerelease[1])
|
|
||||||
|
|
||||||
return vstring
|
|
||||||
|
|
||||||
def _cmp(self, other):
|
|
||||||
if isinstance(other, str):
|
|
||||||
other = StrictVersion(other)
|
|
||||||
elif not isinstance(other, StrictVersion):
|
|
||||||
return NotImplemented
|
|
||||||
|
|
||||||
if self.version != other.version:
|
|
||||||
# numeric versions don't match
|
|
||||||
# prerelease stuff doesn't matter
|
|
||||||
if self.version < other.version:
|
|
||||||
return -1
|
|
||||||
else:
|
|
||||||
return 1
|
|
||||||
|
|
||||||
# have to compare prerelease
|
|
||||||
# case 1: neither has prerelease; they're equal
|
|
||||||
# case 2: self has prerelease, other doesn't; other is greater
|
|
||||||
# case 3: self doesn't have prerelease, other does: self is greater
|
|
||||||
# case 4: both have prerelease: must compare them!
|
|
||||||
|
|
||||||
if (not self.prerelease and not other.prerelease):
|
|
||||||
return 0
|
|
||||||
elif (self.prerelease and not other.prerelease):
|
|
||||||
return -1
|
|
||||||
elif (not self.prerelease and other.prerelease):
|
|
||||||
return 1
|
|
||||||
elif (self.prerelease and other.prerelease):
|
|
||||||
if self.prerelease == other.prerelease:
|
|
||||||
return 0
|
|
||||||
elif self.prerelease < other.prerelease:
|
|
||||||
return -1
|
|
||||||
else:
|
|
||||||
return 1
|
|
||||||
else:
|
|
||||||
raise AssertionError("never get here")
|
|
||||||
|
|
||||||
# end class StrictVersion
|
|
||||||
|
|
||||||
# The rules according to Greg Stein:
|
|
||||||
# 1) a version number has 1 or more numbers separated by a period or by
|
|
||||||
# sequences of letters. If only periods, then these are compared
|
|
||||||
# left-to-right to determine an ordering.
|
|
||||||
# 2) sequences of letters are part of the tuple for comparison and are
|
|
||||||
# compared lexicographically
|
|
||||||
# 3) recognize the numeric components may have leading zeroes
|
|
||||||
#
|
|
||||||
# The LooseVersion class below implements these rules: a version number
|
|
||||||
# string is split up into a tuple of integer and string components, and
|
|
||||||
# comparison is a simple tuple comparison. This means that version
|
|
||||||
# numbers behave in a predictable and obvious way, but a way that might
|
|
||||||
# not necessarily be how people *want* version numbers to behave. There
|
|
||||||
# wouldn't be a problem if people could stick to purely numeric version
|
|
||||||
# numbers: just split on period and compare the numbers as tuples.
|
|
||||||
# However, people insist on putting letters into their version numbers;
|
|
||||||
# the most common purpose seems to be:
|
|
||||||
# - indicating a "pre-release" version
|
|
||||||
# ('alpha', 'beta', 'a', 'b', 'pre', 'p')
|
|
||||||
# - indicating a post-release patch ('p', 'pl', 'patch')
|
|
||||||
# but of course this can't cover all version number schemes, and there's
|
|
||||||
# no way to know what a programmer means without asking him.
|
|
||||||
#
|
|
||||||
# The problem is what to do with letters (and other non-numeric
|
|
||||||
# characters) in a version number. The current implementation does the
|
|
||||||
# obvious and predictable thing: keep them as strings and compare
|
|
||||||
# lexically within a tuple comparison. This has the desired effect if
|
|
||||||
# an appended letter sequence implies something "post-release":
|
|
||||||
# eg. "0.99" < "0.99pl14" < "1.0", and "5.001" < "5.001m" < "5.002".
|
|
||||||
#
|
|
||||||
# However, if letters in a version number imply a pre-release version,
|
|
||||||
# the "obvious" thing isn't correct. Eg. you would expect that
|
|
||||||
# "1.5.1" < "1.5.2a2" < "1.5.2", but under the tuple/lexical comparison
|
|
||||||
# implemented here, this just isn't so.
|
|
||||||
#
|
|
||||||
# Two possible solutions come to mind. The first is to tie the
|
|
||||||
# comparison algorithm to a particular set of semantic rules, as has
|
|
||||||
# been done in the StrictVersion class above. This works great as long
|
|
||||||
# as everyone can go along with bondage and discipline. Hopefully a
|
|
||||||
# (large) subset of Python module programmers will agree that the
|
|
||||||
# particular flavour of bondage and discipline provided by StrictVersion
|
|
||||||
# provides enough benefit to be worth using, and will submit their
|
|
||||||
# version numbering scheme to its domination. The free-thinking
|
|
||||||
# anarchists in the lot will never give in, though, and something needs
|
|
||||||
# to be done to accommodate them.
|
|
||||||
#
|
|
||||||
# Perhaps a "moderately strict" version class could be implemented that
|
|
||||||
# lets almost anything slide (syntactically), and makes some heuristic
|
|
||||||
# assumptions about non-digits in version number strings. This could
|
|
||||||
# sink into special-case-hell, though; if I was as talented and
|
|
||||||
# idiosyncratic as Larry Wall, I'd go ahead and implement a class that
|
|
||||||
# somehow knows that "1.2.1" < "1.2.2a2" < "1.2.2" < "1.2.2pl3", and is
|
|
||||||
# just as happy dealing with things like "2g6" and "1.13++". I don't
|
|
||||||
# think I'm smart enough to do it right though.
|
|
||||||
#
|
|
||||||
# In any case, I've coded the test suite for this module (see
|
|
||||||
# ../test/test_version.py) specifically to fail on things like comparing
|
|
||||||
# "1.2a2" and "1.2". That's not because the *code* is doing anything
|
|
||||||
# wrong, it's because the simple, obvious design doesn't match my
|
|
||||||
# complicated, hairy expectations for real-world version numbers. It
|
|
||||||
# would be a snap to fix the test suite to say, "Yep, LooseVersion does
|
|
||||||
# the Right Thing" (ie. the code matches the conception). But I'd rather
|
|
||||||
# have a conception that matches common notions about version numbers.
|
|
||||||
|
|
||||||
|
|
||||||
class LooseVersion(Version):
|
|
||||||
"""Version numbering for anarchists and software realists.
|
|
||||||
Implements the standard interface for version number classes as
|
|
||||||
described above. A version number consists of a series of numbers,
|
|
||||||
separated by either periods or strings of letters. When comparing
|
|
||||||
version numbers, the numeric components will be compared
|
|
||||||
numerically, and the alphabetic components lexically. The following
|
|
||||||
are all valid version numbers, in no particular order:
|
|
||||||
|
|
||||||
1.5.1
|
|
||||||
1.5.2b2
|
|
||||||
161
|
|
||||||
3.10a
|
|
||||||
8.02
|
|
||||||
3.4j
|
|
||||||
1996.07.12
|
|
||||||
3.2.pl0
|
|
||||||
3.1.1.6
|
|
||||||
2g6
|
|
||||||
11g
|
|
||||||
0.960923
|
|
||||||
2.2beta29
|
|
||||||
1.13++
|
|
||||||
5.5.kw
|
|
||||||
2.0b1pl0
|
|
||||||
|
|
||||||
In fact, there is no such thing as an invalid version number under
|
|
||||||
this scheme; the rules for comparison are simple and predictable,
|
|
||||||
but may not always give the results you want (for some definition
|
|
||||||
of "want").
|
|
||||||
"""
|
|
||||||
|
|
||||||
component_re = re.compile(r'(\d+ | [a-z]+ | \.)', re.VERBOSE)
|
|
||||||
|
|
||||||
def __init__(self, vstring=None):
|
|
||||||
if vstring:
|
|
||||||
self.parse(vstring)
|
|
||||||
|
|
||||||
def parse(self, vstring):
|
|
||||||
# I've given up on thinking I can reconstruct the version string
|
|
||||||
# from the parsed tuple -- so I just store the string here for
|
|
||||||
# use by __str__
|
|
||||||
self.vstring = vstring
|
|
||||||
components = [x for x in self.component_re.split(vstring) if x and x != '.']
|
|
||||||
for i, obj in enumerate(components):
|
|
||||||
try:
|
|
||||||
components[i] = int(obj)
|
|
||||||
except ValueError:
|
|
||||||
pass
|
|
||||||
|
|
||||||
self.version = components
|
|
||||||
|
|
||||||
def __str__(self):
|
|
||||||
return self.vstring
|
|
||||||
|
|
||||||
def __repr__(self):
|
|
||||||
return "LooseVersion ('%s')" % str(self)
|
|
||||||
|
|
||||||
def _cmp(self, other):
|
|
||||||
if isinstance(other, str):
|
|
||||||
other = LooseVersion(other)
|
|
||||||
elif not isinstance(other, LooseVersion):
|
|
||||||
return NotImplemented
|
|
||||||
|
|
||||||
if self.version == other.version:
|
|
||||||
return 0
|
|
||||||
if self.version < other.version:
|
|
||||||
return -1
|
|
||||||
if self.version > other.version:
|
|
||||||
return 1
|
|
||||||
|
|
||||||
# end class LooseVersion
|
|
||||||
|
|||||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user