Compare commits

..

19 Commits
4.0.2 ... 4.1.0

Author SHA1 Message Date
Felix Fontein
7a70fda784 Release 4.1.0. 2021-11-23 05:52:35 +01:00
patchback[bot]
13d1b9569e terraform: ensuring command options are applied during build_plan (#3726) (#3778)
* Fixes parameters missing in planned state

* Added new line at end of file

* Added changelog fragment for pr 3726

* Added changes mentioned by felixfontein

* Removed blank space for pep8 validation

* Update changelogs/fragments/3726-terraform-missing-parameters-planned-fix.yml

Co-authored-by: Felix Fontein <felix@fontein.de>

* Update plugins/modules/cloud/misc/terraform.py

extend needs to be a list

Co-authored-by: Felix Fontein <felix@fontein.de>

Co-authored-by: Thomas Arringe <thomas.arringe@fouredge.se>
Co-authored-by: Thomas Arringe <Thomas.Arringe@ica.se>
Co-authored-by: Felix Fontein <felix@fontein.de>
(cherry picked from commit 946430e1fb)

Co-authored-by: egnirra <37709886+egnirra@users.noreply.github.com>
2021-11-23 05:49:18 +01:00
patchback[bot]
c8c5021773 pacman: add stdout and stderr as return parameters (#3758) (#3775)
* pacman: add stdout and stderr as return parameters

Following the model of ansible.builtin.apt

* Bugfix to PR: fix documentation formatting

* Add changelog fragment 3758-pacman-add-stdout-stderr.yml

* Apply suggestions from code review

* Update changelogs/fragments/3758-pacman-add-stdout-stderr.yml

Co-authored-by: Felix Fontein <felix@fontein.de>
(cherry picked from commit c2068641f4)

Co-authored-by: Célestin Matte <tohwiq@gmail.com>
2021-11-22 20:01:50 +01:00
patchback[bot]
206ac72bd8 extend open_iscsi to allow rescanning a session to discover new mapped LUN's #3763 (#3765) (#3774)
* <!--- Describe the change below, including rationale and design decisions -->

<!--- HINT: Include "Fixes #nnn" if you are fixing an existing issue -->

According to issue 3767, adding a session rescan flag to add and utilize mapped_luns after login into a portal and target.

<!--- Pick one below and delete the rest -->
- Feature Pull Request

<!--- Write the short name of the module, plugin, task or feature below -->
open_iscsi rescan flag

<!--- Include additional information to help people understand the change here -->
<!--- A step-by-step reproduction of the problem is helpful if there is no related issue -->

<!--- Paste verbatim command output below, e.g. before and after your change -->
``` yaml
      - name: Rescan Targets
        open_iscsi:
          rescan: true
          target: "{{ item.0 }}"
        register: iscsi_rescan
        loop:
          - iqn.1994-05.com.redhat:8c4ea31d28e
        tags:
          - rescan
```
```bash
    TASK [Rescan Targets] ********************************************************************************************************************************************************************
    changed: [node1] => (item=['iqn.1994-05.com.redhat:8c4ea31d28e'])
    changed: [node2] => (item=['iqn.1994-05.com.redhat:8c4ea31d28e'])

    TASK [Output rescan output] **************************************************************************************************************************************************************
    ok: [node1] => {
        "iscsi_rescan": {
            "changed": true,
            "msg": "All items completed",
            "results": [
                {
                    "ansible_loop_var": "item",
                    "changed": true,
                    "failed": false,
                    "invocation": {
                        "module_args": {
                            "auto_node_startup": null,
                            "discover": false,
                            "login": null,
                            "node_auth": "CHAP",
                            "node_pass": null,
                            "node_user": null,
                            "port": "3260",
                            "portal": null,
                            "rescan": true,
                            "show_nodes": false,
                            "target": "iqn.1994-05.com.redhat:8c4ea31d28e'"
                        }
                    },
                    "item": [
                        "iqn.1994-05.com.redhat:8c4ea31d28e"
                    ],
                    "sessions": [
                        "Rescanning session [sid: 3, target: iqn.1994-05.com.redhat:8c4ea31d28e, portal: 127.0.0.1,3260]",
                        "Rescanning session [sid: 1, target: iqn.1994-05.com.redhat:8c4ea31d28e, portal: 127.0.0.2,3260]",
                        "Rescanning session [sid: 2, target: iqn.1994-05.com.redhat:8c4ea31d28e, portal: 127.0.0.3,3260]",
                        ""
                    ]
                }
            ]
        }
    }
    ok: [node2] => {
        "iscsi_rescan": {
            "changed": true,
            "msg": "All items completed",
            "results": [
                {
                    "ansible_loop_var": "item",
                    "changed": true,
                    "failed": false,
                    "invocation": {
                        "module_args": {
                            "auto_node_startup": null,
                            "discover": false,
                            "login": null,
                            "node_auth": "CHAP",
                            "node_pass": null,
                            "node_user": null,
                            "port": "3260",
                            "portal": null,
                            "rescan": true,
                            "show_nodes": false,
                            "target": "iqn.1994-05.com.redhat:8c4ea31d28e"
                        }
                    },
                    "item": [
                        "iqn.1994-05.com.redhat:8c4ea31d28e"
                    ],
                    "sessions": [
                        "Rescanning session [sid: 3, target: iqn.1994-05.com.redhat:8c4ea31d28e, portal: 127.0.0.1,3260]",
                        "Rescanning session [sid: 2, target: iqn.1994-05.com.redhat:8c4ea31d28e, portal: 127.0.0.2,3260]",
                        "Rescanning session [sid: 1, target: iqn.1994-05.com.redhat:8c4ea31d28e, portal: 127.0.0.3,3260]",
                        ""
                    ]
                }
            ]
        }
    }
```

* minor_changes:
  - open_iscsi - extended module to allow rescanning of established session for one or all targets. (https://github.com/ansible-collections/community.general/issues/3763)

* * fixed comment according to the recommendation.

* Update plugins/modules/system/open_iscsi.py

Co-authored-by: Felix Fontein <felix@fontein.de>
(cherry picked from commit 921417c4b5)

Co-authored-by: Michaela Lang <94735640+michaelalang@users.noreply.github.com>
2021-11-22 19:50:44 +01:00
patchback[bot]
1ee123bb1e Xen orchestra inventory: Added groups, keyed_groups and compose support (#3766) (#3772)
* Xen orchestra inventory: Added groups, keyed_groups and compose support

* Update plugins/inventory/xen_orchestra.py

Remove extra params declaration

Co-authored-by: Felix Fontein <felix@fontein.de>

Co-authored-by: Felix Fontein <felix@fontein.de>
(cherry picked from commit 336f9465cb)

Co-authored-by: Samori Gorse <samori@codeinstyle.io>
2021-11-22 19:27:00 +01:00
patchback[bot]
aa0bcad9df RevBits PAM Secret Server Plugin (#3405) (#3771)
* RevBits PAM Secret Server Plugin

* Update revbitspss.py

* Update plugins/lookup/revbitspss.py

Co-authored-by: Felix Fontein <felix@fontein.de>

* Update plugins/lookup/revbitspss.py

Co-authored-by: Felix Fontein <felix@fontein.de>

* Update plugins/lookup/revbitspss.py

Co-authored-by: Felix Fontein <felix@fontein.de>

* Update plugins/lookup/revbitspss.py

Co-authored-by: Felix Fontein <felix@fontein.de>

* Update plugins/lookup/revbitspss.py

Co-authored-by: Felix Fontein <felix@fontein.de>

* Update plugins/lookup/revbitspss.py

Co-authored-by: Felix Fontein <felix@fontein.de>

* Fixes based on feedback from Ansible

* Fixes for auto tests

* module updated

* f string changed

* maintainer added

* maintainer added

* maintainer added

* review updates

* test added

* test added

* test added

* revisions updates

* revisions updates

* revisions updates

* file removed

* unit test added

* suggestions updated

* suggestions updated

* Update plugins/lookup/revbitspss.py

* Update plugins/lookup/revbitspss.py

* Update plugins/lookup/revbitspss.py

Co-authored-by: Felix Fontein <felix@fontein.de>
Co-authored-by: Zubair Hassan <zubair.hassan@invozone.com>
Co-authored-by: Alexei Znamensky <103110+russoz@users.noreply.github.com>
(cherry picked from commit 25e80762aa)

Co-authored-by: RevBits, LLC <74629760+RevBits@users.noreply.github.com>
2021-11-22 19:26:48 +01:00
patchback[bot]
6e51690a95 Support IPMI encryption key parameter in ipmi_boot (#3702) (#3770)
* Support IPMI encryption key parameter in ipmi_boot

* Support py2 on hex parsing, error handling

Change parsing hex string to support python2 and add error handling to it based on feedback.

* Don't explicitly set required to false

* Add version_added to key arg

* Add changelog fragment

* Add IPMI encryption key arg to ipmi_power

* Fix the formatting of changelog fragment

(cherry picked from commit 4013d0c9ca)

Co-authored-by: bluikko <14869000+bluikko@users.noreply.github.com>
2021-11-22 13:49:54 +01:00
patchback[bot]
3eab4faf0b Bugfix: github_repo does not apply defaults on existing repos (#2386) (#3769)
* github_repo do not apply defaults on currently existing repos

* Fixed sanity

* Fixed doc defaults

* Added changelog

* Fix "or" statement and some formatting

* Improve description change check

* Added api_url parameter for unit tests and default values for private and description parameters

* Added force_defaults parameter

* Improved docs

* Fixed doc anchors for force_defaults parameter

* Update plugins/modules/source_control/github/github_repo.py

Co-authored-by: Felix Fontein <felix@fontein.de>

Co-authored-by: Felix Fontein <felix@fontein.de>
(cherry picked from commit 17c3708f31)

Co-authored-by: Álvaro Torres Cogollo <atorrescogollo@gmail.com>
2021-11-22 13:49:29 +01:00
Felix Fontein
c6589a772b Prepare 4.1.0 release. 2021-11-20 09:16:49 +01:00
patchback[bot]
cb06c4ff77 [PR #3344/fef02c0f backport][stable-4] Xen orchestra inventory plugin (#3760)
* Xen orchestra inventory plugin (#3344)

* wip

* Renamed xo env variable with ANSIBLE prefix

* Suppress 3.x import and boilerplate errors

* Added shinuza as maintainer

* Do not use automatic field numbering spec

* Removed f string

* Fixed sanity checks

* wip tests

* Added working tests

* Fixed a bug when login fails

* Update plugins/inventory/xen_orchestra.py

Co-authored-by: Felix Fontein <felix@fontein.de>

Co-authored-by: Felix Fontein <felix@fontein.de>
(cherry picked from commit fef02c0fba)

* Replace usage of packaging.version with distutils.version.LooseVersion. (#3762)

(cherry picked from commit 08067f08df)

Co-authored-by: Samori Gorse <samori@codeinstyle.io>
Co-authored-by: Felix Fontein <felix@fontein.de>
2021-11-20 09:14:01 +01:00
patchback[bot]
78316fbb75 lxd_container: support lxd instance types (#3661) (#3761)
* lxd_container: support lxd instance types

Update the lxd_container module to enable the new LXD API endpoint,
which supports different types of instances, such as containers and virtual machines.
The type attributes can be set explicitly to create containers or virtual machines.

* lxd_container: rename references from containers to instances

* lxd_container: add an example of creating vms

* lxd_container: update doc

* lxd_container: fix pylint

* resolve conversation

* remove type from config

* remove outdated validation related to the instance api

* correct diff

* changing last bits

* add missing dot

(cherry picked from commit 58eb94fff3)

Co-authored-by: rchicoli <rchicoli@users.noreply.github.com>
2021-11-20 08:36:04 +01:00
patchback[bot]
c7df82652f change ip4 type to list of str (#3738) (#3757)
* change ip4 type to list of str

* Add several tests and change documentation

* Update changelogs/fragments/1088-nmcli_add_multiple_addresses_support.yml

Co-authored-by: Andrew Pantuso <ajpantuso@gmail.com>

Co-authored-by: Andrew Pantuso <ajpantuso@gmail.com>
(cherry picked from commit 50c2f3a97d)

Co-authored-by: Alex Groshev <38885591+haddystuff@users.noreply.github.com>
2021-11-19 07:27:36 +01:00
patchback[bot]
342a1a7faa Fix collection dependency installation in CI. (#3753) (#3756)
(cherry picked from commit 17b4c6972f)

Co-authored-by: Felix Fontein <felix@fontein.de>
2021-11-19 07:25:26 +01:00
patchback[bot]
17431bd42c CI: Replace RHEL 8.4 by RHEL 8.5 for devel (#3747) (#3749)
* Replace RHEL 8.4 by RHEL 8.5 for devel.

* Install virtualenv.

* Revert "Install virtualenv."

This reverts commit 22ba0d074e.

* Just do another skip...

(cherry picked from commit 26c7995c82)

Co-authored-by: Felix Fontein <felix@fontein.de>
2021-11-17 22:22:46 +01:00
patchback[bot]
eacf663999 listen_ports_facts: Added support for ss (#3708) (#3744)
(cherry picked from commit 245cee0ece)

Co-authored-by: Jan Gaßner <40096303+moonrail@users.noreply.github.com>
2021-11-16 20:06:56 +01:00
patchback[bot]
84a806be08 Add GetHostInterfaces command to redfish_info (#3693) (#3743)
* Add GetHostInterfaces command to redfish_info

Adding a GetHostInterfaces command to redfish_info in order to report the
following:
- Properties about the HostInterface(s) like Status, InterfaceEnabled, etc
- ManagerEthernetInterface (info on BMC -> host NIC)
- HostEthernetInterfaces (list of NICs for host -> BMC connectivity)

fixes #3692

* add fragment

* fixup for linter

* redfish_utils.py cleanup

- Remove unneeded Properties list from get_nic_inventory()
- Remove bogus key variable from get_hostinterfaces()
- Add additional Properties to collect from HostInterface objects

* fixup for stray deletion

(cherry picked from commit 98cca3c19c)

Co-authored-by: Jacob <jyundt@gmail.com>
2021-11-16 20:06:28 +01:00
patchback[bot]
7db5c86dc8 gitlab: clean up modules and utils (#3694) (#3741)
* gitlab: remove dead code in module_utils

* gitlab: use snake_case consistently in methods and functions

* gitlab: use snake_case consistently in variables

* gitlab: fix pep8 indentation issues

* gitlab: add changelog fragment

* gitlab: apply suggestions from code review

Co-authored-by: Alexei Znamensky <103110+russoz@users.noreply.github.com>
Co-authored-by: Chris Frage <git@sh0shin.org>

* gitlab: use consistent indentation

Co-authored-by: Alexei Znamensky <103110+russoz@users.noreply.github.com>
Co-authored-by: Chris Frage <git@sh0shin.org>
(cherry picked from commit d29aecad26)

Co-authored-by: Nejc Habjan <hab.nejc@gmail.com>
2021-11-16 19:45:45 +01:00
patchback[bot]
1e82b5c580 redfish_config: Add support to configure Redfish Host Interface (#3632) (#3718)
* redfish_config: Add support to configure Redfish Host Interface

Adding another Manager command to redfish_config in order to set Redfish
Host Interface properties.

Fixes #3631

* add fragment

* fixup for fragment filename

* Update plugins/modules/remote_management/redfish/redfish_config.py

Co-authored-by: Felix Fontein <felix@fontein.de>

* Add support for specifying HostInterface resource ID

* Apply suggestions from code review

Co-authored-by: Felix Fontein <felix@fontein.de>

* Update plugins/modules/remote_management/redfish/redfish_config.py

Co-authored-by: Felix Fontein <felix@fontein.de>

* Update changelogs/fragments/3632-add-redfish-host-interface-config-support.yml

Co-authored-by: Felix Fontein <felix@fontein.de>

Co-authored-by: Felix Fontein <felix@fontein.de>
(cherry picked from commit 6f47ddc29f)

Co-authored-by: Jacob <jyundt@gmail.com>
2021-11-16 19:45:30 +01:00
Felix Fontein
7f6be665f9 Next expected release is 4.1.0. 2021-11-16 09:10:42 +01:00
44 changed files with 2058 additions and 576 deletions

View File

@@ -209,8 +209,8 @@ stages:
test: macos/11.1
- name: RHEL 7.9
test: rhel/7.9
- name: RHEL 8.4
test: rhel/8.4
- name: RHEL 8.5
test: rhel/8.5
- name: FreeBSD 12.2
test: freebsd/12.2
- name: FreeBSD 13.0

4
.github/BOTMETA.yml vendored
View File

@@ -163,6 +163,8 @@ files:
keywords: opennebula dynamic inventory script
$inventories/proxmox.py:
maintainers: $team_virt ilijamt
$inventories/xen_orchestra.py:
maintainers: shinuza
$inventories/icinga2.py:
maintainers: bongoeadgc6
$inventories/scaleway.py:
@@ -223,6 +225,8 @@ files:
maintainers: konstruktoid
$lookups/redis.py:
maintainers: $team_ansible_core jpmens
$lookups/revbitspss.py:
maintainers: RevBits
$lookups/shelvefile.py: {}
$lookups/tss.py:
maintainers: amigus endlesstrax

View File

@@ -6,6 +6,47 @@ Community General Release Notes
This changelog describes changes after version 3.0.0.
v4.1.0
======
Release Summary
---------------
Regular bugfix and feature release.
Minor Changes
-------------
- gitlab - clean up modules and utils (https://github.com/ansible-collections/community.general/pull/3694).
- ipmi_boot - add support for user-specified IPMI encryption key (https://github.com/ansible-collections/community.general/issues/3698).
- ipmi_power - add support for user-specified IPMI encryption key (https://github.com/ansible-collections/community.general/issues/3698).
- listen_ports_facts - add support for ``ss`` command besides ``netstat`` (https://github.com/ansible-collections/community.general/pull/3708).
- lxd_container - adds ``type`` option which also allows to operate on virtual machines and not just containers (https://github.com/ansible-collections/community.general/pull/3661).
- nmcli - add multiple addresses support for ``ip4`` parameter (https://github.com/ansible-collections/community.general/issues/1088, https://github.com/ansible-collections/community.general/pull/3738).
- open_iscsi - extended module to allow rescanning of established session for one or all targets (https://github.com/ansible-collections/community.general/issues/3763).
- pacman - add ``stdout`` and ``stderr`` as return values (https://github.com/ansible-collections/community.general/pull/3758).
- redfish_command - add ``GetHostInterfaces`` command to enable reporting Redfish Host Interface information (https://github.com/ansible-collections/community.general/issues/3693).
- redfish_command - add ``SetHostInterface`` command to enable configuring the Redfish Host Interface (https://github.com/ansible-collections/community.general/issues/3632).
Bugfixes
--------
- github_repo - ``private`` and ``description`` attributes should not be set to default values when the repo already exists (https://github.com/ansible-collections/community.general/pull/2386).
- terraform - fix command options being ignored during planned/plan in function ``build_plan`` such as ``lock`` or ``lock_timeout`` (https://github.com/ansible-collections/community.general/issues/3707, https://github.com/ansible-collections/community.general/pull/3726).
New Plugins
-----------
Inventory
~~~~~~~~~
- xen_orchestra - Xen Orchestra inventory source
Lookup
~~~~~~
- revbitspss - Get secrets from RevBits PAM server
v4.0.2
======

View File

@@ -1069,3 +1069,51 @@ releases:
- 4.0.2.yml
- deprecate-ansible-2.9-2.10.yml
release_date: '2021-11-16'
4.1.0:
changes:
bugfixes:
- github_repo - ``private`` and ``description`` attributes should not be set
to default values when the repo already exists (https://github.com/ansible-collections/community.general/pull/2386).
- terraform - fix command options being ignored during planned/plan in function
``build_plan`` such as ``lock`` or ``lock_timeout`` (https://github.com/ansible-collections/community.general/issues/3707,
https://github.com/ansible-collections/community.general/pull/3726).
minor_changes:
- gitlab - clean up modules and utils (https://github.com/ansible-collections/community.general/pull/3694).
- ipmi_boot - add support for user-specified IPMI encryption key (https://github.com/ansible-collections/community.general/issues/3698).
- ipmi_power - add support for user-specified IPMI encryption key (https://github.com/ansible-collections/community.general/issues/3698).
- listen_ports_facts - add support for ``ss`` command besides ``netstat`` (https://github.com/ansible-collections/community.general/pull/3708).
- lxd_container - adds ``type`` option which also allows to operate on virtual
machines and not just containers (https://github.com/ansible-collections/community.general/pull/3661).
- nmcli - add multiple addresses support for ``ip4`` parameter (https://github.com/ansible-collections/community.general/issues/1088,
https://github.com/ansible-collections/community.general/pull/3738).
- open_iscsi - extended module to allow rescanning of established session for
one or all targets (https://github.com/ansible-collections/community.general/issues/3763).
- pacman - add ``stdout`` and ``stderr`` as return values (https://github.com/ansible-collections/community.general/pull/3758).
- redfish_command - add ``GetHostInterfaces`` command to enable reporting Redfish
Host Interface information (https://github.com/ansible-collections/community.general/issues/3693).
- redfish_command - add ``SetHostInterface`` command to enable configuring the
Redfish Host Interface (https://github.com/ansible-collections/community.general/issues/3632).
release_summary: Regular bugfix and feature release.
fragments:
- 1088-nmcli_add_multiple_addresses_support.yml
- 2386-github_repo-fix-idempotency-issues.yml
- 3632-add-redfish-host-interface-config-support.yml
- 3661-lxd_container-add-vm-support.yml
- 3693-add-redfish-host-interface-info-support.yml
- 3694-gitlab-cleanup.yml
- 3702-ipmi-encryption-key.yml
- 3708-listen_ports_facts-add-ss-support.yml
- 3726-terraform-missing-parameters-planned-fix.yml
- 3758-pacman-add-stdout-stderr.yml
- 3765-extend-open_iscsi-with-rescan.yml
- 4.1.0.yml
plugins:
inventory:
- description: Xen Orchestra inventory source
name: xen_orchestra
namespace: null
lookup:
- description: Get secrets from RevBits PAM server
name: revbitspss
namespace: null
release_date: '2021-11-23'

View File

@@ -1,6 +1,6 @@
namespace: community
name: general
version: 4.0.2
version: 4.1.0
readme: README.md
authors:
- Ansible (https://github.com/ansible)

View File

@@ -0,0 +1,328 @@
# -*- coding: utf-8 -*-
# Copyright (c) 2021 Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
DOCUMENTATION = '''
name: xen_orchestra
short_description: Xen Orchestra inventory source
version_added: 4.1.0
author:
- Dom Del Nano (@ddelnano) <ddelnano@gmail.com>
- Samori Gorse (@shinuza) <samorigorse@gmail.com>
requirements:
- websocket-client >= 1.0.0
description:
- Get inventory hosts from a Xen Orchestra deployment.
- 'Uses a configuration file as an inventory source, it must end in C(.xen_orchestra.yml) or C(.xen_orchestra.yaml).'
extends_documentation_fragment:
- constructed
- inventory_cache
options:
plugin:
description: The name of this plugin, it should always be set to C(community.general.xen_orchestra) for this plugin to recognize it as its own.
required: yes
choices: ['community.general.xen_orchestra']
type: str
api_host:
description:
- API host to XOA API.
- If the value is not specified in the inventory configuration, the value of environment variable C(ANSIBLE_XO_HOST) will be used instead.
type: str
env:
- name: ANSIBLE_XO_HOST
user:
description:
- Xen Orchestra user.
- If the value is not specified in the inventory configuration, the value of environment variable C(ANSIBLE_XO_USER) will be used instead.
required: yes
type: str
env:
- name: ANSIBLE_XO_USER
password:
description:
- Xen Orchestra password.
- If the value is not specified in the inventory configuration, the value of environment variable C(ANSIBLE_XO_PASSWORD) will be used instead.
required: yes
type: str
env:
- name: ANSIBLE_XO_PASSWORD
validate_certs:
description: Verify TLS certificate if using HTTPS.
type: boolean
default: true
use_ssl:
description: Use wss when connecting to the Xen Orchestra API
type: boolean
default: true
'''
EXAMPLES = '''
# file must be named xen_orchestra.yaml or xen_orchestra.yml
simple_config_file:
plugin: community.general.xen_orchestra
api_host: 192.168.1.255
user: xo
password: xo_pwd
validate_certs: true
use_ssl: true
groups:
kube_nodes: "'kube_node' in tags"
compose:
ansible_port: 2222
'''
import json
import ssl
from distutils.version import LooseVersion
from ansible.errors import AnsibleError
from ansible.plugins.inventory import BaseInventoryPlugin, Constructable, Cacheable
# 3rd party imports
try:
HAS_WEBSOCKET = True
import websocket
from websocket import create_connection
if LooseVersion(websocket.__version__) <= LooseVersion('1.0.0'):
raise ImportError
except ImportError as e:
HAS_WEBSOCKET = False
HALTED = 'Halted'
PAUSED = 'Paused'
RUNNING = 'Running'
SUSPENDED = 'Suspended'
POWER_STATES = [RUNNING, HALTED, SUSPENDED, PAUSED]
HOST_GROUP = 'xo_hosts'
POOL_GROUP = 'xo_pools'
def clean_group_name(label):
    """Normalize an XO label into a group-safe name: lowercase, with spaces and hyphens mapped to underscores."""
    return label.lower().replace(' ', '_').replace('-', '_')
class InventoryModule(BaseInventoryPlugin, Constructable, Cacheable):
''' Host inventory parser for ansible using XenOrchestra as source. '''
# Plugin identifier; must match the `plugin:` entry in the inventory config file.
NAME = 'community.general.xen_orchestra'
def __init__(self):
# Initialize instance state; real values are filled in later by parse().
super(InventoryModule, self).__init__()
# from config
# `counter` backs the `pointer` property, which hands out increasing
# JSON-RPC request ids (starts at -1 so the first id issued is 0).
self.counter = -1
self.session = None
self.cache_key = None
self.use_cache = None
@property
def pointer(self):
# Return the next JSON-RPC request id; every read advances the counter.
self.counter += 1
return self.counter
def create_connection(self, xoa_api_host):
# Open a websocket connection to the XO API at ws(s)://<host>/api/.
# Certificate verification is disabled when validate_certs is false.
validate_certs = self.get_option('validate_certs')
use_ssl = self.get_option('use_ssl')
proto = 'wss' if use_ssl else 'ws'
sslopt = None if validate_certs else {'cert_reqs': ssl.CERT_NONE}
self.conn = create_connection(
'{0}://{1}/api/'.format(proto, xoa_api_host), sslopt=sslopt)
def login(self, user, password):
# Authenticate via the session.signIn JSON-RPC method.
# Raises AnsibleError when the server response carries an 'error' key.
payload = {'id': self.pointer, 'jsonrpc': '2.0', 'method': 'session.signIn', 'params': {
'username': user, 'password': password}}
self.conn.send(json.dumps(payload))
result = json.loads(self.conn.recv())
if 'error' in result:
raise AnsibleError(
'Could not connect: {0}'.format(result['error']))
def get_object(self, name):
# Fetch all XO objects of the given type (e.g. 'VM', 'pool', 'host')
# via xo.getAllObjects; raises AnsibleError on an API error.
payload = {'id': self.pointer, 'jsonrpc': '2.0',
'method': 'xo.getAllObjects', 'params': {'filter': {'type': name}}}
self.conn.send(json.dumps(payload))
answer = json.loads(self.conn.recv())
if 'error' in answer:
raise AnsibleError(
'Could not request: {0}'.format(answer['error']))
return answer['result']
def _get_objects(self):
# Connect, log in, and pull the three object collections the
# inventory is built from.
self.create_connection(self.xoa_api_host)
self.login(self.xoa_user, self.xoa_password)
return {
'vms': self.get_object('VM'),
'pools': self.get_object('pool'),
'hosts': self.get_object('host'),
}
def _apply_constructable(self, name, variables):
# Apply the `constructed` options (groups, keyed_groups, compose)
# to a single inventory entry.
strict = self.get_option('strict')
self._add_host_to_composed_groups(self.get_option('groups'), variables, name, strict=strict)
self._add_host_to_keyed_groups(self.get_option('keyed_groups'), variables, name, strict=strict)
self._set_composite_vars(self.get_option('compose'), variables, name, strict=strict)
def _add_vms(self, vms, hosts, pools):
# Add every VM (keyed by uuid) to the inventory, grouping it by power
# state, owning host, owning pool and with_ip/without_ip, then record
# its facts as host variables and apply the constructed options.
for uuid, vm in vms.items():
group = 'with_ip'
ip = vm.get('mainIpAddress')
entry_name = uuid
power_state = vm['power_state'].lower()
pool_name = self._pool_group_name_for_uuid(pools, vm['$poolId'])
host_name = self._host_group_name_for_uuid(hosts, vm['$container'])
self.inventory.add_host(entry_name)
# Grouping by power state
self.inventory.add_child(power_state, entry_name)
# Grouping by host
if host_name:
self.inventory.add_child(host_name, entry_name)
# Grouping by pool
if pool_name:
self.inventory.add_child(pool_name, entry_name)
# Grouping VMs with an IP together
if ip is None:
group = 'without_ip'
self.inventory.add_group(group)
self.inventory.add_child(group, entry_name)
# Adding meta
self.inventory.set_variable(entry_name, 'uuid', uuid)
self.inventory.set_variable(entry_name, 'ip', ip)
# `ansible_host` is the IP (may be None when the VM reports no address).
self.inventory.set_variable(entry_name, 'ansible_host', ip)
self.inventory.set_variable(entry_name, 'power_state', power_state)
self.inventory.set_variable(
entry_name, 'name_label', vm['name_label'])
self.inventory.set_variable(entry_name, 'type', vm['type'])
self.inventory.set_variable(
entry_name, 'cpus', vm['CPUs']['number'])
self.inventory.set_variable(entry_name, 'tags', vm['tags'])
self.inventory.set_variable(
entry_name, 'memory', vm['memory']['size'])
self.inventory.set_variable(
entry_name, 'has_ip', group == 'with_ip')
self.inventory.set_variable(
entry_name, 'is_managed', vm.get('managementAgentDetected', False))
self.inventory.set_variable(
entry_name, 'os_version', vm['os_version'])
self._apply_constructable(entry_name, self.inventory.get_host(entry_name).get_vars())
def _add_hosts(self, hosts, pools):
# Add each hypervisor host to the inventory with its own
# xo_host_<label> group, attach it to the global host group and its
# pool group, and record host facts as variables.
for host in hosts.values():
entry_name = host['uuid']
group_name = 'xo_host_{0}'.format(
clean_group_name(host['name_label']))
pool_name = self._pool_group_name_for_uuid(pools, host['$poolId'])
self.inventory.add_group(group_name)
self.inventory.add_host(entry_name)
self.inventory.add_child(HOST_GROUP, entry_name)
self.inventory.add_child(pool_name, entry_name)
self.inventory.set_variable(entry_name, 'enabled', host['enabled'])
self.inventory.set_variable(
entry_name, 'hostname', host['hostname'])
self.inventory.set_variable(entry_name, 'memory', host['memory'])
self.inventory.set_variable(entry_name, 'address', host['address'])
self.inventory.set_variable(entry_name, 'cpus', host['cpus'])
self.inventory.set_variable(entry_name, 'type', 'host')
self.inventory.set_variable(entry_name, 'tags', host['tags'])
self.inventory.set_variable(entry_name, 'version', host['version'])
self.inventory.set_variable(
entry_name, 'power_state', host['power_state'].lower())
self.inventory.set_variable(
entry_name, 'product_brand', host['productBrand'])
# NOTE(review): this loop duplicates _add_pools (it iterates pools,
# not hosts) and add_group is already idempotent-looking here --
# confirm whether it is intentionally kept.
for pool in pools.values():
group_name = 'xo_pool_{0}'.format(
clean_group_name(pool['name_label']))
self.inventory.add_group(group_name)
def _add_pools(self, pools):
# Create one xo_pool_<label> group per pool.
for pool in pools.values():
group_name = 'xo_pool_{0}'.format(
clean_group_name(pool['name_label']))
self.inventory.add_group(group_name)
# TODO: Refactor
def _pool_group_name_for_uuid(self, pools, pool_uuid):
# Map a pool uuid to its xo_pool_<label> group name; falls through
# (returns None implicitly) when the uuid is not found.
for pool in pools:
if pool == pool_uuid:
return 'xo_pool_{0}'.format(
clean_group_name(pools[pool_uuid]['name_label']))
# TODO: Refactor
def _host_group_name_for_uuid(self, hosts, host_uuid):
# Map a host uuid to its xo_host_<label> group name; falls through
# (returns None implicitly) when the uuid is not found.
for host in hosts:
if host == host_uuid:
return 'xo_host_{0}'.format(
clean_group_name(hosts[host_uuid]['name_label']
))
def _populate(self, objects):
# Build the full inventory from the fetched object collections.
# Prepare general groups
self.inventory.add_group(HOST_GROUP)
self.inventory.add_group(POOL_GROUP)
for group in POWER_STATES:
self.inventory.add_group(group.lower())
self._add_pools(objects['pools'])
self._add_hosts(objects['hosts'], objects['pools'])
self._add_vms(objects['vms'], objects['hosts'], objects['pools'])
def verify_file(self, path):
# Accept only inventory sources named *xen_orchestra.yml/.yaml.
valid = False
if super(InventoryModule, self).verify_file(path):
if path.endswith(('xen_orchestra.yaml', 'xen_orchestra.yml')):
valid = True
else:
self.display.vvv(
'Skipping due to inventory source not ending in "xen_orchestra.yaml" nor "xen_orchestra.yml"')
return valid
def parse(self, inventory, loader, path, cache=True):
# Entry point called by Ansible: reads plugin options, connects to
# the XO API and populates the inventory.  Fails early when the
# websocket-client dependency is missing.
if not HAS_WEBSOCKET:
raise AnsibleError('This plugin requires websocket-client 1.0.0 or higher: '
'https://github.com/websocket-client/websocket-client.')
super(InventoryModule, self).parse(inventory, loader, path)
# read config from file, this sets 'options'
self._read_config_data(path)
self.inventory = inventory
self.protocol = 'wss'
self.xoa_api_host = self.get_option('api_host')
self.xoa_user = self.get_option('user')
self.xoa_password = self.get_option('password')
# NOTE(review): cache_key/use_cache are computed but the fetched
# objects below are not read from or written to the cache -- confirm
# whether caching was intended to be wired up.
self.cache_key = self.get_cache_key(path)
self.use_cache = cache and self.get_option('cache')
self.validate_certs = self.get_option('validate_certs')
if not self.get_option('use_ssl'):
self.protocol = 'ws'
objects = self._get_objects()
self._populate(objects)

View File

@@ -0,0 +1,107 @@
# -*- coding: utf-8 -*-
# Copyright: (c) 2021, RevBits <info@revbits.com>
# GNU General Public License v3.0+ (see COPYING or
# https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
DOCUMENTATION = r"""
name: revbitspss
author: RevBits (@RevBits) <info@revbits.com>
short_description: Get secrets from RevBits PAM server
version_added: 4.1.0
description:
- Uses the revbits_ansible Python SDK to get Secrets from RevBits PAM
Server using API key authentication with the REST API.
requirements:
- revbits_ansible - U(https://pypi.org/project/revbits_ansible/)
options:
_terms:
description:
- This will be an array of keys for secrets which you want to fetch from RevBits PAM.
required: true
type: list
elements: string
base_url:
description:
- This will be the base URL of the server, for example C(https://server-url-here).
required: true
type: string
api_key:
description:
- This will be the API key for authentication. You can get it from the RevBits PAM secret manager module.
required: true
type: string
"""
RETURN = r"""
_list:
description:
- The JSON responses which you can access with defined keys.
- If you are fetching secrets named as UUID or PASSWORD, it will give you a dict of all secrets.
type: list
elements: dict
"""
EXAMPLES = r"""
- hosts: localhost
vars:
secret: >-
{{
lookup(
'community.general.revbitspss',
'UUIDPAM', 'DB_PASS',
base_url='https://server-url-here',
api_key='API_KEY_GOES_HERE'
)
}}
tasks:
- ansible.builtin.debug:
msg: >
UUIDPAM is {{ (secret['UUIDPAM']) }} and DB_PASS is {{ (secret['DB_PASS']) }}
"""
from ansible.plugins.lookup import LookupBase
from ansible.utils.display import Display
from ansible.errors import AnsibleError
from ansible.module_utils.six import raise_from
try:
from pam.revbits_ansible.server import SecretServer
except ImportError as imp_exc:
ANOTHER_LIBRARY_IMPORT_ERROR = imp_exc
else:
ANOTHER_LIBRARY_IMPORT_ERROR = None
display = Display()
class LookupModule(LookupBase):
    """Lookup plugin fetching secrets from a RevBits PAM server."""

    @staticmethod
    def Client(server_parameters):
        """Build a SecretServer client from a dict of connection parameters."""
        return SecretServer(**server_parameters)

    def run(self, terms, variables, **kwargs):
        """Look up every term and return a list of ``{term: secret}`` dicts.

        :raises AnsibleError: if the SDK is missing or a lookup fails.
        """
        if ANOTHER_LIBRARY_IMPORT_ERROR:
            raise_from(
                AnsibleError('revbits_ansible must be installed to use this plugin'),
                ANOTHER_LIBRARY_IMPORT_ERROR
            )
        self.set_options(var_options=variables, direct=kwargs)
        secret_server = LookupModule.Client(
            {
                "base_url": self.get_option('base_url'),
                "api_key": self.get_option('api_key'),
            }
        )
        result = []
        for term in terms:
            try:
                display.vvv(u"Secret Server lookup of Secret with ID %s" % term)
                result.append({term: secret_server.get_pam_secret(term)})
            except Exception as error:
                # Bug fix: exceptions have no '.message' attribute on Python 3,
                # so the original error path itself raised AttributeError.
                # Format the exception object and chain the original cause.
                raise_from(
                    AnsibleError("Secret Server lookup failure: %s" % error),
                    error
                )
        return result

View File

@@ -30,31 +30,7 @@ except Exception:
HAS_GITLAB_PACKAGE = False
def request(module, api_url, project, path, access_token, private_token, rawdata='', method='GET'):
    """Perform a GitLab REST API call for *project* and return (ok, payload).

    Returns (True, body) for HTTP 204, (True, parsed_json) for 200/201,
    and (False, "<status>: <body>") otherwise.
    """
    url = "%s/v4/projects/%s%s" % (api_url, quote_plus(project), path)
    headers = {}
    if access_token:
        headers['Authorization'] = "Bearer %s" % access_token
    else:
        headers['Private-Token'] = private_token

    headers['Accept'] = "application/json"
    headers['Content-Type'] = "application/json"

    response, info = fetch_url(module=module, url=url, headers=headers, data=rawdata, method=method)
    status = info['status']
    content = ""
    if response:
        content = response.read()
        # Bug fix: fetch_url bodies are bytes on Python 3, so the error-path
        # concatenation below raised TypeError. Decode once up front;
        # json.loads accepts the decoded text just the same.
        if isinstance(content, bytes):
            content = content.decode('utf-8', errors='replace')
    if status == 204:
        return True, content
    elif status in (200, 201):
        return True, json.loads(content)
    else:
        return False, str(status) + ": " + content
def findProject(gitlab_instance, identifier):
def find_project(gitlab_instance, identifier):
try:
project = gitlab_instance.projects.get(identifier)
except Exception as e:
@@ -67,7 +43,7 @@ def findProject(gitlab_instance, identifier):
return project
def findGroup(gitlab_instance, identifier):
def find_group(gitlab_instance, identifier):
try:
project = gitlab_instance.groups.get(identifier)
except Exception as e:
@@ -76,7 +52,7 @@ def findGroup(gitlab_instance, identifier):
return project
def gitlabAuthentication(module):
def gitlab_authentication(module):
gitlab_url = module.params['api_url']
validate_certs = module.params['validate_certs']
gitlab_user = module.params['api_username']

View File

@@ -2029,15 +2029,28 @@ class RedfishUtils(object):
def get_multi_memory_inventory(self):
    # Run get_memory_inventory for every system via aggregate_systems and
    # return the combined result.
    return self.aggregate_systems(self.get_memory_inventory)
def get_nic(self, resource_uri):
    """Fetch one EthernetInterface resource and return selected properties.

    :param resource_uri: service-relative URI of the EthernetInterface.
    :return: dict with 'ret' and, on success, 'entries' holding the NIC data;
        on request failure the failed response dict is passed through.
    """
    result = {}
    # Copy these entries when present; absent properties are simply skipped.
    properties = ['Name', 'Id', 'Description', 'FQDN', 'IPv4Addresses', 'IPv6Addresses',
                  'NameServers', 'MACAddress', 'PermanentMACAddress',
                  'SpeedMbps', 'MTUSize', 'AutoNeg', 'Status']

    response = self.get_request(self.root_uri + resource_uri)
    if response['ret'] is False:
        return response
    result['ret'] = True
    data = response['data']

    # 'prop' instead of the original 'property', which shadowed the builtin.
    nic = dict((prop, data[prop]) for prop in properties if prop in data)
    result['entries'] = nic
    return result
def get_nic_inventory(self, resource_uri):
result = {}
nic_list = []
nic_results = []
key = "EthernetInterfaces"
# Get these entries, but does not fail if not found
properties = ['Name', 'Id', 'Description', 'FQDN', 'IPv4Addresses', 'IPv6Addresses',
'NameServers', 'MACAddress', 'PermanentMACAddress',
'SpeedMbps', 'MTUSize', 'AutoNeg', 'Status']
response = self.get_request(self.root_uri + resource_uri)
if response['ret'] is False:
@@ -2061,18 +2074,9 @@ class RedfishUtils(object):
nic_list.append(nic[u'@odata.id'])
for n in nic_list:
nic = {}
uri = self.root_uri + n
response = self.get_request(uri)
if response['ret'] is False:
return response
data = response['data']
for property in properties:
if property in data:
nic[property] = data[property]
nic_results.append(nic)
nic = self.get_nic(n)
if nic['ret']:
nic_results.append(nic['entries'])
result["entries"] = nic_results
return result
@@ -2792,3 +2796,164 @@ class RedfishUtils(object):
if response['ret'] is False:
return response
return {'ret': True, 'changed': True, 'msg': "Modified Manager NIC"}
def set_hostinterface_attributes(self, hostinterface_config, hostinterface_id=None):
    """Apply configuration to a Redfish HostInterface resource.

    :param hostinterface_config: dict of properties to set; values may be
        scalars or nested dicts (a dict is wrapped in a list when the
        current value is list-typed).
    :param hostinterface_id: optional resource ID selecting one interface
        when the manager exposes several.
    :return: dict with 'ret', 'changed' and 'msg' describing the outcome.
    """
    response = self.get_request(self.root_uri + self.manager_uri)
    if response['ret'] is False:
        return response
    data = response['data']
    if 'HostInterfaces' not in data:
        return {'ret': False, 'msg': "HostInterfaces resource not found"}
    hostinterfaces_uri = data["HostInterfaces"]["@odata.id"]
    response = self.get_request(self.root_uri + hostinterfaces_uri)
    if response['ret'] is False:
        return response
    data = response['data']
    uris = [a.get('@odata.id') for a in data.get('Members', []) if a.get('@odata.id')]

    # Capture list of URIs that match a specified HostInterface resource ID
    if hostinterface_id:
        matching_hostinterface_uris = [uri for uri in uris if hostinterface_id in uri.split('/')[-1]]
    if hostinterface_id and matching_hostinterface_uris:
        # '<list>.pop()' replaces the original unbound 'list.pop(<list>)'.
        hostinterface_uri = matching_hostinterface_uris.pop()
    elif hostinterface_id and not matching_hostinterface_uris:
        return {'ret': False, 'msg': "HostInterface ID %s not present." % hostinterface_id}
    elif len(uris) == 1:
        hostinterface_uri = uris.pop()
    else:
        return {'ret': False, 'msg': "HostInterface ID not defined and multiple interfaces detected."}

    response = self.get_request(self.root_uri + hostinterface_uri)
    if response['ret'] is False:
        return response
    current_hostinterface_config = response['data']

    # Build the PATCH payload. 'prop' avoids shadowing the builtin 'property'.
    payload = {}
    for prop in hostinterface_config.keys():
        value = hostinterface_config[prop]
        if prop not in current_hostinterface_config:
            return {'ret': False, 'msg': "Property %s in hostinterface_config is invalid" % prop}
        if isinstance(value, dict):
            if isinstance(current_hostinterface_config[prop], dict):
                payload[prop] = value
            elif isinstance(current_hostinterface_config[prop], list):
                # Current value is a collection; wrap the supplied dict.
                payload[prop] = [value]
            else:
                return {'ret': False, 'msg': "Value of property %s in hostinterface_config is invalid" % prop}
        else:
            payload[prop] = value

    # Decide whether any payload property differs from the current config.
    need_change = False
    for prop in payload.keys():
        set_value = payload[prop]
        cur_value = current_hostinterface_config[prop]
        if not isinstance(set_value, dict) and not isinstance(set_value, list):
            if set_value != cur_value:
                need_change = True
        if isinstance(set_value, dict):
            for subprop in payload[prop].keys():
                if subprop not in current_hostinterface_config[prop]:
                    need_change = True
                    break
                sub_set_value = payload[prop][subprop]
                sub_cur_value = current_hostinterface_config[prop][subprop]
                if sub_set_value != sub_cur_value:
                    need_change = True
        if isinstance(set_value, list):
            if len(set_value) != len(cur_value):
                need_change = True
                continue
            for i in range(len(set_value)):
                for subprop in payload[prop][i].keys():
                    if subprop not in current_hostinterface_config[prop][i]:
                        need_change = True
                        break
                    sub_set_value = payload[prop][i][subprop]
                    sub_cur_value = current_hostinterface_config[prop][i][subprop]
                    if sub_set_value != sub_cur_value:
                        need_change = True

    if not need_change:
        return {'ret': True, 'changed': False, 'msg': "Host Interface already configured"}
    response = self.patch_request(self.root_uri + hostinterface_uri, payload)
    if response['ret'] is False:
        return response
    return {'ret': True, 'changed': True, 'msg': "Modified Host Interface"}
def get_hostinterfaces(self):
    """Collect HostInterface resources from every known manager.

    :return: dict with 'ret' and 'entries' (list of per-interface property
        dicts), or a failure dict if any Redfish request fails or no
        HostInterface objects exist.
    """
    result = {}
    hostinterface_results = []
    properties = ['Id', 'Name', 'Description', 'HostInterfaceType', 'Status',
                  'InterfaceEnabled', 'ExternallyAccessible', 'AuthenticationModes',
                  'AuthNoneRoleId', 'CredentialBootstrapping']
    manager_uri_list = self.manager_uris
    for manager_uri in manager_uri_list:
        response = self.get_request(self.root_uri + manager_uri)
        if response['ret'] is False:
            return response

        result['ret'] = True
        data = response['data']

        if 'HostInterfaces' in data:
            hostinterfaces_uri = data[u'HostInterfaces'][u'@odata.id']
        else:
            continue

        response = self.get_request(self.root_uri + hostinterfaces_uri)
        # Bug fix: the original read response['data'] without checking
        # whether this request succeeded, unlike every other call here.
        if response['ret'] is False:
            return response
        data = response['data']

        if 'Members' not in data:
            continue
        for hostinterface in data['Members']:
            hostinterface_uri = hostinterface['@odata.id']
            hostinterface_response = self.get_request(self.root_uri + hostinterface_uri)
            # dictionary for capturing individual HostInterface properties
            hostinterface_data_temp = {}
            if hostinterface_response['ret'] is False:
                return hostinterface_response
            hostinterface_data = hostinterface_response['data']
            # 'prop' avoids shadowing the builtin 'property'; skip null values.
            for prop in properties:
                if prop in hostinterface_data and hostinterface_data[prop] is not None:
                    hostinterface_data_temp[prop] = hostinterface_data[prop]
            # Check for the presence of a ManagerEthernetInterface
            # object, a link to a _single_ EthernetInterface that the
            # BMC uses to communicate with the host.
            if 'ManagerEthernetInterface' in hostinterface_data:
                interface_uri = hostinterface_data['ManagerEthernetInterface']['@odata.id']
                interface_response = self.get_nic(interface_uri)
                if interface_response['ret'] is False:
                    return interface_response
                hostinterface_data_temp['ManagerEthernetInterface'] = interface_response['entries']
            # Check for the presence of a HostEthernetInterfaces
            # object, a link to a _collection_ of EthernetInterfaces
            # that the host uses to communicate with the BMC.
            if 'HostEthernetInterfaces' in hostinterface_data:
                interfaces_uri = hostinterface_data['HostEthernetInterfaces']['@odata.id']
                interfaces_response = self.get_request(self.root_uri + interfaces_uri)
                if interfaces_response['ret'] is False:
                    return interfaces_response
                interfaces_data = interfaces_response['data']
                if 'Members' in interfaces_data:
                    for interface in interfaces_data['Members']:
                        interface_uri = interface['@odata.id']
                        interface_response = self.get_nic(interface_uri)
                        if interface_response['ret'] is False:
                            return interface_response
                        # Create the list on the first HostEthernetInterfaces item.
                        hostinterface_data_temp.setdefault('HostEthernetInterfaces', []).append(
                            interface_response['entries'])
            hostinterface_results.append(hostinterface_data_temp)
    result["entries"] = hostinterface_results
    if not result["entries"]:
        return {'ret': False, 'msg': "No HostInterface objects found"}
    return result

View File

@@ -11,29 +11,28 @@ __metaclass__ = type
DOCUMENTATION = '''
---
module: lxd_container
short_description: Manage LXD Containers
short_description: Manage LXD instances
description:
- Management of LXD containers
- Management of LXD containers and virtual machines.
author: "Hiroaki Nakamura (@hnakamur)"
options:
name:
description:
- Name of a container.
- Name of an instance.
type: str
required: true
architecture:
description:
- 'The architecture for the container (for example C(x86_64) or C(i686)).
- 'The architecture for the instance (for example C(x86_64) or C(i686)).
See U(https://github.com/lxc/lxd/blob/master/doc/rest-api.md#post-1).'
type: str
required: false
config:
description:
- 'The config for the container (for example C({"limits.cpu": "2"})).
- 'The config for the instance (for example C({"limits.cpu": "2"})).
See U(https://github.com/lxc/lxd/blob/master/doc/rest-api.md#post-1).'
- If the container already exists and its "config" values in metadata
obtained from GET /1.0/containers/<name>
U(https://github.com/lxc/lxd/blob/master/doc/rest-api.md#10containersname)
- If the instance already exists and its "config" values in metadata
obtained from the LXD API U(https://github.com/lxc/lxd/blob/master/doc/rest-api.md#instances-containers-and-virtual-machines)
are different, this module tries to apply the configurations.
- The keys starting with C(volatile.) are ignored for this comparison when I(ignore_volatile_options=true).
type: dict
@@ -50,25 +49,25 @@ options:
version_added: 3.7.0
profiles:
description:
- Profile to be used by the container.
- Profile to be used by the instance.
type: list
elements: str
devices:
description:
- 'The devices for the container
- 'The devices for the instance
(for example C({ "rootfs": { "path": "/dev/kvm", "type": "unix-char" }})).
See U(https://github.com/lxc/lxd/blob/master/doc/rest-api.md#post-1).'
type: dict
required: false
ephemeral:
description:
- Whether or not the container is ephemeral (for example C(true) or C(false)).
- Whether or not the instance is ephemeral (for example C(true) or C(false)).
See U(https://github.com/lxc/lxd/blob/master/doc/rest-api.md#post-1).
required: false
type: bool
source:
description:
- 'The source for the container
- 'The source for the instance
(e.g. { "type": "image",
"mode": "pull",
"server": "https://images.linuxcontainers.org",
@@ -86,39 +85,49 @@ options:
- absent
- frozen
description:
- Define the state of a container.
- Define the state of an instance.
required: false
default: started
type: str
target:
description:
- For cluster deployments. Will attempt to create a container on a target node.
If container exists elsewhere in a cluster, then container will not be replaced or moved.
- For cluster deployments. Will attempt to create an instance on a target node.
If the instance exists elsewhere in a cluster, then it will not be replaced or moved.
The name should respond to same name of the node you see in C(lxc cluster list).
type: str
required: false
version_added: 1.0.0
timeout:
description:
- A timeout for changing the state of the container.
- A timeout for changing the state of the instance.
- This is also used as a timeout for waiting until IPv4 addresses
are set to the all network interfaces in the container after
are set to the all network interfaces in the instance after
starting or restarting.
required: false
default: 30
type: int
type:
description:
- Instance type can be either C(virtual-machine) or C(container).
required: false
default: container
choices:
- container
- virtual-machine
type: str
version_added: 4.1.0
wait_for_ipv4_addresses:
description:
- If this is true, the C(lxd_container) waits until IPv4 addresses
are set to the all network interfaces in the container after
are set to the all network interfaces in the instance after
starting or restarting.
required: false
default: false
type: bool
force_stop:
description:
- If this is true, the C(lxd_container) forces to stop the container
when it stops or restarts the container.
- If this is true, the C(lxd_container) forces to stop the instance
when it stops or restarts the instance.
required: false
default: false
type: bool
@@ -160,18 +169,18 @@ options:
required: false
type: str
notes:
- Containers must have a unique name. If you attempt to create a container
- Instances can be a container or a virtual machine, both of them must have unique name. If you attempt to create an instance
with a name that already existed in the users namespace the module will
simply return as "unchanged".
- There are two ways to run commands in containers, using the command
- There are two ways to run commands inside a container or virtual machine, using the command
module or using the ansible lxd connection plugin bundled in Ansible >=
2.1, the later requires python to be installed in the container which can
2.1, the latter requires Python to be installed in the instance which can
be done with the command module.
- You can copy a file from the host to the container
- You can copy a file from the host to the instance
with the Ansible M(ansible.builtin.copy) and M(ansible.builtin.template) module and the `lxd` connection plugin.
See the example below.
- You can copy a file in the created container to the localhost
with `command=lxc file pull container_name/dir/filename filename`.
- You can copy a file in the created instance to the localhost
with `command=lxc file pull instance_name/dir/filename filename`.
See the first example below.
'''
@@ -240,6 +249,7 @@ EXAMPLES = '''
community.general.lxd_container:
name: mycontainer
state: absent
type: container
# An example for restarting a container
- hosts: localhost
@@ -249,6 +259,7 @@ EXAMPLES = '''
community.general.lxd_container:
name: mycontainer
state: restarted
type: container
# An example for restarting a container using https to connect to the LXD server
- hosts: localhost
@@ -306,16 +317,36 @@ EXAMPLES = '''
mode: pull
alias: ubuntu/xenial/amd64
target: node02
# An example for creating a virtual machine
- hosts: localhost
connection: local
tasks:
- name: Create container on another node
community.general.lxd_container:
name: new-vm-1
type: virtual-machine
state: started
ignore_volatile_options: true
wait_for_ipv4_addresses: true
profiles: ["default"]
source:
protocol: simplestreams
type: image
mode: pull
server: https://images.linuxcontainers.org
alias: debian/11
timeout: 600
'''
RETURN = '''
addresses:
description: Mapping from the network device name to a list of IPv4 addresses in the container
description: Mapping from the network device name to a list of IPv4 addresses in the instance.
returned: when state is started or restarted
type: dict
sample: {"eth0": ["10.155.92.191"]}
old_state:
description: The old state of the container
description: The old state of the instance.
returned: when state is started or restarted
type: str
sample: "stopped"
@@ -325,7 +356,7 @@ logs:
type: list
sample: "(too long to be placed here)"
actions:
description: List of actions performed for the container.
description: List of actions performed for the instance.
returned: success
type: list
sample: '["create", "start"]'
@@ -384,6 +415,15 @@ class LXDContainerManagement(object):
self.addresses = None
self.target = self.module.params['target']
self.type = self.module.params['type']
# LXD Rest API provides additional endpoints for creating containers and virtual-machines.
self.api_endpoint = None
if self.type == 'container':
self.api_endpoint = '/1.0/containers'
elif self.type == 'virtual-machine':
self.api_endpoint = '/1.0/virtual-machines'
self.key_file = self.module.params.get('client_key')
if self.key_file is None:
self.key_file = '{0}/.config/lxc/client.key'.format(os.environ['HOME'])
@@ -419,20 +459,20 @@ class LXDContainerManagement(object):
if param_val is not None:
self.config[attr] = param_val
def _get_container_json(self):
def _get_instance_json(self):
return self.client.do(
'GET', '/1.0/containers/{0}'.format(self.name),
'GET', '{0}/{1}'.format(self.api_endpoint, self.name),
ok_error_codes=[404]
)
def _get_container_state_json(self):
def _get_instance_state_json(self):
return self.client.do(
'GET', '/1.0/containers/{0}/state'.format(self.name),
'GET', '{0}/{1}/state'.format(self.api_endpoint, self.name),
ok_error_codes=[404]
)
@staticmethod
def _container_json_to_module_state(resp_json):
def _instance_json_to_module_state(resp_json):
if resp_json['type'] == 'error':
return 'absent'
return ANSIBLE_LXD_STATES[resp_json['metadata']['status']]
@@ -441,45 +481,45 @@ class LXDContainerManagement(object):
body_json = {'action': action, 'timeout': self.timeout}
if force_stop:
body_json['force'] = True
return self.client.do('PUT', '/1.0/containers/{0}/state'.format(self.name), body_json=body_json)
return self.client.do('PUT', '{0}/{1}/state'.format(self.api_endpoint, self.name), body_json=body_json)
def _create_container(self):
def _create_instance(self):
config = self.config.copy()
config['name'] = self.name
if self.target:
self.client.do('POST', '/1.0/containers?' + urlencode(dict(target=self.target)), config)
self.client.do('POST', '{0}?{1}'.format(self.api_endpoint, urlencode(dict(target=self.target))), config)
else:
self.client.do('POST', '/1.0/containers', config)
self.client.do('POST', self.api_endpoint, config)
self.actions.append('create')
def _start_container(self):
def _start_instance(self):
self._change_state('start')
self.actions.append('start')
def _stop_container(self):
def _stop_instance(self):
self._change_state('stop', self.force_stop)
self.actions.append('stop')
def _restart_container(self):
def _restart_instance(self):
self._change_state('restart', self.force_stop)
self.actions.append('restart')
def _delete_container(self):
self.client.do('DELETE', '/1.0/containers/{0}'.format(self.name))
def _delete_instance(self):
self.client.do('DELETE', '{0}/{1}'.format(self.api_endpoint, self.name))
self.actions.append('delete')
def _freeze_container(self):
def _freeze_instance(self):
self._change_state('freeze')
self.actions.append('freeze')
def _unfreeze_container(self):
def _unfreeze_instance(self):
self._change_state('unfreeze')
self.actions.append('unfreez')
def _container_ipv4_addresses(self, ignore_devices=None):
def _instance_ipv4_addresses(self, ignore_devices=None):
ignore_devices = ['lo'] if ignore_devices is None else ignore_devices
resp_json = self._get_container_state_json()
resp_json = self._get_instance_state_json()
network = resp_json['metadata']['network'] or {}
network = dict((k, v) for k, v in network.items() if k not in ignore_devices) or {}
addresses = dict((k, [a['address'] for a in v['addresses'] if a['family'] == 'inet']) for k, v in network.items()) or {}
@@ -494,7 +534,7 @@ class LXDContainerManagement(object):
due = datetime.datetime.now() + datetime.timedelta(seconds=self.timeout)
while datetime.datetime.now() < due:
time.sleep(1)
addresses = self._container_ipv4_addresses()
addresses = self._instance_ipv4_addresses()
if self._has_all_ipv4_addresses(addresses):
self.addresses = addresses
return
@@ -504,72 +544,72 @@ class LXDContainerManagement(object):
def _started(self):
if self.old_state == 'absent':
self._create_container()
self._start_container()
self._create_instance()
self._start_instance()
else:
if self.old_state == 'frozen':
self._unfreeze_container()
self._unfreeze_instance()
elif self.old_state == 'stopped':
self._start_container()
if self._needs_to_apply_container_configs():
self._apply_container_configs()
self._start_instance()
if self._needs_to_apply_instance_configs():
self._apply_instance_configs()
if self.wait_for_ipv4_addresses:
self._get_addresses()
def _stopped(self):
if self.old_state == 'absent':
self._create_container()
self._create_instance()
else:
if self.old_state == 'stopped':
if self._needs_to_apply_container_configs():
self._start_container()
self._apply_container_configs()
self._stop_container()
if self._needs_to_apply_instance_configs():
self._start_instance()
self._apply_instance_configs()
self._stop_instance()
else:
if self.old_state == 'frozen':
self._unfreeze_container()
if self._needs_to_apply_container_configs():
self._apply_container_configs()
self._stop_container()
self._unfreeze_instance()
if self._needs_to_apply_instance_configs():
self._apply_instance_configs()
self._stop_instance()
def _restarted(self):
if self.old_state == 'absent':
self._create_container()
self._start_container()
self._create_instance()
self._start_instance()
else:
if self.old_state == 'frozen':
self._unfreeze_container()
if self._needs_to_apply_container_configs():
self._apply_container_configs()
self._restart_container()
self._unfreeze_instance()
if self._needs_to_apply_instance_configs():
self._apply_instance_configs()
self._restart_instance()
if self.wait_for_ipv4_addresses:
self._get_addresses()
def _destroyed(self):
if self.old_state != 'absent':
if self.old_state == 'frozen':
self._unfreeze_container()
self._unfreeze_instance()
if self.old_state != 'stopped':
self._stop_container()
self._delete_container()
self._stop_instance()
self._delete_instance()
def _frozen(self):
if self.old_state == 'absent':
self._create_container()
self._start_container()
self._freeze_container()
self._create_instance()
self._start_instance()
self._freeze_instance()
else:
if self.old_state == 'stopped':
self._start_container()
if self._needs_to_apply_container_configs():
self._apply_container_configs()
self._freeze_container()
self._start_instance()
if self._needs_to_apply_instance_configs():
self._apply_instance_configs()
self._freeze_instance()
def _needs_to_change_container_config(self, key):
def _needs_to_change_instance_config(self, key):
if key not in self.config:
return False
if key == 'config' and self.ignore_volatile_options: # the old behavior is to ignore configurations by keyword "volatile"
old_configs = dict((k, v) for k, v in self.old_container_json['metadata'][key].items() if not k.startswith('volatile.'))
old_configs = dict((k, v) for k, v in self.old_instance_json['metadata'][key].items() if not k.startswith('volatile.'))
for k, v in self.config['config'].items():
if k not in old_configs:
return True
@@ -577,7 +617,7 @@ class LXDContainerManagement(object):
return True
return False
elif key == 'config': # next default behavior
old_configs = dict((k, v) for k, v in self.old_container_json['metadata'][key].items())
old_configs = dict((k, v) for k, v in self.old_instance_json['metadata'][key].items())
for k, v in self.config['config'].items():
if k not in old_configs:
return True
@@ -585,39 +625,41 @@ class LXDContainerManagement(object):
return True
return False
else:
old_configs = self.old_container_json['metadata'][key]
old_configs = self.old_instance_json['metadata'][key]
return self.config[key] != old_configs
def _needs_to_apply_container_configs(self):
def _needs_to_apply_instance_configs(self):
return (
self._needs_to_change_container_config('architecture') or
self._needs_to_change_container_config('config') or
self._needs_to_change_container_config('ephemeral') or
self._needs_to_change_container_config('devices') or
self._needs_to_change_container_config('profiles')
self._needs_to_change_instance_config('architecture') or
self._needs_to_change_instance_config('config') or
self._needs_to_change_instance_config('ephemeral') or
self._needs_to_change_instance_config('devices') or
self._needs_to_change_instance_config('profiles')
)
def _apply_container_configs(self):
old_metadata = self.old_container_json['metadata']
def _apply_instance_configs(self):
old_metadata = self.old_instance_json['metadata']
body_json = {
'architecture': old_metadata['architecture'],
'config': old_metadata['config'],
'devices': old_metadata['devices'],
'profiles': old_metadata['profiles']
}
if self._needs_to_change_container_config('architecture'):
if self._needs_to_change_instance_config('architecture'):
body_json['architecture'] = self.config['architecture']
if self._needs_to_change_container_config('config'):
if self._needs_to_change_instance_config('config'):
for k, v in self.config['config'].items():
body_json['config'][k] = v
if self._needs_to_change_container_config('ephemeral'):
if self._needs_to_change_instance_config('ephemeral'):
body_json['ephemeral'] = self.config['ephemeral']
if self._needs_to_change_container_config('devices'):
if self._needs_to_change_instance_config('devices'):
body_json['devices'] = self.config['devices']
if self._needs_to_change_container_config('profiles'):
if self._needs_to_change_instance_config('profiles'):
body_json['profiles'] = self.config['profiles']
self.client.do('PUT', '/1.0/containers/{0}'.format(self.name), body_json=body_json)
self.actions.append('apply_container_configs')
self.client.do('PUT', '{0}/{1}'.format(self.api_endpoint, self.name), body_json=body_json)
self.actions.append('apply_instance_configs')
def run(self):
"""Run the main method."""
@@ -627,8 +669,8 @@ class LXDContainerManagement(object):
self.client.authenticate(self.trust_password)
self.ignore_volatile_options = self.module.params.get('ignore_volatile_options')
self.old_container_json = self._get_container_json()
self.old_state = self._container_json_to_module_state(self.old_container_json)
self.old_instance_json = self._get_instance_json()
self.old_state = self._instance_json_to_module_state(self.old_instance_json)
action = getattr(self, LXD_ANSIBLE_STATES[self.state])
action()
@@ -698,6 +740,11 @@ def main():
type='int',
default=30
),
type=dict(
type='str',
default='container',
choices=['container', 'virtual-machine'],
),
wait_for_ipv4_addresses=dict(
type='bool',
default=False
@@ -736,6 +783,7 @@ def main():
'This will change in the future. Please test your scripts'
'by "ignore_volatile_options: false". To keep the old behavior, set that option explicitly to "true"',
version='6.0.0', collection_name='community.general')
lxd_manage = LXDContainerManagement(module=module)
lxd_manage.run()

View File

@@ -319,11 +319,25 @@ def remove_workspace(bin_path, project_path, workspace):
_workspace_cmd(bin_path, project_path, 'delete', workspace)
def build_plan(command, project_path, variables_args, state_file, targets, state, plan_path=None):
def build_plan(command, project_path, variables_args, state_file, targets, state, apply_args, plan_path=None):
if plan_path is None:
f, plan_path = tempfile.mkstemp(suffix='.tfplan')
plan_command = [command[0], 'plan', '-input=false', '-no-color', '-detailed-exitcode', '-out', plan_path]
local_command = command.copy()
plan_command = [command[0], 'plan']
if state == "planned":
for c in local_command[1:]:
plan_command.append(c)
if state == "present":
for a in apply_args:
local_command.remove(a)
for c in local_command[1:]:
plan_command.append(c)
plan_command.extend(['-input=false', '-no-color', '-detailed-exitcode', '-out', plan_path])
for t in targets:
plan_command.extend(['-target', t])
@@ -461,7 +475,7 @@ def main():
module.fail_json(msg='Could not find plan_file "{0}", check the path and try again.'.format(plan_file))
else:
plan_file, needs_application, out, err, command = build_plan(command, project_path, variables_args, state_file,
module.params.get('targets'), state, plan_file)
module.params.get('targets'), state, APPLY_ARGS, plan_file)
if state == 'present' and check_destroy and '- destroy' in out:
module.fail_json(msg="Aborting command because it would destroy some resources. "
"Consider switching the 'check_destroy' to false to suppress this error")

View File

@@ -69,10 +69,11 @@ options:
type: str
ip4:
description:
- The IPv4 address to this interface.
- List of IPv4 addresses to this interface.
- Use the format C(192.0.2.24/24).
- If defined and I(method4) is not specified, automatically set C(ipv4.method) to C(manual).
type: str
type: list
elements: str
gw4:
description:
- The IPv4 gateway for this interface.
@@ -822,7 +823,9 @@ EXAMPLES = r'''
# nmcli_ethernet:
# - conn_name: em1
# ifname: em1
# ip4: '{{ tenant_ip }}'
# ip4:
# - '{{ tenant_ip }}'
# - '{{ second_tenant_ip }}'
# gw4: '{{ tenant_gw }}'
# - conn_name: em2
# ifname: em2
@@ -844,6 +847,7 @@ EXAMPLES = r'''
# storage_ip: "192.0.2.91/23"
# external_ip: "198.51.100.23/21"
# tenant_ip: "203.0.113.77/23"
# second_tenant_ip: "204.0.113.77/23"
# ```
@@ -997,6 +1001,16 @@ EXAMPLES = r'''
type: ethernet
state: present
- name: Add second ip4 address
community.general.nmcli:
conn_name: my-eth1
ifname: eth1
type: ethernet
ip4:
- 192.0.2.100/24
- 192.0.3.100/24
state: present
- name: Add VxLan
community.general.nmcli:
type: vxlan
@@ -1468,7 +1482,8 @@ class Nmcli(object):
'ipv6.ignore-auto-routes',
'802-11-wireless.hidden'):
return bool
elif setting in ('ipv4.dns',
elif setting in ('ipv4.addresses',
'ipv4.dns',
'ipv4.dns-search',
'ipv4.routes',
'ipv4.routing-rules',
@@ -1758,7 +1773,7 @@ def main():
'wifi',
'gsm',
]),
ip4=dict(type='str'),
ip4=dict(type='list', elements='str'),
gw4=dict(type='str'),
gw4_ignore_auto=dict(type='bool', default=False),
routes4=dict(type='list', elements='str'),

View File

@@ -102,6 +102,20 @@ packages:
returned: when upgrade is set to yes
type: list
sample: [ package, other-package ]
stdout:
description: Output from pacman.
returned: success, when needed
type: str
sample: ":: Synchronizing package databases... core is up to date :: Starting full system upgrade..."
version_added: 4.1.0
stderr:
description: Error output from pacman.
returned: success, when needed
type: str
sample: "warning: libtool: local (2.4.6+44+gb9b44533-14) is newer than core (2.4.6+42+gb88cebd5-15)\nwarning ..."
version_added: 4.1.0
'''
EXAMPLES = '''
@@ -236,9 +250,9 @@ def update_package_db(module, pacman_path):
rc, stdout, stderr = module.run_command(cmd, check_rc=False)
if rc == 0:
return True
return stdout, stderr
else:
module.fail_json(msg="could not update package db")
module.fail_json(msg="could not update package db", stdout=stdout, stderr=stderr)
def upgrade(module, pacman_path):
@@ -273,11 +287,11 @@ def upgrade(module, pacman_path):
rc, stdout, stderr = module.run_command(cmdupgrade, check_rc=False)
if rc == 0:
if packages:
module.exit_json(changed=True, msg='System upgraded', packages=packages, diff=diff)
module.exit_json(changed=True, msg='System upgraded', packages=packages, diff=diff, stdout=stdout, stderr=stderr)
else:
module.exit_json(changed=False, msg='Nothing to upgrade', packages=packages)
else:
module.fail_json(msg="Could not upgrade")
module.fail_json(msg="Could not upgrade", stdout=stdout, stderr=stderr)
else:
module.exit_json(changed=False, msg='Nothing to upgrade', packages=packages)
@@ -293,6 +307,8 @@ def remove_packages(module, pacman_path, packages):
module.params["extra_args"] += " --nodeps --nodeps"
remove_c = 0
stdout_total = ""
stderr_total = ""
# Using a for loop in case of error, we can report the package that failed
for package in packages:
# Query the package first, to see if we even need to remove
@@ -304,8 +320,10 @@ def remove_packages(module, pacman_path, packages):
rc, stdout, stderr = module.run_command(cmd, check_rc=False)
if rc != 0:
module.fail_json(msg="failed to remove %s" % (package))
module.fail_json(msg="failed to remove %s" % (package), stdout=stdout, stderr=stderr)
stdout_total += stdout
stderr_total += stderr
if module._diff:
d = stdout.split('\n')[2].split(' ')[2:]
for i, pkg in enumerate(d):
@@ -316,7 +334,7 @@ def remove_packages(module, pacman_path, packages):
remove_c += 1
if remove_c > 0:
module.exit_json(changed=True, msg="removed %s package(s)" % remove_c, diff=diff)
module.exit_json(changed=True, msg="removed %s package(s)" % remove_c, diff=diff, stdout=stdout_total, stderr=stderr_total)
module.exit_json(changed=False, msg="package(s) already absent")
@@ -352,7 +370,7 @@ def install_packages(module, pacman_path, state, packages, package_files):
rc, stdout, stderr = module.run_command(cmd, check_rc=False)
if rc != 0:
module.fail_json(msg="failed to install %s: %s" % (" ".join(to_install_repos), stderr))
module.fail_json(msg="failed to install %s: %s" % (" ".join(to_install_repos), stderr), stdout=stdout, stderr=stderr)
# As we pass `--needed` to pacman returns a single line of ` there is nothing to do` if no change is performed.
# The check for > 3 is here because we pick the 4th line in normal operation.
@@ -371,7 +389,7 @@ def install_packages(module, pacman_path, state, packages, package_files):
rc, stdout, stderr = module.run_command(cmd, check_rc=False)
if rc != 0:
module.fail_json(msg="failed to install %s: %s" % (" ".join(to_install_files), stderr))
module.fail_json(msg="failed to install %s: %s" % (" ".join(to_install_files), stderr), stdout=stdout, stderr=stderr)
# As we pass `--needed` to pacman returns a single line of ` there is nothing to do` if no change is performed.
# The check for > 3 is here because we pick the 4th line in normal operation.
@@ -389,7 +407,7 @@ def install_packages(module, pacman_path, state, packages, package_files):
message = "But could not ensure 'latest' state for %s package(s) as remote version could not be fetched." % (package_err)
if install_c > 0:
module.exit_json(changed=True, msg="installed %s package(s). %s" % (install_c, message), diff=diff)
module.exit_json(changed=True, msg="installed %s package(s). %s" % (install_c, message), diff=diff, stdout=stdout, stderr=stderr)
module.exit_json(changed=False, msg="package(s) already installed. %s" % (message), diff=diff)
@@ -479,9 +497,9 @@ def main():
p['state'] = 'absent'
if p["update_cache"] and not module.check_mode:
update_package_db(module, pacman_path)
stdout, stderr = update_package_db(module, pacman_path)
if not (p['name'] or p['upgrade']):
module.exit_json(changed=True, msg='Updated the package master lists')
module.exit_json(changed=True, msg='Updated the package master lists', stdout=stdout, stderr=stderr)
if p['update_cache'] and module.check_mode and not (p['name'] or p['upgrade']):
module.exit_json(changed=True, msg='Would have updated the package cache')

View File

@@ -35,6 +35,12 @@ options:
- Password to connect to the BMC.
required: true
type: str
key:
description:
- Encryption key to connect to the BMC in hex format.
required: false
type: str
version_added: 4.1.0
bootdev:
description:
- Set boot device to use on next reboot
@@ -115,11 +121,13 @@ EXAMPLES = '''
name: test.testdomain.com
user: admin
password: password
key: 1234567890AABBCCDEFF000000EEEE12
bootdev: network
state: absent
'''
import traceback
import binascii
PYGHMI_IMP_ERR = None
try:
@@ -138,6 +146,7 @@ def main():
port=dict(default=623, type='int'),
user=dict(required=True, no_log=True),
password=dict(required=True, no_log=True),
key=dict(type='str', no_log=True),
state=dict(default='present', choices=['present', 'absent']),
bootdev=dict(required=True, choices=['network', 'hd', 'floppy', 'safe', 'optical', 'setup', 'default']),
persistent=dict(default=False, type='bool'),
@@ -162,10 +171,18 @@ def main():
if state == 'absent' and bootdev == 'default':
module.fail_json(msg="The bootdev 'default' cannot be used with state 'absent'.")
try:
if module.params['key']:
key = binascii.unhexlify(module.params['key'])
else:
key = None
except Exception as e:
module.fail_json(msg="Unable to convert 'key' from hex string.")
# --- run command ---
try:
ipmi_cmd = command.Command(
bmc=name, userid=user, password=password, port=port
bmc=name, userid=user, password=password, port=port, kg=key
)
module.debug('ipmi instantiated - name: "%s"' % name)
current = ipmi_cmd.get_bootdev()

View File

@@ -35,6 +35,12 @@ options:
- Password to connect to the BMC.
required: true
type: str
key:
description:
- Encryption key to connect to the BMC in hex format.
required: false
type: str
version_added: 4.1.0
state:
description:
- Whether to ensure that the machine in desired state.
@@ -76,6 +82,7 @@ EXAMPLES = '''
'''
import traceback
import binascii
PYGHMI_IMP_ERR = None
try:
@@ -95,6 +102,7 @@ def main():
state=dict(required=True, choices=['on', 'off', 'shutdown', 'reset', 'boot']),
user=dict(required=True, no_log=True),
password=dict(required=True, no_log=True),
key=dict(type='str', no_log=True),
timeout=dict(default=300, type='int'),
),
supports_check_mode=True,
@@ -110,10 +118,18 @@ def main():
state = module.params['state']
timeout = module.params['timeout']
try:
if module.params['key']:
key = binascii.unhexlify(module.params['key'])
else:
key = None
except Exception as e:
module.fail_json(msg="Unable to convert 'key' from hex string.")
# --- run command ---
try:
ipmi_cmd = command.Command(
bmc=name, userid=user, password=password, port=port
bmc=name, userid=user, password=password, port=port, kg=key
)
module.debug('ipmi instantiated - name: "%s"' % name)

View File

@@ -100,6 +100,18 @@ options:
type: bool
default: false
version_added: 3.7.0
hostinterface_config:
required: false
description:
- Setting dict of HostInterface on OOB controller.
type: dict
version_added: '4.1.0'
hostinterface_id:
required: false
description:
- Redfish HostInterface instance ID if multiple HostInterfaces are present.
type: str
version_added: '4.1.0'
author: "Jose Delarosa (@jose-delarosa)"
'''
@@ -201,6 +213,27 @@ EXAMPLES = '''
baseuri: "{{ baseuri }}"
username: "{{ username }}"
password: "{{ password }}"
- name: Disable Host Interface
community.general.redfish_config:
category: Manager
command: SetHostInterface
hostinterface_config:
InterfaceEnabled: false
baseuri: "{{ baseuri }}"
username: "{{ username }}"
password: "{{ password }}"
- name: Enable Host Interface for HostInterface resource ID '2'
community.general.redfish_config:
category: Manager
command: SetHostInterface
hostinterface_config:
InterfaceEnabled: true
hostinterface_id: "2"
baseuri: "{{ baseuri }}"
username: "{{ username }}"
password: "{{ password }}"
'''
RETURN = '''
@@ -220,7 +253,7 @@ from ansible.module_utils.common.text.converters import to_native
CATEGORY_COMMANDS_ALL = {
"Systems": ["SetBiosDefaultSettings", "SetBiosAttributes", "SetBootOrder",
"SetDefaultBootOrder"],
"Manager": ["SetNetworkProtocols", "SetManagerNic"]
"Manager": ["SetNetworkProtocols", "SetManagerNic", "SetHostInterface"]
}
@@ -248,6 +281,8 @@ def main():
default={}
),
strip_etag_quotes=dict(type='bool', default=False),
hostinterface_config=dict(type='dict', default={}),
hostinterface_id=dict(),
),
required_together=[
('username', 'password'),
@@ -288,6 +323,12 @@ def main():
# Etag options
strip_etag_quotes = module.params['strip_etag_quotes']
# HostInterface config options
hostinterface_config = module.params['hostinterface_config']
# HostInterface instance ID
hostinterface_id = module.params['hostinterface_id']
# Build root URI
root_uri = "https://" + module.params['baseuri']
rf_utils = RedfishUtils(creds, root_uri, timeout, module,
@@ -331,6 +372,8 @@ def main():
result = rf_utils.set_network_protocols(module.params['network_protocols'])
elif command == "SetManagerNic":
result = rf_utils.set_manager_nic(nic_addr, nic_config)
elif command == "SetHostInterface":
result = rf_utils.set_hostinterface_attributes(hostinterface_config, hostinterface_id)
# Return data back or fail with proper message
if result['ret'] is True:

View File

@@ -269,6 +269,14 @@ EXAMPLES = '''
baseuri: "{{ baseuri }}"
username: "{{ username }}"
password: "{{ password }}"
- name: Get manager Redfish Host Interface inventory
community.general.redfish_info:
category: Manager
command: GetHostInterfaces
baseuri: "{{ baseuri }}"
username: "{{ username }}"
password: "{{ password }}"
'''
RETURN = '''
@@ -293,7 +301,7 @@ CATEGORY_COMMANDS_ALL = {
"Sessions": ["GetSessions"],
"Update": ["GetFirmwareInventory", "GetFirmwareUpdateCapabilities", "GetSoftwareInventory"],
"Manager": ["GetManagerNicInventory", "GetVirtualMedia", "GetLogs", "GetNetworkProtocols",
"GetHealthReport"],
"GetHealthReport", "GetHostInterfaces"],
}
CATEGORY_COMMANDS_DEFAULT = {
@@ -475,6 +483,8 @@ def main():
result["network_protocols"] = rf_utils.get_network_protocols()
elif command == "GetHealthReport":
result["health_report"] = rf_utils.get_multi_manager_health_report()
elif command == "GetHostInterfaces":
result["host_interfaces"] = rf_utils.get_hostinterfaces()
# Return data back
module.exit_json(redfish_facts=result)

View File

@@ -42,16 +42,18 @@ options:
description:
description:
- Description for the repository.
- Defaults to empty if I(force_defaults=true), which is the default in this module.
- Defaults to empty if I(force_defaults=false) when creating a new repository.
- This is only used when I(state) is C(present).
type: str
default: ''
required: false
private:
description:
- Whether the new repository should be private or not.
- Whether the repository should be private or not.
- Defaults to C(false) if I(force_defaults=true), which is the default in this module.
- Defaults to C(false) if I(force_defaults=false) when creating a new repository.
- This is only used when I(state) is C(present).
type: bool
default: no
required: false
state:
description:
@@ -72,6 +74,14 @@ options:
type: str
default: 'https://api.github.com'
version_added: "3.5.0"
force_defaults:
description:
- Overwrite current I(description) and I(private) attributes with defaults if set to C(true), which currently is the default.
- The default for this option will be deprecated in a future version of this collection, and eventually change to C(false).
type: bool
default: true
required: false
version_added: 4.1.0
requirements:
- PyGithub>=1.54
notes:
@@ -92,6 +102,7 @@ EXAMPLES = '''
description: "Just for fun"
private: yes
state: present
force_defaults: no
register: result
- name: Delete the repository
@@ -117,7 +128,7 @@ import sys
GITHUB_IMP_ERR = None
try:
from github import Github, GithubException
from github import Github, GithubException, GithubObject
from github.GithubException import UnknownObjectException
HAS_GITHUB_PACKAGE = True
except Exception:
@@ -135,7 +146,7 @@ def authenticate(username=None, password=None, access_token=None, api_url=None):
return Github(base_url=api_url, login_or_token=username, password=password)
def create_repo(gh, name, organization=None, private=False, description='', check_mode=False):
def create_repo(gh, name, organization=None, private=None, description=None, check_mode=False):
result = dict(
changed=False,
repo=dict())
@@ -151,16 +162,21 @@ def create_repo(gh, name, organization=None, private=False, description='', chec
except UnknownObjectException:
if not check_mode:
repo = target.create_repo(
name=name, private=private, description=description)
name=name,
private=GithubObject.NotSet if private is None else private,
description=GithubObject.NotSet if description is None else description,
)
result['repo'] = repo.raw_data
result['changed'] = True
changes = {}
if repo is None or repo.raw_data['private'] != private:
changes['private'] = private
if repo is None or repo.raw_data['description'] != description:
changes['description'] = description
if private is not None:
if repo is None or repo.raw_data['private'] != private:
changes['private'] = private
if description is not None:
if repo is None or repo.raw_data['description'] not in (description, description or None):
changes['description'] = description
if changes:
if not check_mode:
@@ -193,6 +209,10 @@ def delete_repo(gh, name, organization=None, check_mode=False):
def run_module(params, check_mode=False):
if params['force_defaults']:
params['description'] = params['description'] or ''
params['private'] = params['private'] or False
gh = authenticate(
username=params['username'], password=params['password'], access_token=params['access_token'],
api_url=params['api_url'])
@@ -216,17 +236,17 @@ def run_module(params, check_mode=False):
def main():
module_args = dict(
username=dict(type='str', required=False, default=None),
password=dict(type='str', required=False, default=None, no_log=True),
access_token=dict(type='str', required=False,
default=None, no_log=True),
username=dict(type='str'),
password=dict(type='str', no_log=True),
access_token=dict(type='str', no_log=True),
name=dict(type='str', required=True),
state=dict(type='str', required=False, default="present",
choices=["present", "absent"]),
organization=dict(type='str', required=False, default=None),
private=dict(type='bool', required=False, default=False),
description=dict(type='str', required=False, default=''),
private=dict(type='bool'),
description=dict(type='str'),
api_url=dict(type='str', required=False, default='https://api.github.com'),
force_defaults=dict(type='bool', default=True),
)
module = AnsibleModule(
argument_spec=module_args,

View File

@@ -126,51 +126,51 @@ from ansible.module_utils.api import basic_auth_argument_spec
from ansible.module_utils.basic import AnsibleModule, missing_required_lib
from ansible.module_utils.common.text.converters import to_native
from ansible_collections.community.general.plugins.module_utils.gitlab import findProject, gitlabAuthentication
from ansible_collections.community.general.plugins.module_utils.gitlab import find_project, gitlab_authentication
class GitLabDeployKey(object):
def __init__(self, module, gitlab_instance):
self._module = module
self._gitlab = gitlab_instance
self.deployKeyObject = None
self.deploy_key_object = None
'''
@param project Project object
@param key_title Title of the key
@param key_key String of the key
@param key_can_push Option of the deployKey
@param key_can_push Option of the deploy_key
@param options Deploy key options
'''
def createOrUpdateDeployKey(self, project, key_title, key_key, options):
def create_or_update_deploy_key(self, project, key_title, key_key, options):
changed = False
# note: unfortunately public key cannot be updated directly by
# GitLab REST API, so for that case we need to delete and
# than recreate the key
if self.deployKeyObject and self.deployKeyObject.key != key_key:
if self.deploy_key_object and self.deploy_key_object.key != key_key:
if not self._module.check_mode:
self.deployKeyObject.delete()
self.deployKeyObject = None
self.deploy_key_object.delete()
self.deploy_key_object = None
# Because we have already call existsDeployKey in main()
if self.deployKeyObject is None:
deployKey = self.createDeployKey(project, {
# Because we have already call exists_deploy_key in main()
if self.deploy_key_object is None:
deploy_key = self.create_deploy_key(project, {
'title': key_title,
'key': key_key,
'can_push': options['can_push']})
changed = True
else:
changed, deployKey = self.updateDeployKey(self.deployKeyObject, {
changed, deploy_key = self.update_deploy_key(self.deploy_key_object, {
'can_push': options['can_push']})
self.deployKeyObject = deployKey
self.deploy_key_object = deploy_key
if changed:
if self._module.check_mode:
self._module.exit_json(changed=True, msg="Successfully created or updated the deploy key %s" % key_title)
try:
deployKey.save()
deploy_key.save()
except Exception as e:
self._module.fail_json(msg="Failed to update deploy key: %s " % e)
return True
@@ -179,61 +179,61 @@ class GitLabDeployKey(object):
'''
@param project Project Object
@param arguments Attributes of the deployKey
@param arguments Attributes of the deploy_key
'''
def createDeployKey(self, project, arguments):
def create_deploy_key(self, project, arguments):
if self._module.check_mode:
return True
try:
deployKey = project.keys.create(arguments)
deploy_key = project.keys.create(arguments)
except (gitlab.exceptions.GitlabCreateError) as e:
self._module.fail_json(msg="Failed to create deploy key: %s " % to_native(e))
return deployKey
return deploy_key
'''
@param deployKey Deploy Key Object
@param arguments Attributes of the deployKey
@param deploy_key Deploy Key Object
@param arguments Attributes of the deploy_key
'''
def updateDeployKey(self, deployKey, arguments):
def update_deploy_key(self, deploy_key, arguments):
changed = False
for arg_key, arg_value in arguments.items():
if arguments[arg_key] is not None:
if getattr(deployKey, arg_key) != arguments[arg_key]:
setattr(deployKey, arg_key, arguments[arg_key])
if getattr(deploy_key, arg_key) != arguments[arg_key]:
setattr(deploy_key, arg_key, arguments[arg_key])
changed = True
return (changed, deployKey)
return (changed, deploy_key)
'''
@param project Project object
@param key_title Title of the key
'''
def findDeployKey(self, project, key_title):
deployKeys = project.keys.list(all=True)
for deployKey in deployKeys:
if (deployKey.title == key_title):
return deployKey
def find_deploy_key(self, project, key_title):
deploy_keys = project.keys.list(all=True)
for deploy_key in deploy_keys:
if (deploy_key.title == key_title):
return deploy_key
'''
@param project Project object
@param key_title Title of the key
'''
def existsDeployKey(self, project, key_title):
# When project exists, object will be stored in self.projectObject.
deployKey = self.findDeployKey(project, key_title)
if deployKey:
self.deployKeyObject = deployKey
def exists_deploy_key(self, project, key_title):
# When project exists, object will be stored in self.project_object.
deploy_key = self.find_deploy_key(project, key_title)
if deploy_key:
self.deploy_key_object = deploy_key
return True
return False
def deleteDeployKey(self):
def delete_deploy_key(self):
if self._module.check_mode:
return True
return self.deployKeyObject.delete()
return self.deploy_key_object.delete()
def main():
@@ -271,32 +271,32 @@ def main():
if not HAS_GITLAB_PACKAGE:
module.fail_json(msg=missing_required_lib("python-gitlab"), exception=GITLAB_IMP_ERR)
gitlab_instance = gitlabAuthentication(module)
gitlab_instance = gitlab_authentication(module)
gitlab_deploy_key = GitLabDeployKey(module, gitlab_instance)
project = findProject(gitlab_instance, project_identifier)
project = find_project(gitlab_instance, project_identifier)
if project is None:
module.fail_json(msg="Failed to create deploy key: project %s doesn't exists" % project_identifier)
deployKey_exists = gitlab_deploy_key.existsDeployKey(project, key_title)
deploy_key_exists = gitlab_deploy_key.exists_deploy_key(project, key_title)
if state == 'absent':
if deployKey_exists:
gitlab_deploy_key.deleteDeployKey()
if deploy_key_exists:
gitlab_deploy_key.delete_deploy_key()
module.exit_json(changed=True, msg="Successfully deleted deploy key %s" % key_title)
else:
module.exit_json(changed=False, msg="Deploy key deleted or does not exists")
if state == 'present':
if gitlab_deploy_key.createOrUpdateDeployKey(project, key_title, key_keyfile, {'can_push': key_can_push}):
if gitlab_deploy_key.create_or_update_deploy_key(project, key_title, key_keyfile, {'can_push': key_can_push}):
module.exit_json(changed=True, msg="Successfully created or updated the deploy key %s" % key_title,
deploy_key=gitlab_deploy_key.deployKeyObject._attrs)
deploy_key=gitlab_deploy_key.deploy_key_object._attrs)
else:
module.exit_json(changed=False, msg="No need to update the deploy key %s" % key_title,
deploy_key=gitlab_deploy_key.deployKeyObject._attrs)
deploy_key=gitlab_deploy_key.deploy_key_object._attrs)
if __name__ == '__main__':

View File

@@ -169,19 +169,19 @@ from ansible.module_utils.api import basic_auth_argument_spec
from ansible.module_utils.basic import AnsibleModule, missing_required_lib
from ansible.module_utils.common.text.converters import to_native
from ansible_collections.community.general.plugins.module_utils.gitlab import findGroup, gitlabAuthentication
from ansible_collections.community.general.plugins.module_utils.gitlab import find_group, gitlab_authentication
class GitLabGroup(object):
def __init__(self, module, gitlab_instance):
self._module = module
self._gitlab = gitlab_instance
self.groupObject = None
self.group_object = None
'''
@param group Group object
'''
def getGroupId(self, group):
def get_group_id(self, group):
if group is not None:
return group.id
return None
@@ -191,12 +191,12 @@ class GitLabGroup(object):
@param parent Parent group full path
@param options Group options
'''
def createOrUpdateGroup(self, name, parent, options):
def create_or_update_group(self, name, parent, options):
changed = False
# Because we have already call userExists in main()
if self.groupObject is None:
parent_id = self.getGroupId(parent)
if self.group_object is None:
parent_id = self.get_group_id(parent)
payload = {
'name': name,
@@ -211,10 +211,10 @@ class GitLabGroup(object):
payload['description'] = options['description']
if options.get('require_two_factor_authentication'):
payload['require_two_factor_authentication'] = options['require_two_factor_authentication']
group = self.createGroup(payload)
group = self.create_group(payload)
changed = True
else:
changed, group = self.updateGroup(self.groupObject, {
changed, group = self.update_group(self.group_object, {
'name': name,
'description': options['description'],
'visibility': options['visibility'],
@@ -224,7 +224,7 @@ class GitLabGroup(object):
'require_two_factor_authentication': options['require_two_factor_authentication'],
})
self.groupObject = group
self.group_object = group
if changed:
if self._module.check_mode:
self._module.exit_json(changed=True, msg="Successfully created or updated the group %s" % name)
@@ -240,7 +240,7 @@ class GitLabGroup(object):
'''
@param arguments Attributes of the group
'''
def createGroup(self, arguments):
def create_group(self, arguments):
if self._module.check_mode:
return True
@@ -255,7 +255,7 @@ class GitLabGroup(object):
@param group Group Object
@param arguments Attributes of the group
'''
def updateGroup(self, group, arguments):
def update_group(self, group, arguments):
changed = False
for arg_key, arg_value in arguments.items():
@@ -266,8 +266,8 @@ class GitLabGroup(object):
return (changed, group)
def deleteGroup(self):
group = self.groupObject
def delete_group(self):
group = self.group_object
if len(group.projects.list()) >= 1:
self._module.fail_json(
@@ -285,11 +285,11 @@ class GitLabGroup(object):
@param name Name of the groupe
@param full_path Complete path of the Group including parent group path. <parent_path>/<group_path>
'''
def existsGroup(self, project_identifier):
# When group/user exists, object will be stored in self.groupObject.
group = findGroup(self._gitlab, project_identifier)
def exists_group(self, project_identifier):
# When group/user exists, object will be stored in self.group_object.
group = find_group(self._gitlab, project_identifier)
if group:
self.groupObject = group
self.group_object = group
return True
return False
@@ -339,7 +339,7 @@ def main():
if not HAS_GITLAB_PACKAGE:
module.fail_json(msg=missing_required_lib("python-gitlab"), exception=GITLAB_IMP_ERR)
gitlab_instance = gitlabAuthentication(module)
gitlab_instance = gitlab_authentication(module)
# Define default group_path based on group_name
if group_path is None:
@@ -349,34 +349,34 @@ def main():
parent_group = None
if parent_identifier:
parent_group = findGroup(gitlab_instance, parent_identifier)
parent_group = find_group(gitlab_instance, parent_identifier)
if not parent_group:
module.fail_json(msg="Failed create GitLab group: Parent group doesn't exists")
group_exists = gitlab_group.existsGroup(parent_group.full_path + '/' + group_path)
group_exists = gitlab_group.exists_group(parent_group.full_path + '/' + group_path)
else:
group_exists = gitlab_group.existsGroup(group_path)
group_exists = gitlab_group.exists_group(group_path)
if state == 'absent':
if group_exists:
gitlab_group.deleteGroup()
gitlab_group.delete_group()
module.exit_json(changed=True, msg="Successfully deleted group %s" % group_name)
else:
module.exit_json(changed=False, msg="Group deleted or does not exists")
if state == 'present':
if gitlab_group.createOrUpdateGroup(group_name, parent_group, {
"path": group_path,
"description": description,
"visibility": group_visibility,
"project_creation_level": project_creation_level,
"auto_devops_enabled": auto_devops_enabled,
"subgroup_creation_level": subgroup_creation_level,
"require_two_factor_authentication": require_two_factor_authentication,
}):
module.exit_json(changed=True, msg="Successfully created or updated the group %s" % group_name, group=gitlab_group.groupObject._attrs)
if gitlab_group.create_or_update_group(group_name, parent_group, {
"path": group_path,
"description": description,
"visibility": group_visibility,
"project_creation_level": project_creation_level,
"auto_devops_enabled": auto_devops_enabled,
"subgroup_creation_level": subgroup_creation_level,
"require_two_factor_authentication": require_two_factor_authentication,
}):
module.exit_json(changed=True, msg="Successfully created or updated the group %s" % group_name, group=gitlab_group.group_object._attrs)
else:
module.exit_json(changed=False, msg="No need to update the group %s" % group_name, group=gitlab_group.groupObject._attrs)
module.exit_json(changed=False, msg="No need to update the group %s" % group_name, group=gitlab_group.group_object._attrs)
if __name__ == '__main__':

View File

@@ -155,7 +155,7 @@ RETURN = r''' # '''
from ansible.module_utils.api import basic_auth_argument_spec
from ansible.module_utils.basic import AnsibleModule, missing_required_lib
from ansible_collections.community.general.plugins.module_utils.gitlab import gitlabAuthentication
from ansible_collections.community.general.plugins.module_utils.gitlab import gitlab_authentication
import traceback
@@ -288,7 +288,7 @@ def main():
'reporter': gitlab.REPORTER_ACCESS,
'developer': gitlab.DEVELOPER_ACCESS,
'maintainer': gitlab.MAINTAINER_ACCESS,
'owner': gitlab.OWNER_ACCESS
'owner': gitlab.OWNER_ACCESS,
}
gitlab_group = module.params['gitlab_group']
@@ -300,7 +300,7 @@ def main():
purge_users = [access_level_int[level] for level in purge_users]
# connect to gitlab server
gl = gitlabAuthentication(module)
gl = gitlab_authentication(module)
group = GitLabGroup(module, gl)

View File

@@ -144,7 +144,7 @@ except Exception:
GITLAB_IMP_ERR = traceback.format_exc()
HAS_GITLAB_PACKAGE = False
from ansible_collections.community.general.plugins.module_utils.gitlab import gitlabAuthentication
from ansible_collections.community.general.plugins.module_utils.gitlab import gitlab_authentication
class GitlabGroupVariables(object):
@@ -170,9 +170,13 @@ class GitlabGroupVariables(object):
def create_variable(self, key, value, masked, protected, variable_type):
if self._module.check_mode:
return
return self.group.variables.create({"key": key, "value": value,
"masked": masked, "protected": protected,
"variable_type": variable_type})
return self.group.variables.create({
"key": key,
"value": value,
"masked": masked,
"protected": protected,
"variable_type": variable_type,
})
def update_variable(self, key, var, value, masked, protected, variable_type):
if var.value == value and var.protected == protected and var.masked == masked and var.variable_type == variable_type:
@@ -226,11 +230,14 @@ def native_python_main(this_gitlab, purge, var_list, state, module):
existing_variables[index] = None
if state == 'present':
single_change = this_gitlab.update_variable(key,
gitlab_keys[index],
value, masked,
protected,
variable_type)
single_change = this_gitlab.update_variable(
key,
gitlab_keys[index],
value,
masked,
protected,
variable_type,
)
change = single_change or change
if single_change:
return_value['updated'].append(key)
@@ -291,7 +298,7 @@ def main():
if not HAS_GITLAB_PACKAGE:
module.fail_json(msg=missing_required_lib("python-gitlab"), exception=GITLAB_IMP_ERR)
gitlab_instance = gitlabAuthentication(module)
gitlab_instance = gitlab_authentication(module)
this_gitlab = GitlabGroupVariables(module=module, gitlab_instance=gitlab_instance)

View File

@@ -176,14 +176,14 @@ from ansible.module_utils.api import basic_auth_argument_spec
from ansible.module_utils.basic import AnsibleModule, missing_required_lib
from ansible.module_utils.common.text.converters import to_native
from ansible_collections.community.general.plugins.module_utils.gitlab import findProject, gitlabAuthentication
from ansible_collections.community.general.plugins.module_utils.gitlab import find_project, gitlab_authentication
class GitLabHook(object):
def __init__(self, module, gitlab_instance):
self._module = module
self._gitlab = gitlab_instance
self.hookObject = None
self.hook_object = None
'''
@param project Project Object
@@ -191,12 +191,12 @@ class GitLabHook(object):
@param description Description of the group
@param parent Parent group full path
'''
def createOrUpdateHook(self, project, hook_url, options):
def create_or_update_hook(self, project, hook_url, options):
changed = False
# Because we have already called userExists in main()
if self.hookObject is None:
hook = self.createHook(project, {
if self.hook_object is None:
hook = self.create_hook(project, {
'url': hook_url,
'push_events': options['push_events'],
'push_events_branch_filter': options['push_events_branch_filter'],
@@ -208,10 +208,11 @@ class GitLabHook(object):
'pipeline_events': options['pipeline_events'],
'wiki_page_events': options['wiki_page_events'],
'enable_ssl_verification': options['enable_ssl_verification'],
'token': options['token']})
'token': options['token'],
})
changed = True
else:
changed, hook = self.updateHook(self.hookObject, {
changed, hook = self.update_hook(self.hook_object, {
'push_events': options['push_events'],
'push_events_branch_filter': options['push_events_branch_filter'],
'issues_events': options['issues_events'],
@@ -222,9 +223,10 @@ class GitLabHook(object):
'pipeline_events': options['pipeline_events'],
'wiki_page_events': options['wiki_page_events'],
'enable_ssl_verification': options['enable_ssl_verification'],
'token': options['token']})
'token': options['token'],
})
self.hookObject = hook
self.hook_object = hook
if changed:
if self._module.check_mode:
self._module.exit_json(changed=True, msg="Successfully created or updated the hook %s" % hook_url)
@@ -241,7 +243,7 @@ class GitLabHook(object):
@param project Project Object
@param arguments Attributes of the hook
'''
def createHook(self, project, arguments):
def create_hook(self, project, arguments):
if self._module.check_mode:
return True
@@ -253,7 +255,7 @@ class GitLabHook(object):
@param hook Hook Object
@param arguments Attributes of the hook
'''
def updateHook(self, hook, arguments):
def update_hook(self, hook, arguments):
changed = False
for arg_key, arg_value in arguments.items():
@@ -268,7 +270,7 @@ class GitLabHook(object):
@param project Project object
@param hook_url Url to call on event
'''
def findHook(self, project, hook_url):
def find_hook(self, project, hook_url):
hooks = project.hooks.list()
for hook in hooks:
if (hook.url == hook_url):
@@ -278,19 +280,19 @@ class GitLabHook(object):
@param project Project object
@param hook_url Url to call on event
'''
def existsHook(self, project, hook_url):
# When project exists, object will be stored in self.projectObject.
hook = self.findHook(project, hook_url)
def exists_hook(self, project, hook_url):
# When the hook exists, the object will be stored in self.hook_object.
hook = self.find_hook(project, hook_url)
if hook:
self.hookObject = hook
self.hook_object = hook
return True
return False
def deleteHook(self):
def delete_hook(self):
if self._module.check_mode:
return True
return self.hookObject.delete()
return self.hook_object.delete()
def main():
@@ -346,41 +348,42 @@ def main():
if not HAS_GITLAB_PACKAGE:
module.fail_json(msg=missing_required_lib("python-gitlab"), exception=GITLAB_IMP_ERR)
gitlab_instance = gitlabAuthentication(module)
gitlab_instance = gitlab_authentication(module)
gitlab_hook = GitLabHook(module, gitlab_instance)
project = findProject(gitlab_instance, project_identifier)
project = find_project(gitlab_instance, project_identifier)
if project is None:
module.fail_json(msg="Failed to create hook: project %s doesn't exists" % project_identifier)
hook_exists = gitlab_hook.existsHook(project, hook_url)
hook_exists = gitlab_hook.exists_hook(project, hook_url)
if state == 'absent':
if hook_exists:
gitlab_hook.deleteHook()
gitlab_hook.delete_hook()
module.exit_json(changed=True, msg="Successfully deleted hook %s" % hook_url)
else:
module.exit_json(changed=False, msg="Hook deleted or does not exists")
if state == 'present':
if gitlab_hook.createOrUpdateHook(project, hook_url, {
"push_events": push_events,
"push_events_branch_filter": push_events_branch_filter,
"issues_events": issues_events,
"merge_requests_events": merge_requests_events,
"tag_push_events": tag_push_events,
"note_events": note_events,
"job_events": job_events,
"pipeline_events": pipeline_events,
"wiki_page_events": wiki_page_events,
"enable_ssl_verification": enable_ssl_verification,
"token": hook_token}):
if gitlab_hook.create_or_update_hook(project, hook_url, {
"push_events": push_events,
"push_events_branch_filter": push_events_branch_filter,
"issues_events": issues_events,
"merge_requests_events": merge_requests_events,
"tag_push_events": tag_push_events,
"note_events": note_events,
"job_events": job_events,
"pipeline_events": pipeline_events,
"wiki_page_events": wiki_page_events,
"enable_ssl_verification": enable_ssl_verification,
"token": hook_token,
}):
module.exit_json(changed=True, msg="Successfully created or updated the hook %s" % hook_url, hook=gitlab_hook.hookObject._attrs)
module.exit_json(changed=True, msg="Successfully created or updated the hook %s" % hook_url, hook=gitlab_hook.hook_object._attrs)
else:
module.exit_json(changed=False, msg="No need to update the hook %s" % hook_url, hook=gitlab_hook.hookObject._attrs)
module.exit_json(changed=False, msg="No need to update the hook %s" % hook_url, hook=gitlab_hook.hook_object._attrs)
if __name__ == '__main__':

View File

@@ -237,21 +237,21 @@ from ansible.module_utils.api import basic_auth_argument_spec
from ansible.module_utils.basic import AnsibleModule, missing_required_lib
from ansible.module_utils.common.text.converters import to_native
from ansible_collections.community.general.plugins.module_utils.gitlab import findGroup, findProject, gitlabAuthentication
from ansible_collections.community.general.plugins.module_utils.gitlab import find_group, find_project, gitlab_authentication
class GitLabProject(object):
def __init__(self, module, gitlab_instance):
self._module = module
self._gitlab = gitlab_instance
self.projectObject = None
self.project_object = None
'''
@param project_name Name of the project
@param namespace Namespace Object (User or Group)
@param options Options of the project
'''
def createOrUpdateProject(self, project_name, namespace, options):
def create_or_update_project(self, project_name, namespace, options):
changed = False
project_options = {
'name': project_name,
@@ -273,20 +273,20 @@ class GitLabProject(object):
'shared_runners_enabled': options['shared_runners_enabled'],
}
# Because we have already called userExists in main()
if self.projectObject is None:
if self.project_object is None:
project_options.update({
'path': options['path'],
'import_url': options['import_url'],
})
if options['initialize_with_readme']:
project_options['initialize_with_readme'] = options['initialize_with_readme']
project_options = self.getOptionsWithValue(project_options)
project = self.createProject(namespace, project_options)
project_options = self.get_options_with_value(project_options)
project = self.create_project(namespace, project_options)
changed = True
else:
changed, project = self.updateProject(self.projectObject, project_options)
changed, project = self.update_project(self.project_object, project_options)
self.projectObject = project
self.project_object = project
if changed:
if self._module.check_mode:
self._module.exit_json(changed=True, msg="Successfully created or updated the project %s" % project_name)
@@ -302,7 +302,7 @@ class GitLabProject(object):
@param namespace Namespace Object (User or Group)
@param arguments Attributes of the project
'''
def createProject(self, namespace, arguments):
def create_project(self, namespace, arguments):
if self._module.check_mode:
return True
@@ -317,7 +317,7 @@ class GitLabProject(object):
'''
@param arguments Attributes of the project
'''
def getOptionsWithValue(self, arguments):
def get_options_with_value(self, arguments):
ret_arguments = dict()
for arg_key, arg_value in arguments.items():
if arguments[arg_key] is not None:
@@ -329,7 +329,7 @@ class GitLabProject(object):
@param project Project Object
@param arguments Attributes of the project
'''
def updateProject(self, project, arguments):
def update_project(self, project, arguments):
changed = False
for arg_key, arg_value in arguments.items():
@@ -340,11 +340,11 @@ class GitLabProject(object):
return (changed, project)
def deleteProject(self):
def delete_project(self):
if self._module.check_mode:
return True
project = self.projectObject
project = self.project_object
return project.delete()
@@ -352,11 +352,11 @@ class GitLabProject(object):
@param namespace User/Group object
@param name Name of the project
'''
def existsProject(self, namespace, path):
# When project exists, object will be stored in self.projectObject.
project = findProject(self._gitlab, namespace.full_path + '/' + path)
def exists_project(self, namespace, path):
# When project exists, object will be stored in self.project_object.
project = find_project(self._gitlab, namespace.full_path + '/' + path)
if project:
self.projectObject = project
self.project_object = project
return True
return False
@@ -433,7 +433,7 @@ def main():
if not HAS_GITLAB_PACKAGE:
module.fail_json(msg=missing_required_lib("python-gitlab"), exception=GITLAB_IMP_ERR)
gitlab_instance = gitlabAuthentication(module)
gitlab_instance = gitlab_authentication(module)
# Set project_path to project_name if it is empty.
if project_path is None:
@@ -444,7 +444,7 @@ def main():
namespace = None
namespace_id = None
if group_identifier:
group = findGroup(gitlab_instance, group_identifier)
group = find_group(gitlab_instance, group_identifier)
if group is None:
module.fail_json(msg="Failed to create project: group %s doesn't exists" % group_identifier)
@@ -466,40 +466,40 @@ def main():
if not namespace:
module.fail_json(msg="Failed to find the namespace for the project")
project_exists = gitlab_project.existsProject(namespace, project_path)
project_exists = gitlab_project.exists_project(namespace, project_path)
if state == 'absent':
if project_exists:
gitlab_project.deleteProject()
gitlab_project.delete_project()
module.exit_json(changed=True, msg="Successfully deleted project %s" % project_name)
module.exit_json(changed=False, msg="Project deleted or does not exists")
if state == 'present':
if gitlab_project.createOrUpdateProject(project_name, namespace, {
"path": project_path,
"description": project_description,
"initialize_with_readme": initialize_with_readme,
"issues_enabled": issues_enabled,
"merge_requests_enabled": merge_requests_enabled,
"merge_method": merge_method,
"wiki_enabled": wiki_enabled,
"snippets_enabled": snippets_enabled,
"visibility": visibility,
"import_url": import_url,
"lfs_enabled": lfs_enabled,
"allow_merge_on_skipped_pipeline": allow_merge_on_skipped_pipeline,
"only_allow_merge_if_all_discussions_are_resolved": only_allow_merge_if_all_discussions_are_resolved,
"only_allow_merge_if_pipeline_succeeds": only_allow_merge_if_pipeline_succeeds,
"packages_enabled": packages_enabled,
"remove_source_branch_after_merge": remove_source_branch_after_merge,
"squash_option": squash_option,
"ci_config_path": ci_config_path,
"shared_runners_enabled": shared_runners_enabled,
}):
if gitlab_project.create_or_update_project(project_name, namespace, {
"path": project_path,
"description": project_description,
"initialize_with_readme": initialize_with_readme,
"issues_enabled": issues_enabled,
"merge_requests_enabled": merge_requests_enabled,
"merge_method": merge_method,
"wiki_enabled": wiki_enabled,
"snippets_enabled": snippets_enabled,
"visibility": visibility,
"import_url": import_url,
"lfs_enabled": lfs_enabled,
"allow_merge_on_skipped_pipeline": allow_merge_on_skipped_pipeline,
"only_allow_merge_if_all_discussions_are_resolved": only_allow_merge_if_all_discussions_are_resolved,
"only_allow_merge_if_pipeline_succeeds": only_allow_merge_if_pipeline_succeeds,
"packages_enabled": packages_enabled,
"remove_source_branch_after_merge": remove_source_branch_after_merge,
"squash_option": squash_option,
"ci_config_path": ci_config_path,
"shared_runners_enabled": shared_runners_enabled,
}):
module.exit_json(changed=True, msg="Successfully created or updated the project %s" % project_name, project=gitlab_project.projectObject._attrs)
module.exit_json(changed=False, msg="No need to update the project %s" % project_name, project=gitlab_project.projectObject._attrs)
module.exit_json(changed=True, msg="Successfully created or updated the project %s" % project_name, project=gitlab_project.project_object._attrs)
module.exit_json(changed=False, msg="No need to update the project %s" % project_name, project=gitlab_project.project_object._attrs)
if __name__ == '__main__':

View File

@@ -176,7 +176,7 @@ RETURN = r''' # '''
from ansible.module_utils.api import basic_auth_argument_spec
from ansible.module_utils.basic import AnsibleModule, missing_required_lib
from ansible_collections.community.general.plugins.module_utils.gitlab import gitlabAuthentication
from ansible_collections.community.general.plugins.module_utils.gitlab import gitlab_authentication
import traceback
@@ -317,7 +317,7 @@ def main():
purge_users = [access_level_int[level] for level in purge_users]
# connect to gitlab server
gl = gitlabAuthentication(module)
gl = gitlab_authentication(module)
project = GitLabProjectMembers(module, gl)

View File

@@ -143,7 +143,7 @@ except Exception:
GITLAB_IMP_ERR = traceback.format_exc()
HAS_GITLAB_PACKAGE = False
from ansible_collections.community.general.plugins.module_utils.gitlab import gitlabAuthentication
from ansible_collections.community.general.plugins.module_utils.gitlab import gitlab_authentication
class GitlabProjectVariables(object):
@@ -172,7 +172,7 @@ class GitlabProjectVariables(object):
var = {
"key": key, "value": value,
"masked": masked, "protected": protected,
"variable_type": variable_type
"variable_type": variable_type,
}
if environment_scope is not None:
var["environment_scope"] = environment_scope
@@ -300,7 +300,7 @@ def main():
if not HAS_GITLAB_PACKAGE:
module.fail_json(msg=missing_required_lib("python-gitlab"), exception=GITLAB_IMP_ERR)
gitlab_instance = gitlabAuthentication(module)
gitlab_instance = gitlab_authentication(module)
this_gitlab = GitlabProjectVariables(module=module, gitlab_instance=gitlab_instance)

View File

@@ -87,7 +87,7 @@ except Exception:
GITLAB_IMP_ERR = traceback.format_exc()
HAS_GITLAB_PACKAGE = False
from ansible_collections.community.general.plugins.module_utils.gitlab import gitlabAuthentication
from ansible_collections.community.general.plugins.module_utils.gitlab import gitlab_authentication
class GitlabProtectedBranch(object):
@@ -179,7 +179,7 @@ def main():
module.fail_json(msg="community.general.gitlab_proteched_branch requires python-gitlab Python module >= 2.3.0 (installed version: [%s])."
" Please upgrade python-gitlab to version 2.3.0 or above." % gitlab_version)
gitlab_instance = gitlabAuthentication(module)
gitlab_instance = gitlab_authentication(module)
this_gitlab = GitlabProtectedBranch(module=module, project=project, gitlab_instance=gitlab_instance)
p_branch = this_gitlab.protected_branch_exist(name=name)

View File

@@ -186,7 +186,7 @@ from ansible.module_utils.api import basic_auth_argument_spec
from ansible.module_utils.basic import AnsibleModule, missing_required_lib
from ansible.module_utils.common.text.converters import to_native
from ansible_collections.community.general.plugins.module_utils.gitlab import gitlabAuthentication
from ansible_collections.community.general.plugins.module_utils.gitlab import gitlab_authentication
try:
cmp
@@ -203,32 +203,34 @@ class GitLabRunner(object):
# See https://gitlab.com/gitlab-org/gitlab-ce/issues/60774
# for group runner token access
self._runners_endpoint = project.runners if project else gitlab_instance.runners
self.runnerObject = None
self.runner_object = None
def createOrUpdateRunner(self, description, options):
def create_or_update_runner(self, description, options):
changed = False
# Because we have already called userExists in main()
if self.runnerObject is None:
runner = self.createRunner({
if self.runner_object is None:
runner = self.create_runner({
'description': description,
'active': options['active'],
'token': options['registration_token'],
'locked': options['locked'],
'run_untagged': options['run_untagged'],
'maximum_timeout': options['maximum_timeout'],
'tag_list': options['tag_list']})
'tag_list': options['tag_list'],
})
changed = True
else:
changed, runner = self.updateRunner(self.runnerObject, {
changed, runner = self.update_runner(self.runner_object, {
'active': options['active'],
'locked': options['locked'],
'run_untagged': options['run_untagged'],
'maximum_timeout': options['maximum_timeout'],
'access_level': options['access_level'],
'tag_list': options['tag_list']})
'tag_list': options['tag_list'],
})
self.runnerObject = runner
self.runner_object = runner
if changed:
if self._module.check_mode:
self._module.exit_json(changed=True, msg="Successfully created or updated the runner %s" % description)
@@ -244,7 +246,7 @@ class GitLabRunner(object):
'''
@param arguments Attributes of the runner
'''
def createRunner(self, arguments):
def create_runner(self, arguments):
if self._module.check_mode:
return True
@@ -259,7 +261,7 @@ class GitLabRunner(object):
@param runner Runner object
@param arguments Attributes of the runner
'''
def updateRunner(self, runner, arguments):
def update_runner(self, runner, arguments):
changed = False
for arg_key, arg_value in arguments.items():
@@ -282,7 +284,7 @@ class GitLabRunner(object):
'''
@param description Description of the runner
'''
def findRunner(self, description, owned=False):
def find_runner(self, description, owned=False):
if owned:
runners = self._runners_endpoint.list(as_list=False)
else:
@@ -301,20 +303,20 @@ class GitLabRunner(object):
'''
@param description Description of the runner
'''
def existsRunner(self, description, owned=False):
# When runner exists, object will be stored in self.runnerObject.
runner = self.findRunner(description, owned)
def exists_runner(self, description, owned=False):
# When runner exists, object will be stored in self.runner_object.
runner = self.find_runner(description, owned)
if runner:
self.runnerObject = runner
self.runner_object = runner
return True
return False
def deleteRunner(self):
def delete_runner(self):
if self._module.check_mode:
return True
runner = self.runnerObject
runner = self.runner_object
return runner.delete()
@@ -369,7 +371,7 @@ def main():
if not HAS_GITLAB_PACKAGE:
module.fail_json(msg=missing_required_lib("python-gitlab"), exception=GITLAB_IMP_ERR)
gitlab_instance = gitlabAuthentication(module)
gitlab_instance = gitlab_authentication(module)
gitlab_project = None
if project:
try:
@@ -378,28 +380,29 @@ def main():
module.fail_json(msg='No such a project %s' % project, exception=to_native(e))
gitlab_runner = GitLabRunner(module, gitlab_instance, gitlab_project)
runner_exists = gitlab_runner.existsRunner(runner_description, owned)
runner_exists = gitlab_runner.exists_runner(runner_description, owned)
if state == 'absent':
if runner_exists:
gitlab_runner.deleteRunner()
gitlab_runner.delete_runner()
module.exit_json(changed=True, msg="Successfully deleted runner %s" % runner_description)
else:
module.exit_json(changed=False, msg="Runner deleted or does not exists")
if state == 'present':
if gitlab_runner.createOrUpdateRunner(runner_description, {
"active": runner_active,
"tag_list": tag_list,
"run_untagged": run_untagged,
"locked": runner_locked,
"access_level": access_level,
"maximum_timeout": maximum_timeout,
"registration_token": registration_token}):
module.exit_json(changed=True, runner=gitlab_runner.runnerObject._attrs,
if gitlab_runner.create_or_update_runner(runner_description, {
"active": runner_active,
"tag_list": tag_list,
"run_untagged": run_untagged,
"locked": runner_locked,
"access_level": access_level,
"maximum_timeout": maximum_timeout,
"registration_token": registration_token,
}):
module.exit_json(changed=True, runner=gitlab_runner.runner_object._attrs,
msg="Successfully created or updated the runner %s" % runner_description)
else:
module.exit_json(changed=False, runner=gitlab_runner.runnerObject._attrs,
module.exit_json(changed=False, runner=gitlab_runner.runner_object._attrs,
msg="No need to update the runner %s" % runner_description)

View File

@@ -238,33 +238,34 @@ from ansible.module_utils.api import basic_auth_argument_spec
from ansible.module_utils.basic import AnsibleModule, missing_required_lib
from ansible.module_utils.common.text.converters import to_native
from ansible_collections.community.general.plugins.module_utils.gitlab import findGroup, gitlabAuthentication
from ansible_collections.community.general.plugins.module_utils.gitlab import find_group, gitlab_authentication
class GitLabUser(object):
def __init__(self, module, gitlab_instance):
self._module = module
self._gitlab = gitlab_instance
self.userObject = None
self.user_object = None
self.ACCESS_LEVEL = {
'guest': gitlab.GUEST_ACCESS,
'reporter': gitlab.REPORTER_ACCESS,
'developer': gitlab.DEVELOPER_ACCESS,
'master': gitlab.MAINTAINER_ACCESS,
'maintainer': gitlab.MAINTAINER_ACCESS,
'owner': gitlab.OWNER_ACCESS}
'owner': gitlab.OWNER_ACCESS,
}
'''
@param username Username of the user
@param options User options
'''
def createOrUpdateUser(self, username, options):
def create_or_update_user(self, username, options):
changed = False
potentionally_changed = False
# Because we have already called userExists in main()
if self.userObject is None:
user = self.createUser({
if self.user_object is None:
user = self.create_user({
'name': options['name'],
'username': username,
'password': options['password'],
@@ -277,8 +278,8 @@ class GitLabUser(object):
})
changed = True
else:
changed, user = self.updateUser(
self.userObject, {
changed, user = self.update_user(
self.user_object, {
# add "normal" parameters here, put uncheckable
# params in the dict below
'name': {'value': options['name']},
@@ -313,7 +314,7 @@ class GitLabUser(object):
# Assign ssh keys
if options['sshkey_name'] and options['sshkey_file']:
key_changed = self.addSshKeyToUser(user, {
key_changed = self.add_ssh_key_to_user(user, {
'name': options['sshkey_name'],
'file': options['sshkey_file'],
'expires_at': options['sshkey_expires_at']})
@@ -321,10 +322,10 @@ class GitLabUser(object):
# Assign group
if options['group_path']:
group_changed = self.assignUserToGroup(user, options['group_path'], options['access_level'])
group_changed = self.assign_user_to_group(user, options['group_path'], options['access_level'])
changed = changed or group_changed
self.userObject = user
self.user_object = user
if (changed or potentionally_changed) and not self._module.check_mode:
try:
user.save()
@@ -341,7 +342,7 @@ class GitLabUser(object):
'''
@param group User object
'''
def getUserId(self, user):
def get_user_id(self, user):
if user is not None:
return user.id
return None
@@ -350,7 +351,7 @@ class GitLabUser(object):
@param user User object
@param sshkey_name Name of the ssh key
'''
def sshKeyExists(self, user, sshkey_name):
def ssh_key_exists(self, user, sshkey_name):
keyList = map(lambda k: k.title, user.keys.list())
return sshkey_name in keyList
@@ -359,8 +360,8 @@ class GitLabUser(object):
@param user User object
@param sshkey Dict containing sshkey infos {"name": "", "file": "", "expires_at": ""}
'''
def addSshKeyToUser(self, user, sshkey):
if not self.sshKeyExists(user, sshkey['name']):
def add_ssh_key_to_user(self, user, sshkey):
if not self.ssh_key_exists(user, sshkey['name']):
if self._module.check_mode:
return True
@@ -381,7 +382,7 @@ class GitLabUser(object):
@param group Group object
@param user_id Id of the user to find
'''
def findMember(self, group, user_id):
def find_member(self, group, user_id):
try:
member = group.members.get(user_id)
except gitlab.exceptions.GitlabGetError:
@@ -392,8 +393,8 @@ class GitLabUser(object):
@param group Group object
@param user_id Id of the user to check
'''
def memberExists(self, group, user_id):
member = self.findMember(group, user_id)
def member_exists(self, group, user_id):
member = self.find_member(group, user_id)
return member is not None
@@ -402,8 +403,8 @@ class GitLabUser(object):
@param user_id Id of the user to check
@param access_level GitLab access_level to check
'''
def memberAsGoodAccessLevel(self, group, user_id, access_level):
member = self.findMember(group, user_id)
def member_as_good_access_level(self, group, user_id, access_level):
member = self.find_member(group, user_id)
return member.access_level == access_level
@@ -412,8 +413,8 @@ class GitLabUser(object):
@param group_path Complete path of the Group including parent group path. <parent_path>/<group_path>
@param access_level GitLab access_level to assign
'''
def assignUserToGroup(self, user, group_identifier, access_level):
group = findGroup(self._gitlab, group_identifier)
def assign_user_to_group(self, user, group_identifier, access_level):
group = find_group(self._gitlab, group_identifier)
if self._module.check_mode:
return True
@@ -421,16 +422,16 @@ class GitLabUser(object):
if group is None:
return False
if self.memberExists(group, self.getUserId(user)):
member = self.findMember(group, self.getUserId(user))
if not self.memberAsGoodAccessLevel(group, member.id, self.ACCESS_LEVEL[access_level]):
if self.member_exists(group, self.get_user_id(user)):
member = self.find_member(group, self.get_user_id(user))
if not self.member_as_good_access_level(group, member.id, self.ACCESS_LEVEL[access_level]):
member.access_level = self.ACCESS_LEVEL[access_level]
member.save()
return True
else:
try:
group.members.create({
'user_id': self.getUserId(user),
'user_id': self.get_user_id(user),
'access_level': self.ACCESS_LEVEL[access_level]})
except gitlab.exceptions.GitlabCreateError as e:
self._module.fail_json(msg="Failed to assign user to group: %s" % to_native(e))
@@ -441,7 +442,7 @@ class GitLabUser(object):
@param user User object
@param arguments User attributes
'''
def updateUser(self, user, arguments, uncheckable_args):
def update_user(self, user, arguments, uncheckable_args):
changed = False
for arg_key, arg_value in arguments.items():
@@ -449,7 +450,7 @@ class GitLabUser(object):
if av is not None:
if arg_key == "identities":
changed = self.addIdentities(user, av, uncheckable_args['overwrite_identities']['value'])
changed = self.add_identities(user, av, uncheckable_args['overwrite_identities']['value'])
elif getattr(user, arg_key) != av:
setattr(user, arg_value.get('setter', arg_key), av)
@@ -466,7 +467,7 @@ class GitLabUser(object):
'''
@param arguments User attributes
'''
def createUser(self, arguments):
def create_user(self, arguments):
if self._module.check_mode:
return True
@@ -478,7 +479,7 @@ class GitLabUser(object):
try:
user = self._gitlab.users.create(arguments)
if identities:
self.addIdentities(user, identities)
self.add_identities(user, identities)
except (gitlab.exceptions.GitlabCreateError) as e:
self._module.fail_json(msg="Failed to create user: %s " % to_native(e))
@@ -490,10 +491,10 @@ class GitLabUser(object):
@param identites List of identities to be added/updated
@param overwrite_identities Overwrite user identities with identities passed to this module
'''
def addIdentities(self, user, identities, overwrite_identities=False):
def add_identities(self, user, identities, overwrite_identities=False):
changed = False
if overwrite_identities:
changed = self.deleteIdentities(user, identities)
changed = self.delete_identities(user, identities)
for identity in identities:
if identity not in user.identities:
@@ -508,7 +509,7 @@ class GitLabUser(object):
@param user User object
@param identites List of identities to be added/updated
'''
def deleteIdentities(self, user, identities):
def delete_identities(self, user, identities):
changed = False
for identity in user.identities:
if identity not in identities:
@@ -520,7 +521,7 @@ class GitLabUser(object):
'''
@param username Username of the user
'''
def findUser(self, username):
def find_user(self, username):
users = self._gitlab.users.list(search=username)
for user in users:
if (user.username == username):
@@ -529,42 +530,42 @@ class GitLabUser(object):
'''
@param username Username of the user
'''
def existsUser(self, username):
# When user exists, object will be stored in self.userObject.
user = self.findUser(username)
def exists_user(self, username):
# When user exists, object will be stored in self.user_object.
user = self.find_user(username)
if user:
self.userObject = user
self.user_object = user
return True
return False
'''
@param username Username of the user
'''
def isActive(self, username):
user = self.findUser(username)
def is_active(self, username):
user = self.find_user(username)
return user.attributes['state'] == 'active'
def deleteUser(self):
def delete_user(self):
if self._module.check_mode:
return True
user = self.userObject
user = self.user_object
return user.delete()
def blockUser(self):
def block_user(self):
if self._module.check_mode:
return True
user = self.userObject
user = self.user_object
return user.block()
def unblockUser(self):
def unblock_user(self):
if self._module.check_mode:
return True
user = self.userObject
user = self.user_object
return user.unblock()
@@ -636,55 +637,56 @@ def main():
if not HAS_GITLAB_PACKAGE:
module.fail_json(msg=missing_required_lib("python-gitlab"), exception=GITLAB_IMP_ERR)
gitlab_instance = gitlabAuthentication(module)
gitlab_instance = gitlab_authentication(module)
gitlab_user = GitLabUser(module, gitlab_instance)
user_exists = gitlab_user.existsUser(user_username)
user_exists = gitlab_user.exists_user(user_username)
if user_exists:
user_is_active = gitlab_user.isActive(user_username)
user_is_active = gitlab_user.is_active(user_username)
else:
user_is_active = False
if state == 'absent':
if user_exists:
gitlab_user.deleteUser()
gitlab_user.delete_user()
module.exit_json(changed=True, msg="Successfully deleted user %s" % user_username)
else:
module.exit_json(changed=False, msg="User deleted or does not exists")
if state == 'blocked':
if user_exists and user_is_active:
gitlab_user.blockUser()
gitlab_user.block_user()
module.exit_json(changed=True, msg="Successfully blocked user %s" % user_username)
else:
module.exit_json(changed=False, msg="User already blocked or does not exists")
if state == 'unblocked':
if user_exists and not user_is_active:
gitlab_user.unblockUser()
gitlab_user.unblock_user()
module.exit_json(changed=True, msg="Successfully unblocked user %s" % user_username)
else:
module.exit_json(changed=False, msg="User is not blocked or does not exists")
if state == 'present':
if gitlab_user.createOrUpdateUser(user_username, {
"name": user_name,
"password": user_password,
"reset_password": user_reset_password,
"email": user_email,
"sshkey_name": user_sshkey_name,
"sshkey_file": user_sshkey_file,
"sshkey_expires_at": user_sshkey_expires_at,
"group_path": group_path,
"access_level": access_level,
"confirm": confirm,
"isadmin": user_isadmin,
"external": user_external,
"identities": user_identities,
"overwrite_identities": overwrite_identities}):
module.exit_json(changed=True, msg="Successfully created or updated the user %s" % user_username, user=gitlab_user.userObject._attrs)
if gitlab_user.create_or_update_user(user_username, {
"name": user_name,
"password": user_password,
"reset_password": user_reset_password,
"email": user_email,
"sshkey_name": user_sshkey_name,
"sshkey_file": user_sshkey_file,
"sshkey_expires_at": user_sshkey_expires_at,
"group_path": group_path,
"access_level": access_level,
"confirm": confirm,
"isadmin": user_isadmin,
"external": user_external,
"identities": user_identities,
"overwrite_identities": overwrite_identities,
}):
module.exit_json(changed=True, msg="Successfully created or updated the user %s" % user_username, user=gitlab_user.user_object._attrs)
else:
module.exit_json(changed=False, msg="No need to update the user %s" % user_username, user=gitlab_user.userObject._attrs)
module.exit_json(changed=False, msg="No need to update the user %s" % user_username, user=gitlab_user.user_object._attrs)
if __name__ == '__main__':

View File

@@ -13,11 +13,25 @@ module: listen_ports_facts
author:
- Nathan Davison (@ndavison)
description:
- Gather facts on processes listening on TCP and UDP ports using netstat command.
- Gather facts on processes listening on TCP and UDP ports using the C(netstat) or C(ss) commands.
- This module currently supports Linux only.
requirements:
- netstat
- netstat or ss
short_description: Gather facts on processes listening on TCP and UDP ports.
notes:
- |
C(ss) returns all processes for each listen address and port.
This plugin will return each of them, so multiple entries for the same listen address and port are likely in results.
options:
command:
description:
- Override which command to use for fetching listen ports.
- 'By default module will use first found supported command on the system (in alphanumerical order).'
type: str
choices:
- netstat
- ss
version_added: 4.1.0
'''
EXAMPLES = r'''
@@ -181,10 +195,87 @@ def netStatParse(raw):
return results
def main():
def ss_parse(raw):
    """Parse ``ss -plunt``-style output into a list of listener dicts.

    Each entry has the keys ``pid`` (int), ``address`` (str), ``port`` (int),
    ``protocol`` (str) and ``name`` (process name, ``''`` when unknown).

    Raises EnvironmentError when the output does not look like an ``ss`` table.
    """
    results = list()
    # "addr:port" or "[v6addr]:port"; group 1 = address, group 2 = port
    regex_conns = re.compile(pattern=r'\[?(.+?)\]?:([0-9]+)')
    # users:(("name",pid=1234,fd=3)) -> [("name", "1234"), ...]
    regex_pid = re.compile(pattern=r'"(.*?)",pid=(\d+)')

    lines = raw.splitlines()
    if len(lines) == 0 or not lines[0].startswith('Netid '):
        # unexpected stdout from ss
        raise EnvironmentError('Unknown stdout format of `ss`: {0}'.format(raw))
    # skip headers (-H arg is not present on e.g. Ubuntu 16)
    lines = lines[1:]

    for line in lines:
        cells = line.split(None, 6)
        try:
            if len(cells) == 6:
                # no process column, e.g. due to unprivileged user
                process = str()
                protocol, state, recv_q, send_q, local_addr_port, peer_addr_port = cells
            else:
                protocol, state, recv_q, send_q, local_addr_port, peer_addr_port, process = cells
        except ValueError:
            # unexpected stdout from ss
            raise EnvironmentError(
                'Expected `ss` table layout "Netid, State, Recv-Q, Send-Q, Local Address:Port, Peer Address:Port" and optionally "Process", \
but got something else: {0}'.format(line)
            )

        conns = regex_conns.search(local_addr_port)
        if conns is None:
            # no parseable address/port in this row; skip it.
            # (The previous `conns is None and pids is None` test could never
            # succeed — findall() returns a list, not None — and the code
            # would then crash below on conns.group().)
            continue

        pids = regex_pid.findall(process)
        if not pids:
            # likely unprivileged user, so add empty name & pid
            # as we do in netstat logic to be consistent with output.
            # (findall() returns an empty list, never None, so test for
            # emptiness; the old `if pids is None` silently dropped the row.)
            pids = [(str(), 0)]

        address = conns.group(1)
        port = conns.group(2)
        for name, pid in pids:
            result = {
                'pid': int(pid),
                'address': address,
                'port': int(port),
                'protocol': protocol,
                'name': name,
            }
            results.append(result)
    return results
def main():
commands_map = {
'netstat': {
'args': [
'-p',
'-l',
'-u',
'-n',
'-t',
],
'parse_func': netStatParse
},
'ss': {
'args': [
'-p',
'-l',
'-u',
'-n',
'-t',
],
'parse_func': ss_parse
},
}
module = AnsibleModule(
argument_spec={},
argument_spec=dict(
command=dict(type='str', choices=list(sorted(commands_map)))
),
supports_check_mode=True,
)
@@ -220,18 +311,34 @@ def main():
}
try:
netstat_cmd = module.get_bin_path('netstat', True)
command = None
bin_path = None
if module.params['command'] is not None:
command = module.params['command']
bin_path = module.get_bin_path(command, required=True)
else:
for c in sorted(commands_map):
bin_path = module.get_bin_path(c, required=False)
if bin_path is not None:
command = c
break
if bin_path is None:
raise EnvironmentError(msg='Unable to find any of the supported commands in PATH: {0}'.format(", ".join(sorted(commands_map))))
# which ports are listening for connections?
rc, stdout, stderr = module.run_command([netstat_cmd, '-plunt'])
args = commands_map[command]['args']
rc, stdout, stderr = module.run_command([bin_path] + args)
if rc == 0:
netstatOut = netStatParse(stdout)
for p in netstatOut:
parse_func = commands_map[command]['parse_func']
results = parse_func(stdout)
for p in results:
p['stime'] = getPidSTime(p['pid'])
p['user'] = getPidUser(p['pid'])
if p['protocol'] == 'tcp':
if p['protocol'].startswith('tcp'):
result['ansible_facts']['tcp_listen'].append(p)
elif p['protocol'] == 'udp':
elif p['protocol'].startswith('udp'):
result['ansible_facts']['udp_listen'].append(p)
except (KeyError, EnvironmentError) as e:
module.fail_json(msg=to_native(e))

View File

@@ -86,6 +86,14 @@ options:
- Whether the list of nodes in the persistent iSCSI database should be returned by the module.
type: bool
default: false
rescan:
description:
- Rescan an established session for discovering new targets.
- When I(target) is omitted, will rescan all sessions.
type: bool
default: false
version_added: 4.1.0
'''
EXAMPLES = r'''
@@ -124,6 +132,11 @@ EXAMPLES = r'''
portal: 10.1.1.250
auto_portal_startup: false
target: iqn.1986-03.com.sun:02:f8c1f9e0-c3ec-ec84-c9c9-8bfb0cd5de3d
- name: Rescan one or all established sessions to discover new targets (omit target for all sessions)
community.general.open_iscsi:
rescan: true
target: iqn.1986-03.com.sun:02:f8c1f9e0-c3ec-ec84-c9c9-8bfb0cd5de3d
'''
import glob
@@ -179,6 +192,15 @@ def iscsi_discover(module, portal, port):
module.run_command(cmd, check_rc=True)
def iscsi_rescan(module, target=None):
    """Ask iscsiadm to rescan and return its stdout.

    Without *target*, every established session is rescanned; otherwise only
    the node record for *target* is rescanned.
    """
    if target is None:
        command = [iscsiadm_cmd, '--mode', 'session', '--rescan']
    else:
        command = [iscsiadm_cmd, '--mode', 'node', '--rescan', '-T', target]
    # Return code and stderr are intentionally ignored; callers only use stdout.
    dummy_rc, stdout, dummy_err = module.run_command(command)
    return stdout
def target_loggedon(module, target, portal=None, port=None):
cmd = [iscsiadm_cmd, '--mode', 'session']
rc, out, err = module.run_command(cmd)
@@ -305,6 +327,7 @@ def main():
auto_portal_startup=dict(type='bool'),
discover=dict(type='bool', default=False),
show_nodes=dict(type='bool', default=False),
rescan=dict(type='bool', default=False),
),
required_together=[['node_user', 'node_pass'], ['node_user_in', 'node_pass_in']],
@@ -330,6 +353,7 @@ def main():
automatic_portal = module.params['auto_portal_startup']
discover = module.params['discover']
show_nodes = module.params['show_nodes']
rescan = module.params['rescan']
check = module.check_mode
@@ -421,6 +445,10 @@ def main():
result['changed'] |= True
result['automatic_portal_changed'] = True
if rescan is not False:
result['changed'] = True
result['sessions'] = iscsi_rescan(module, target)
module.exit_json(**result)

View File

@@ -6,3 +6,4 @@ skip/osx
skip/rhel8.2
skip/rhel8.3
skip/rhel8.4
skip/rhel8.5

View File

@@ -9,30 +9,25 @@
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
- name: install netstat and netcat on deb
apt:
name: "{{ item }}"
ansible.builtin.package:
name:
- net-tools
- netcat
state: latest
with_items:
- net-tools
- netcat
when: ansible_os_family == "Debian"
- name: install netstat and netcat on rh < 7
yum:
name: "{{ item }}"
ansible.builtin.package:
name:
- net-tools
- nc.x86_64
state: latest
with_items:
- net-tools
- nc.x86_64
when: ansible_os_family == "RedHat" and ansible_distribution_major_version|int < 7
- name: install netstat and netcat on rh >= 7
yum:
name: "{{ item }}"
- name: install netcat on rh >= 7
ansible.builtin.package:
name: 'nmap-ncat'
state: latest
with_items:
- net-tools
- nmap-ncat
when: ansible_os_family == "RedHat" and ansible_distribution_major_version|int >= 7
- name: start UDP server on port 5555
@@ -63,6 +58,16 @@
listen_ports_facts:
when: ansible_os_family == "RedHat" or ansible_os_family == "Debian"
- name: Gather listening ports facts explicitly via netstat
listen_ports_facts:
command: 'netstat'
when: (ansible_os_family == "RedHat" and ansible_distribution_major_version|int < 7) or ansible_os_family == "Debian"
- name: Gather listening ports facts explicitly via ss
listen_ports_facts:
command: 'ss'
when: ansible_os_family == "RedHat" and ansible_distribution_major_version|int >= 7
- name: check for ansible_facts.udp_listen exists
assert:
that: ansible_facts.udp_listen is defined

View File

@@ -0,0 +1,211 @@
# -*- coding: utf-8 -*-
# Copyright (c) 2020, Jeffrey van Pelt <jeff@vanpelt.one>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
#
# The API responses used in these tests were recorded from PVE version 6.2.
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import pytest
from ansible.errors import AnsibleError, AnsibleParserError
from ansible.inventory.data import InventoryData
from ansible_collections.community.general.plugins.inventory.xen_orchestra import InventoryModule
objects = {
'vms': {
'0e64588-2bea-2d82-e922-881654b0a48f':
{
'type': 'VM',
'addresses': {},
'CPUs': {'max': 4, 'number': 4},
'memory': {'dynamic': [1073741824, 2147483648], 'static': [536870912, 4294967296], 'size': 2147483648},
'name_description': '',
'name_label': 'XCP-NG lab 2',
'os_version': {},
'parent': 'd3af89b2-d846-0874-6acb-031ccf11c560',
'power_state': 'Running',
'tags': [],
'id': '0e645898-2bea-2d82-e922-881654b0a48f',
'uuid': '0e645898-2bea-2d82-e922-881654b0a48f',
'$pool': '3d315997-73bd-5a74-8ca7-289206cb03ab',
'$poolId': '3d315997-73bd-5a74-8ca7-289206cb03ab',
'$container': '222d8594-9426-468a-ad69-7a6f02330fa3'
},
'b0d25e70-019d-6182-2f7c-b0f5d8ef9331':
{
'type': 'VM',
'addresses': {'0/ipv4/0': '192.168.1.55', '1/ipv4/0': '10.0.90.1'},
'CPUs': {'max': 4, 'number': 4},
'mainIpAddress': '192.168.1.55',
'memory': {'dynamic': [2147483648, 2147483648], 'static': [134217728, 2147483648], 'size': 2147483648},
'name_description': '',
'name_label': 'XCP-NG lab 3',
'os_version': {'name': 'FreeBSD 11.3-STABLE', 'uname': '11.3-STABLE', 'distro': 'FreeBSD'},
'power_state': 'Halted',
'tags': [],
'id': 'b0d25e70-019d-6182-2f7c-b0f5d8ef9331',
'uuid': 'b0d25e70-019d-6182-2f7c-b0f5d8ef9331',
'$pool': '3d315997-73bd-5a74-8ca7-289206cb03ab',
'$poolId': '3d315997-73bd-5a74-8ca7-289206cb03ab',
'$container': 'c96ec4dd-28ac-4df4-b73c-4371bd202728',
}
},
'pools': {
'3d315997-73bd-5a74-8ca7-289206cb03ab': {
'master': '222d8594-9426-468a-ad69-7a6f02330fa3',
'tags': [],
'name_description': '',
'name_label': 'Storage Lab',
'cpus': {'cores': 120, 'sockets': 6},
'id': '3d315997-73bd-5a74-8ca7-289206cb03ab',
'type': 'pool',
'uuid': '3d315997-73bd-5a74-8ca7-289206cb03ab',
'$pool': '3d315997-73bd-5a74-8ca7-289206cb03ab',
'$poolId': '3d315997-73bd-5a74-8ca7-289206cb03ab'
}
},
'hosts': {
'c96ec4dd-28ac-4df4-b73c-4371bd202728': {
'type': 'host',
'uuid': 'c96ec4dd-28ac-4df4-b73c-4371bd202728',
'enabled': True,
'CPUs': {
'cpu_count': '40',
'socket_count': '2',
'vendor': 'GenuineIntel',
'speed': '1699.998',
'modelname': 'Intel(R) Xeon(R) CPU E5-2650L v2 @ 1.70GHz',
'family': '6',
'model': '62',
'stepping': '4'
},
'address': '172.16.210.14',
'build': 'release/stockholm/master/7',
'cpus': {'cores': 40, 'sockets': 2},
'hostname': 'r620-s1',
'name_description': 'Default install',
'name_label': 'R620-S1',
'memory': {'usage': 45283590144, 'size': 137391292416},
'power_state': 'Running',
'tags': [],
'version': '8.2.0',
'productBrand': 'XCP-ng',
'id': 'c96ec4dd-28ac-4df4-b73c-4371bd202728',
'$pool': '3d315997-73bd-5a74-8ca7-289206cb03ab',
'$poolId': '3d315997-73bd-5a74-8ca7-289206cb03ab'
},
'222d8594-9426-468a-ad69-7a6f02330fa3': {
'type': 'host',
'uuid': '222d8594-9426-468a-ad69-7a6f02330fa3',
'enabled': True,
'CPUs': {
'cpu_count': '40',
'socket_count': '2',
'vendor': 'GenuineIntel',
'speed': '1700.007',
'modelname': 'Intel(R) Xeon(R) CPU E5-2650L v2 @ 1.70GHz',
'family': '6',
'model': '62',
'stepping': '4'
},
'address': '172.16.210.16',
'build': 'release/stockholm/master/7',
'cpus': {'cores': 40, 'sockets': 2},
'hostname': 'r620-s2',
'name_description': 'Default install',
'name_label': 'R620-S2',
'memory': {'usage': 10636521472, 'size': 137391292416},
'power_state': 'Running',
'tags': ['foo', 'bar', 'baz'],
'version': '8.2.0',
'productBrand': 'XCP-ng',
'id': '222d8594-9426-468a-ad69-7a6f02330fa3',
'$pool': '3d315997-73bd-5a74-8ca7-289206cb03ab',
'$poolId': '3d315997-73bd-5a74-8ca7-289206cb03ab'
}
}
}
def get_option(option):
    """Stand-in for the inventory plugin's get_option() used by the tests.

    Returns an empty/neutral default for the options _populate() reads and
    None for anything else.
    """
    defaults = {
        'groups': {},
        'keyed_groups': [],
        'compose': {},
        'strict': False,
    }
    return defaults.get(option)
def serialize_groups(groups):
    """Return the str() form of every element of *groups* as a list."""
    return [str(group) for group in groups]
@pytest.fixture(scope="module")
def inventory():
    """Provide an InventoryModule wired to a fresh InventoryData store."""
    r = InventoryModule()
    r.inventory = InventoryData()
    return r
def test_verify_file_bad_config(inventory):
    # A config filename that does not match the plugin's expected pattern
    # must be rejected by verify_file().
    assert inventory.verify_file('foobar.xen_orchestra.yml') is False
def test_populate(inventory, mocker):
    """_populate() should register every VM and host from *objects*, set
    their hostvars, and group them by IP presence, host and pool."""
    # Replace get_option so the plugin sees empty groups/compose settings.
    inventory.get_option = mocker.MagicMock(side_effect=get_option)
    inventory._populate(objects)
    actual = sorted(inventory.inventory.hosts.keys())
    expected = sorted(['c96ec4dd-28ac-4df4-b73c-4371bd202728', '222d8594-9426-468a-ad69-7a6f02330fa3',
                       '0e64588-2bea-2d82-e922-881654b0a48f', 'b0d25e70-019d-6182-2f7c-b0f5d8ef9331'])
    assert actual == expected

    # Host with ip assertions
    host_with_ip = inventory.inventory.get_host(
        'b0d25e70-019d-6182-2f7c-b0f5d8ef9331')
    host_with_ip_vars = host_with_ip.vars

    assert host_with_ip_vars['ansible_host'] == '192.168.1.55'
    # power_state is lower-cased relative to the API's 'Halted'/'Running'.
    assert host_with_ip_vars['power_state'] == 'halted'
    assert host_with_ip_vars['type'] == 'VM'
    assert host_with_ip in inventory.inventory.groups['with_ip'].hosts

    # Host without ip
    host_without_ip = inventory.inventory.get_host(
        '0e64588-2bea-2d82-e922-881654b0a48f')
    host_without_ip_vars = host_without_ip.vars

    assert host_without_ip_vars['ansible_host'] is None
    assert host_without_ip_vars['power_state'] == 'running'
    assert host_without_ip in inventory.inventory.groups['without_ip'].hosts

    # VMs also appear under per-host groups (xo_host_<name>).
    assert host_with_ip in inventory.inventory.groups['xo_host_r620_s1'].hosts
    assert host_without_ip in inventory.inventory.groups['xo_host_r620_s2'].hosts
    r620_s1 = inventory.inventory.get_host(
        'c96ec4dd-28ac-4df4-b73c-4371bd202728')
    r620_s2 = inventory.inventory.get_host(
        '222d8594-9426-468a-ad69-7a6f02330fa3')

    assert r620_s1.vars['address'] == '172.16.210.14'
    assert r620_s1.vars['tags'] == []
    assert r620_s2.vars['address'] == '172.16.210.16'
    assert r620_s2.vars['tags'] == ['foo', 'bar', 'baz']

    storage_lab = inventory.inventory.groups['xo_pool_storage_lab']

    # Check that hosts are in their corresponding pool
    assert r620_s1 in storage_lab.hosts
    assert r620_s2 in storage_lab.hosts

    # Check that the VMs are in their corresponding pool too
    assert host_without_ip in storage_lab.hosts
    assert host_with_ip in storage_lab.hosts

View File

@@ -0,0 +1,44 @@
# -*- coding: utf-8 -*-
# Copyright: (c) 2021, RevBits <info@revbits.com>
# GNU General Public License v3.0+ (see COPYING or
# https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
from ansible_collections.community.general.tests.unit.compat.unittest import TestCase
from ansible_collections.community.general.tests.unit.compat.mock import (
patch,
MagicMock,
)
from ansible_collections.community.general.plugins.lookup import revbitspss
from ansible.plugins.loader import lookup_loader
class MockPamSecrets(MagicMock):
    """MagicMock stand-in for the revbitspss LookupModule.Client."""

    # Canned secret value handed back for every lookup.
    RESPONSE = 'dummy value'

    def get_pam_secret(self, path):
        """Return the canned RESPONSE regardless of *path*."""
        return self.RESPONSE
class TestLookupModule(TestCase):
    """Tests for the community.general.revbitspss lookup plugin."""

    def setUp(self):
        # Clearing this marks the optional client library as importable so
        # the plugin loads cleanly (presumably; verify against the plugin).
        revbitspss.ANOTHER_LIBRARY_IMPORT_ERROR = None
        self.lookup = lookup_loader.get("community.general.revbitspss")

    @patch(
        "ansible_collections.community.general.plugins.lookup.revbitspss.LookupModule.Client",
        MockPamSecrets(),
    )
    def test_get_pam_secret(self):
        """run() maps each term to the canned value served by the mocked client."""
        terms = ['dummy secret']
        variables = []
        kwargs = {
            "base_url": 'https://dummy.url',
            "api_key": 'dummy'
        }
        self.assertListEqual(
            [{'dummy secret': 'dummy value'}],
            self.lookup.run(terms, variables, **kwargs)
        )

View File

@@ -563,6 +563,37 @@ ipv6.ignore-auto-dns: no
ipv6.ignore-auto-routes: no
"""
TESTCASE_ETHERNET_STATIC_MULTIPLE_IP4_ADDRESSES = [
{
'type': 'ethernet',
'conn_name': 'non_existent_nw_device',
'ifname': 'ethernet_non_existant',
'ip4': ['10.10.10.10/24', '10.10.20.10/24'],
'gw4': '10.10.10.1',
'dns4': ['1.1.1.1', '8.8.8.8'],
'state': 'present',
'_ansible_check_mode': False,
}
]
TESTCASE_ETHERNET_STATIC_MULTIPLE_IP4_ADDRESSES_SHOW_OUTPUT = """\
connection.id: non_existent_nw_device
connection.interface-name: ethernet_non_existant
connection.autoconnect: yes
802-3-ethernet.mtu: auto
ipv4.method: manual
ipv4.addresses: 10.10.10.10/24,10.10.20.10/24
ipv4.gateway: 10.10.10.1
ipv4.ignore-auto-dns: no
ipv4.ignore-auto-routes: no
ipv4.never-default: no
ipv4.may-fail: yes
ipv4.dns: 1.1.1.1,8.8.8.8
ipv6.method: auto
ipv6.ignore-auto-dns: no
ipv6.ignore-auto-routes: no
"""
TESTCASE_WIRELESS = [
{
'type': 'wifi',
@@ -920,6 +951,24 @@ def mocked_ethernet_connection_static_unchanged(mocker):
execute_return=(0, TESTCASE_ETHERNET_STATIC_SHOW_OUTPUT, ""))
@pytest.fixture
def mocked_ethernet_connection_static_multiple_ip4_addresses_unchanged(mocker):
mocker_set(mocker,
connection_exists=True,
execute_return=(0, TESTCASE_ETHERNET_STATIC_MULTIPLE_IP4_ADDRESSES_SHOW_OUTPUT, ""))
@pytest.fixture
def mocked_ethernet_connection_static_modify(mocker):
mocker_set(mocker,
connection_exists=True,
execute_return=None,
execute_side_effect=(
(0, TESTCASE_ETHERNET_STATIC_SHOW_OUTPUT, ""),
(0, "", ""),
))
@pytest.fixture
def mocked_ethernet_connection_dhcp_to_static(mocker):
mocker_set(mocker,
@@ -2456,3 +2505,83 @@ def test_gsm_connection_unchanged(mocked_gsm_connection_unchanged, capfd):
results = json.loads(out)
assert not results.get('failed')
assert not results['changed']
@pytest.mark.parametrize('patch_ansible_module', TESTCASE_ETHERNET_STATIC_MULTIPLE_IP4_ADDRESSES, indirect=['patch_ansible_module'])
def test_create_ethernet_with_mulitple_ip4_addresses_static(mocked_generic_connection_create, capfd):
    """
    Test : Create ethernet connection with multiple static IP addresses
    """
    with pytest.raises(SystemExit):
        nmcli.main()

    # Expect exactly two nmcli invocations: `con add ...` then `con up ...`.
    assert nmcli.Nmcli.execute_command.call_count == 2
    arg_list = nmcli.Nmcli.execute_command.call_args_list
    add_args, add_kw = arg_list[0]

    assert add_args[0][0] == '/usr/bin/nmcli'
    assert add_args[0][1] == 'con'
    assert add_args[0][2] == 'add'
    assert add_args[0][3] == 'type'
    assert add_args[0][4] == 'ethernet'
    assert add_args[0][5] == 'con-name'
    assert add_args[0][6] == 'non_existent_nw_device'

    # Both addresses and both DNS servers must appear comma-joined.
    add_args_text = list(map(to_text, add_args[0]))
    for param in ['connection.interface-name', 'ethernet_non_existant',
                  'ipv4.addresses', '10.10.10.10/24,10.10.20.10/24',
                  'ipv4.gateway', '10.10.10.1',
                  'ipv4.dns', '1.1.1.1,8.8.8.8']:
        assert param in add_args_text

    up_args, up_kw = arg_list[1]
    assert up_args[0][0] == '/usr/bin/nmcli'
    assert up_args[0][1] == 'con'
    assert up_args[0][2] == 'up'
    assert up_args[0][3] == 'non_existent_nw_device'

    out, err = capfd.readouterr()
    results = json.loads(out)
    assert not results.get('failed')
    assert results['changed']
@pytest.mark.parametrize('patch_ansible_module', TESTCASE_ETHERNET_STATIC_MULTIPLE_IP4_ADDRESSES, indirect=['patch_ansible_module'])
def test_ethernet_connection_static_with_mulitple_ip4_addresses_unchanged(mocked_ethernet_connection_static_multiple_ip4_addresses_unchanged, capfd):
    """
    Test : Ethernet connection with multiple static IP addresses unchanged
    """
    with pytest.raises(SystemExit):
        nmcli.main()

    out, err = capfd.readouterr()
    results = json.loads(out)
    assert not results.get('failed')
    # The mocked `con show` output already matches the requested config,
    # so the module must report no change.
    assert not results['changed']
@pytest.mark.parametrize('patch_ansible_module', TESTCASE_ETHERNET_STATIC_MULTIPLE_IP4_ADDRESSES, indirect=['patch_ansible_module'])
def test_add_second_ip4_address_to_ethernet_connection(mocked_ethernet_connection_static_modify, capfd):
    """
    Test : Add a second static IP address to an existing ethernet connection
    """
    with pytest.raises(SystemExit):
        nmcli.main()

    # Two calls: the fixture serves the current (single-address) profile
    # first, then the module issues `con modify` with both addresses.
    assert nmcli.Nmcli.execute_command.call_count == 2
    arg_list = nmcli.Nmcli.execute_command.call_args_list
    args, kwargs = arg_list[1]

    assert args[0][0] == '/usr/bin/nmcli'
    assert args[0][1] == 'con'
    assert args[0][2] == 'modify'
    assert args[0][3] == 'non_existent_nw_device'

    for param in ['ipv4.addresses', '10.10.10.10/24,10.10.20.10/24']:
        assert param in args[0]

    out, err = capfd.readouterr()
    results = json.loads(out)
    assert not results.get('failed')
    assert results['changed']

View File

@@ -71,6 +71,28 @@ def get_repo_mock(url, request):
return response(200, content, headers, None, 5, request)
@urlmatch(netloc=r'api\.github\.com(:[0-9]+)?$', path=r'/repos/.*/.*', method="get")
def get_private_repo_mock(url, request):
    """httmock handler: serve a canned private-repo payload for GET /repos/<org>/<repo>."""
    # Pull org and repo names back out of the requested URL.
    match = re.search(
        r"api\.github\.com(:[0-9]+)?/repos/(?P<org>[^/]+)/(?P<repo>[^/]+)", request.url)
    org = match.group("org")
    repo = match.group("repo")
    # https://docs.github.com/en/rest/reference/repos#get-a-repository
    headers = {'content-type': 'application/json'}
    content = {
        "name": repo,
        "full_name": "{0}/{1}".format(org, repo),
        "url": "https://api.github.com/repos/{0}/{1}".format(org, repo),
        "private": True,
        "description": "This your first repo!",
        "default_branch": "master",
        "allow_rebase_merge": True
    }
    content = json.dumps(content).encode("utf-8")
    return response(200, content, headers, None, 5, request)
@urlmatch(netloc=r'api\.github\.com(:[0-9]+)?$', path=r'/orgs/.*/repos', method="post")
def create_new_org_repo_mock(url, request):
match = re.search(
@@ -83,8 +105,8 @@ def create_new_org_repo_mock(url, request):
content = {
"name": repo['name'],
"full_name": "{0}/{1}".format(org, repo['name']),
"private": repo['private'],
"description": repo['description']
"private": repo.get('private', False),
"description": repo.get('description')
}
content = json.dumps(content).encode("utf-8")
return response(201, content, headers, None, 5, request)
@@ -99,8 +121,8 @@ def create_new_user_repo_mock(url, request):
content = {
"name": repo['name'],
"full_name": "{0}/{1}".format("octocat", repo['name']),
"private": repo['private'],
"description": repo['description']
"private": repo.get('private', False),
"description": repo.get('description')
}
content = json.dumps(content).encode("utf-8")
return response(201, content, headers, None, 5, request)
@@ -120,8 +142,8 @@ def patch_repo_mock(url, request):
"name": repo,
"full_name": "{0}/{1}".format(org, repo),
"url": "https://api.github.com/repos/{0}/{1}".format(org, repo),
"private": body['private'],
"description": body['description'],
"private": body.get('private', False),
"description": body.get('description'),
"default_branch": "master",
"allow_rebase_merge": True
}
@@ -160,11 +182,34 @@ class TestGithubRepo(unittest.TestCase):
"description": "Just for fun",
"private": False,
"state": "present",
"api_url": "https://api.github.com"
"api_url": "https://api.github.com",
"force_defaults": False,
})
self.assertEqual(result['changed'], True)
self.assertEqual(result['repo']['private'], False)
self.assertEqual(result['repo']['description'], 'Just for fun')
@with_httmock(get_orgs_mock)
@with_httmock(get_repo_notfound_mock)
@with_httmock(create_new_org_repo_mock)
def test_create_new_org_repo_incomplete(self):
result = github_repo.run_module({
'username': None,
'password': None,
"access_token": "mytoken",
"organization": "MyOrganization",
"name": "myrepo",
"description": None,
"private": None,
"state": "present",
"api_url": "https://api.github.com",
"force_defaults": False,
})
self.assertEqual(result['changed'], True)
self.assertEqual(result['repo']['private'], False)
self.assertEqual(result['repo']['description'], None)
@with_httmock(get_user_mock)
@with_httmock(get_repo_notfound_mock)
@@ -179,7 +224,8 @@ class TestGithubRepo(unittest.TestCase):
"description": "Just for fun",
"private": True,
"state": "present",
"api_url": "https://api.github.com"
"api_url": "https://api.github.com",
"force_defaults": False,
})
self.assertEqual(result['changed'], True)
self.assertEqual(result['repo']['private'], True)
@@ -197,11 +243,31 @@ class TestGithubRepo(unittest.TestCase):
"description": "Just for fun",
"private": True,
"state": "present",
"api_url": "https://api.github.com"
"api_url": "https://api.github.com",
"force_defaults": False,
})
self.assertEqual(result['changed'], True)
self.assertEqual(result['repo']['private'], True)
@with_httmock(get_orgs_mock)
@with_httmock(get_private_repo_mock)
def test_idempotency_existing_org_private_repo(self):
result = github_repo.run_module({
'username': None,
'password': None,
"access_token": "mytoken",
"organization": "MyOrganization",
"name": "myrepo",
"description": None,
"private": None,
"state": "present",
"api_url": "https://api.github.com",
"force_defaults": False,
})
self.assertEqual(result['changed'], False)
self.assertEqual(result['repo']['private'], True)
self.assertEqual(result['repo']['description'], 'This your first repo!')
@with_httmock(get_orgs_mock)
@with_httmock(get_repo_mock)
@with_httmock(delete_repo_mock)
@@ -215,7 +281,8 @@ class TestGithubRepo(unittest.TestCase):
"description": "Just for fun",
"private": False,
"state": "absent",
"api_url": "https://api.github.com"
"api_url": "https://api.github.com",
"force_defaults": False,
})
self.assertEqual(result['changed'], True)
@@ -232,7 +299,8 @@ class TestGithubRepo(unittest.TestCase):
"description": "Just for fun",
"private": False,
"state": "absent",
"api_url": "https://api.github.com"
"api_url": "https://api.github.com",
"force_defaults": False,
})
self.assertEqual(result['changed'], True)
@@ -249,7 +317,8 @@ class TestGithubRepo(unittest.TestCase):
"description": "Just for fun",
"private": True,
"state": "absent",
"api_url": "https://api.github.com"
"api_url": "https://api.github.com",
"force_defaults": False,
})
self.assertEqual(result['changed'], False)

View File

@@ -55,11 +55,11 @@ class TestGitlabDeployKey(GitlabModuleTestCase):
def test_deploy_key_exist(self):
project = self.gitlab_instance.projects.get(1)
rvalue = self.moduleUtil.existsDeployKey(project, "Public key")
rvalue = self.moduleUtil.exists_deploy_key(project, "Public key")
self.assertEqual(rvalue, True)
rvalue = self.moduleUtil.existsDeployKey(project, "Private key")
rvalue = self.moduleUtil.exists_deploy_key(project, "Private key")
self.assertEqual(rvalue, False)
@@ -68,11 +68,11 @@ class TestGitlabDeployKey(GitlabModuleTestCase):
def test_create_deploy_key(self):
project = self.gitlab_instance.projects.get(1)
deploy_key = self.moduleUtil.createDeployKey(project, {"title": "Public key",
"key": "ssh-rsa AAAAB3NzaC1yc2EAAAABJQAAAIEAiPWx6WM"
"4lhHNedGfBpPJNPpZ7yKu+dnn1SJejgt4596k6YjzGGphH2TUxwKzxc"
"KDKKezwkpfnxPkSMkuEspGRt/aZZ9wa++Oi7Qkr8prgHc4soW6NUlfD"
"zpvZK2H5E7eQaSeP3SAwGmQKUFHCddNaP0L+hM7zhFNzjFvpaMgJw0="})
deploy_key = self.moduleUtil.create_deploy_key(project, {"title": "Public key",
"key": "ssh-rsa AAAAB3NzaC1yc2EAAAABJQAAAIEAiPWx6WM"
"4lhHNedGfBpPJNPpZ7yKu+dnn1SJejgt4596k6YjzGGphH2TUxwKzxc"
"KDKKezwkpfnxPkSMkuEspGRt/aZZ9wa++Oi7Qkr8prgHc4soW6NUlfD"
"zpvZK2H5E7eQaSeP3SAwGmQKUFHCddNaP0L+hM7zhFNzjFvpaMgJw0="})
self.assertEqual(type(deploy_key), ProjectKey)
self.assertEqual(deploy_key.title, "Public key")
@@ -82,15 +82,15 @@ class TestGitlabDeployKey(GitlabModuleTestCase):
@with_httmock(resp_create_project_deploy_key)
def test_update_deploy_key(self):
project = self.gitlab_instance.projects.get(1)
deployKey = self.moduleUtil.findDeployKey(project, "Public key")
deploy_key = self.moduleUtil.find_deploy_key(project, "Public key")
changed, newDeploy_key = self.moduleUtil.updateDeployKey(deployKey, {"title": "Private key"})
changed, newDeploy_key = self.moduleUtil.update_deploy_key(deploy_key, {"title": "Private key"})
self.assertEqual(changed, True)
self.assertEqual(type(newDeploy_key), ProjectKey)
self.assertEqual(newDeploy_key.title, "Private key")
changed, newDeploy_key = self.moduleUtil.updateDeployKey(deployKey, {"title": "Private key"})
changed, newDeploy_key = self.moduleUtil.update_deploy_key(deploy_key, {"title": "Private key"})
self.assertEqual(changed, False)
self.assertEqual(newDeploy_key.title, "Private key")
@@ -101,8 +101,8 @@ class TestGitlabDeployKey(GitlabModuleTestCase):
def test_delete_deploy_key(self):
project = self.gitlab_instance.projects.get(1)
self.moduleUtil.existsDeployKey(project, "Public key")
self.moduleUtil.exists_deploy_key(project, "Public key")
rvalue = self.moduleUtil.deleteDeployKey()
rvalue = self.moduleUtil.delete_deploy_key()
self.assertEqual(rvalue, None)

View File

@@ -54,24 +54,25 @@ class TestGitlabGroup(GitlabModuleTestCase):
@with_httmock(resp_get_group)
def test_exist_group(self):
rvalue = self.moduleUtil.existsGroup(1)
rvalue = self.moduleUtil.exists_group(1)
self.assertEqual(rvalue, True)
@with_httmock(resp_get_missing_group)
def test_exist_group(self):
rvalue = self.moduleUtil.existsGroup(1)
rvalue = self.moduleUtil.exists_group(1)
self.assertEqual(rvalue, False)
@with_httmock(resp_create_group)
def test_create_group(self):
group = self.moduleUtil.createGroup({'name': "Foobar Group",
'path': "foo-bar",
'description': "An interesting group",
'project_creation_level': "developer",
'subgroup_creation_level': "maintainer",
'require_two_factor_authentication': True})
group = self.moduleUtil.create_group({'name': "Foobar Group",
'path': "foo-bar",
'description': "An interesting group",
'project_creation_level': "developer",
'subgroup_creation_level': "maintainer",
'require_two_factor_authentication': True,
})
self.assertEqual(type(group), Group)
self.assertEqual(group.name, "Foobar Group")
@@ -84,11 +85,12 @@ class TestGitlabGroup(GitlabModuleTestCase):
@with_httmock(resp_create_subgroup)
def test_create_subgroup(self):
group = self.moduleUtil.createGroup({'name': "BarFoo Group",
'path': "bar-foo",
'parent_id': 1,
'project_creation_level': "noone",
'require_two_factor_authentication': True})
group = self.moduleUtil.create_group({'name': "BarFoo Group",
'path': "bar-foo",
'parent_id': 1,
'project_creation_level': "noone",
'require_two_factor_authentication': True,
})
self.assertEqual(type(group), Group)
self.assertEqual(group.name, "BarFoo Group")
@@ -101,10 +103,11 @@ class TestGitlabGroup(GitlabModuleTestCase):
@with_httmock(resp_get_group)
def test_update_group(self):
group = self.gitlab_instance.groups.get(1)
changed, newGroup = self.moduleUtil.updateGroup(group, {'name': "BarFoo Group",
'visibility': "private",
'project_creation_level': "maintainer",
'require_two_factor_authentication': True})
changed, newGroup = self.moduleUtil.update_group(group, {'name': "BarFoo Group",
'visibility': "private",
'project_creation_level': "maintainer",
'require_two_factor_authentication': True,
})
self.assertEqual(changed, True)
self.assertEqual(newGroup.name, "BarFoo Group")
@@ -112,7 +115,7 @@ class TestGitlabGroup(GitlabModuleTestCase):
self.assertEqual(newGroup.project_creation_level, "maintainer")
self.assertEqual(newGroup.require_two_factor_authentication, True)
changed, newGroup = self.moduleUtil.updateGroup(group, {'name': "BarFoo Group"})
changed, newGroup = self.moduleUtil.update_group(group, {'name': "BarFoo Group"})
self.assertEqual(changed, False)
@@ -120,10 +123,10 @@ class TestGitlabGroup(GitlabModuleTestCase):
@with_httmock(resp_find_group_project)
@with_httmock(resp_delete_group)
def test_delete_group(self):
self.moduleUtil.existsGroup(1)
self.moduleUtil.exists_group(1)
print(self.moduleUtil.groupObject.projects)
print(self.moduleUtil.group_object.projects)
rvalue = self.moduleUtil.deleteGroup()
rvalue = self.moduleUtil.delete_group()
self.assertEqual(rvalue, None)

View File

@@ -55,11 +55,11 @@ class TestGitlabHook(GitlabModuleTestCase):
def test_hook_exist(self):
project = self.gitlab_instance.projects.get(1)
rvalue = self.moduleUtil.existsHook(project, "http://example.com/hook")
rvalue = self.moduleUtil.exists_hook(project, "http://example.com/hook")
self.assertEqual(rvalue, True)
rvalue = self.moduleUtil.existsHook(project, "http://gitlab.com/hook")
rvalue = self.moduleUtil.exists_hook(project, "http://gitlab.com/hook")
self.assertEqual(rvalue, False)
@@ -68,7 +68,7 @@ class TestGitlabHook(GitlabModuleTestCase):
def test_create_hook(self):
project = self.gitlab_instance.projects.get(1)
hook = self.moduleUtil.createHook(project, {"url": "http://example.com/hook"})
hook = self.moduleUtil.create_hook(project, {"url": "http://example.com/hook"})
self.assertEqual(type(hook), ProjectHook)
self.assertEqual(hook.url, "http://example.com/hook")
@@ -77,15 +77,15 @@ class TestGitlabHook(GitlabModuleTestCase):
@with_httmock(resp_find_project_hook)
def test_update_hook(self):
project = self.gitlab_instance.projects.get(1)
hook = self.moduleUtil.findHook(project, "http://example.com/hook")
hook = self.moduleUtil.find_hook(project, "http://example.com/hook")
changed, newHook = self.moduleUtil.updateHook(hook, {"url": "http://gitlab.com/hook"})
changed, newHook = self.moduleUtil.update_hook(hook, {"url": "http://gitlab.com/hook"})
self.assertEqual(changed, True)
self.assertEqual(type(newHook), ProjectHook)
self.assertEqual(newHook.url, "http://gitlab.com/hook")
changed, newHook = self.moduleUtil.updateHook(hook, {"url": "http://gitlab.com/hook"})
changed, newHook = self.moduleUtil.update_hook(hook, {"url": "http://gitlab.com/hook"})
self.assertEqual(changed, False)
self.assertEqual(newHook.url, "http://gitlab.com/hook")
@@ -96,8 +96,8 @@ class TestGitlabHook(GitlabModuleTestCase):
def test_delete_hook(self):
project = self.gitlab_instance.projects.get(1)
self.moduleUtil.existsHook(project, "http://example.com/hook")
self.moduleUtil.exists_hook(project, "http://example.com/hook")
rvalue = self.moduleUtil.deleteHook()
rvalue = self.moduleUtil.delete_hook()
self.assertEqual(rvalue, None)

View File

@@ -59,11 +59,11 @@ class TestGitlabProject(GitlabModuleTestCase):
def test_project_exist(self):
group = self.gitlab_instance.groups.get(1)
rvalue = self.moduleUtil.existsProject(group, "diaspora-client")
rvalue = self.moduleUtil.exists_project(group, "diaspora-client")
self.assertEqual(rvalue, True)
rvalue = self.moduleUtil.existsProject(group, "missing-project")
rvalue = self.moduleUtil.exists_project(group, "missing-project")
self.assertEqual(rvalue, False)
@@ -71,7 +71,7 @@ class TestGitlabProject(GitlabModuleTestCase):
@with_httmock(resp_create_project)
def test_create_project(self):
group = self.gitlab_instance.groups.get(1)
project = self.moduleUtil.createProject(group, {"name": "Diaspora Client", "path": "diaspora-client", "namespace_id": group.id})
project = self.moduleUtil.create_project(group, {"name": "Diaspora Client", "path": "diaspora-client", "namespace_id": group.id})
self.assertEqual(type(project), Project)
self.assertEqual(project.name, "Diaspora Client")
@@ -80,13 +80,13 @@ class TestGitlabProject(GitlabModuleTestCase):
def test_update_project(self):
project = self.gitlab_instance.projects.get(1)
changed, newProject = self.moduleUtil.updateProject(project, {"name": "New Name"})
changed, newProject = self.moduleUtil.update_project(project, {"name": "New Name"})
self.assertEqual(changed, True)
self.assertEqual(type(newProject), Project)
self.assertEqual(newProject.name, "New Name")
changed, newProject = self.moduleUtil.updateProject(project, {"name": "New Name"})
changed, newProject = self.moduleUtil.update_project(project, {"name": "New Name"})
self.assertEqual(changed, False)
self.assertEqual(newProject.name, "New Name")
@@ -98,14 +98,14 @@ class TestGitlabProject(GitlabModuleTestCase):
# merge_method should be 'merge' by default
self.assertEqual(project.merge_method, "merge")
changed, newProject = self.moduleUtil.updateProject(project, {"name": "New Name", "merge_method": "rebase_merge"})
changed, newProject = self.moduleUtil.update_project(project, {"name": "New Name", "merge_method": "rebase_merge"})
self.assertEqual(changed, True)
self.assertEqual(type(newProject), Project)
self.assertEqual(newProject.name, "New Name")
self.assertEqual(newProject.merge_method, "rebase_merge")
changed, newProject = self.moduleUtil.updateProject(project, {"name": "New Name", "merge_method": "rebase_merge"})
changed, newProject = self.moduleUtil.update_project(project, {"name": "New Name", "merge_method": "rebase_merge"})
self.assertEqual(changed, False)
self.assertEqual(newProject.name, "New Name")
@@ -117,8 +117,8 @@ class TestGitlabProject(GitlabModuleTestCase):
def test_delete_project(self):
group = self.gitlab_instance.groups.get(1)
self.moduleUtil.existsProject(group, "diaspora-client")
self.moduleUtil.exists_project(group, "diaspora-client")
rvalue = self.moduleUtil.deleteProject()
rvalue = self.moduleUtil.delete_project()
self.assertEqual(rvalue, None)

View File

@@ -54,28 +54,28 @@ class TestGitlabRunner(GitlabModuleTestCase):
@with_httmock(resp_find_runners_all)
@with_httmock(resp_get_runner)
def test_runner_exist_all(self):
rvalue = self.moduleUtil.existsRunner("test-1-20150125")
rvalue = self.moduleUtil.exists_runner("test-1-20150125")
self.assertEqual(rvalue, True)
rvalue = self.moduleUtil.existsRunner("test-3-00000000")
rvalue = self.moduleUtil.exists_runner("test-3-00000000")
self.assertEqual(rvalue, False)
@with_httmock(resp_find_runners_list)
@with_httmock(resp_get_runner)
def test_runner_exist_owned(self):
rvalue = self.moduleUtil.existsRunner("test-1-20201214", True)
rvalue = self.moduleUtil.exists_runner("test-1-20201214", True)
self.assertEqual(rvalue, True)
rvalue = self.moduleUtil.existsRunner("test-3-00000000", True)
rvalue = self.moduleUtil.exists_runner("test-3-00000000", True)
self.assertEqual(rvalue, False)
@with_httmock(resp_create_runner)
def test_create_runner(self):
runner = self.moduleUtil.createRunner({"token": "token", "description": "test-1-20150125"})
runner = self.moduleUtil.create_runner({"token": "token", "description": "test-1-20150125"})
self.assertEqual(type(runner), Runner)
self.assertEqual(runner.description, "test-1-20150125")
@@ -83,15 +83,15 @@ class TestGitlabRunner(GitlabModuleTestCase):
@with_httmock(resp_find_runners_all)
@with_httmock(resp_get_runner)
def test_update_runner(self):
runner = self.moduleUtil.findRunner("test-1-20150125")
runner = self.moduleUtil.find_runner("test-1-20150125")
changed, newRunner = self.moduleUtil.updateRunner(runner, {"description": "Runner description"})
changed, newRunner = self.moduleUtil.update_runner(runner, {"description": "Runner description"})
self.assertEqual(changed, True)
self.assertEqual(type(newRunner), Runner)
self.assertEqual(newRunner.description, "Runner description")
changed, newRunner = self.moduleUtil.updateRunner(runner, {"description": "Runner description"})
changed, newRunner = self.moduleUtil.update_runner(runner, {"description": "Runner description"})
self.assertEqual(changed, False)
self.assertEqual(newRunner.description, "Runner description")
@@ -100,8 +100,8 @@ class TestGitlabRunner(GitlabModuleTestCase):
@with_httmock(resp_get_runner)
@with_httmock(resp_delete_runner)
def test_delete_runner(self):
self.moduleUtil.existsRunner("test-1-20150125")
self.moduleUtil.exists_runner("test-1-20150125")
rvalue = self.moduleUtil.deleteRunner()
rvalue = self.moduleUtil.delete_runner()
self.assertEqual(rvalue, None)

View File

@@ -61,17 +61,17 @@ class TestGitlabUser(GitlabModuleTestCase):
@with_httmock(resp_find_user)
def test_exist_user(self):
rvalue = self.moduleUtil.existsUser("john_smith")
rvalue = self.moduleUtil.exists_user("john_smith")
self.assertEqual(rvalue, True)
rvalue = self.moduleUtil.existsUser("paul_smith")
rvalue = self.moduleUtil.exists_user("paul_smith")
self.assertEqual(rvalue, False)
@with_httmock(resp_find_user)
def test_find_user(self):
user = self.moduleUtil.findUser("john_smith")
user = self.moduleUtil.find_user("john_smith")
self.assertEqual(type(user), User)
self.assertEqual(user.name, "John Smith")
@@ -79,7 +79,7 @@ class TestGitlabUser(GitlabModuleTestCase):
@with_httmock(resp_create_user)
def test_create_user(self):
user = self.moduleUtil.createUser({'email': 'john@example.com', 'password': 's3cur3s3cr3T',
user = self.moduleUtil.create_user({'email': 'john@example.com', 'password': 's3cur3s3cr3T',
'username': 'john_smith', 'name': 'John Smith'})
self.assertEqual(type(user), User)
self.assertEqual(user.name, "John Smith")
@@ -89,7 +89,7 @@ class TestGitlabUser(GitlabModuleTestCase):
def test_update_user(self):
user = self.gitlab_instance.users.get(1)
changed, newUser = self.moduleUtil.updateUser(
changed, newUser = self.moduleUtil.update_user(
user,
{'name': {'value': "Jack Smith"}, "is_admin": {'value': "true", 'setter': 'admin'}}, {}
)
@@ -98,11 +98,11 @@ class TestGitlabUser(GitlabModuleTestCase):
self.assertEqual(newUser.name, "Jack Smith")
self.assertEqual(newUser.admin, "true")
changed, newUser = self.moduleUtil.updateUser(user, {'name': {'value': "Jack Smith"}}, {})
changed, newUser = self.moduleUtil.update_user(user, {'name': {'value': "Jack Smith"}}, {})
self.assertEqual(changed, False)
changed, newUser = self.moduleUtil.updateUser(
changed, newUser = self.moduleUtil.update_user(
user,
{}, {
'skip_reconfirmation': {'value': True},
@@ -118,8 +118,8 @@ class TestGitlabUser(GitlabModuleTestCase):
@with_httmock(resp_find_user)
@with_httmock(resp_delete_user)
def test_delete_user(self):
self.moduleUtil.existsUser("john_smith")
rvalue = self.moduleUtil.deleteUser()
self.moduleUtil.exists_user("john_smith")
rvalue = self.moduleUtil.delete_user()
self.assertEqual(rvalue, None)
@@ -128,10 +128,10 @@ class TestGitlabUser(GitlabModuleTestCase):
def test_sshkey_exist(self):
user = self.gitlab_instance.users.get(1)
exist = self.moduleUtil.sshKeyExists(user, "Public key")
exist = self.moduleUtil.ssh_key_exists(user, "Public key")
self.assertEqual(exist, True)
notExist = self.moduleUtil.sshKeyExists(user, "Private key")
notExist = self.moduleUtil.ssh_key_exists(user, "Private key")
self.assertEqual(notExist, False)
@with_httmock(resp_get_user)
@@ -140,7 +140,7 @@ class TestGitlabUser(GitlabModuleTestCase):
def test_create_sshkey(self):
user = self.gitlab_instance.users.get(1)
rvalue = self.moduleUtil.addSshKeyToUser(user, {
rvalue = self.moduleUtil.add_ssh_key_to_user(user, {
'name': "Public key",
'file': "ssh-rsa AAAAB3NzaC1yc2EAAAABJQAAAIEAiPWx6WM4lhHNedGfBpPJNPpZ7yKu+dnn1SJe"
"jgt4596k6YjzGGphH2TUxwKzxcKDKKezwkpfnxPkSMkuEspGRt/aZZ9wa++Oi7Qkr8prgHc4"
@@ -148,7 +148,7 @@ class TestGitlabUser(GitlabModuleTestCase):
'expires_at': ""})
self.assertEqual(rvalue, False)
rvalue = self.moduleUtil.addSshKeyToUser(user, {
rvalue = self.moduleUtil.add_ssh_key_to_user(user, {
'name': "Private key",
'file': "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQDA1YotVDm2mAyk2tPt4E7AHm01sS6JZmcU"
"dRuSuA5zszUJzYPPUSRAX3BCgTqLqYx//UuVncK7YqLVSbbwjKR2Ez5lISgCnVfLVEXzwhv+"
@@ -163,7 +163,7 @@ class TestGitlabUser(GitlabModuleTestCase):
def test_find_member(self):
group = self.gitlab_instance.groups.get(1)
user = self.moduleUtil.findMember(group, 1)
user = self.moduleUtil.find_member(group, 1)
self.assertEqual(user.username, "raymond_smith")
@with_httmock(resp_get_user)
@@ -176,8 +176,8 @@ class TestGitlabUser(GitlabModuleTestCase):
group = self.gitlab_instance.groups.get(1)
user = self.gitlab_instance.users.get(1)
rvalue = self.moduleUtil.assignUserToGroup(user, group.id, "developer")
rvalue = self.moduleUtil.assign_user_to_group(user, group.id, "developer")
self.assertEqual(rvalue, False)
rvalue = self.moduleUtil.assignUserToGroup(user, group.id, "guest")
rvalue = self.moduleUtil.assign_user_to_group(user, group.id, "guest")
self.assertEqual(rvalue, True)

View File

@@ -86,7 +86,7 @@ if [ "${script}" != "sanity" ] || [ "${test}" == "sanity/extra" ]; then
# retry ansible-galaxy -vvv collection install community.internal_test_tools
fi
if [ "${script}" != "sanity" ] && [ "${script}" != "units" ]; then
if [ "${script}" != "sanity" ] && [ "${script}" != "units" ] && [ "${test}" != "sanity/extra" ]; then
CRYPTO_BRANCH=main
if [ "${script}" == "linux" ] && [[ "${test}" =~ "ubuntu1604/" ]]; then
CRYPTO_BRANCH=stable-1