mirror of
https://github.com/ansible-collections/community.general.git
synced 2026-05-06 21:32:49 +00:00
Initial commit
This commit is contained in:
343
plugins/modules/packaging/os/apk.py
Normal file
343
plugins/modules/packaging/os/apk.py
Normal file
@@ -0,0 +1,343 @@
|
||||
#!/usr/bin/python
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
# (c) 2015, Kevin Brebanov <https://github.com/kbrebanov>
|
||||
# Based on pacman (Afterburn <https://github.com/afterburn>, Aaron Bull Schaefer <aaron@elasticdog.com>)
|
||||
# and apt (Matthew Williams <matthew@flowroute.com>) modules.
|
||||
#
|
||||
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||
|
||||
from __future__ import absolute_import, division, print_function
|
||||
__metaclass__ = type
|
||||
|
||||
|
||||
ANSIBLE_METADATA = {'metadata_version': '1.1',
|
||||
'status': ['stableinterface'],
|
||||
'supported_by': 'community'}
|
||||
|
||||
|
||||
DOCUMENTATION = '''
|
||||
---
|
||||
module: apk
|
||||
short_description: Manages apk packages
|
||||
description:
|
||||
- Manages I(apk) packages for Alpine Linux.
|
||||
author: "Kevin Brebanov (@kbrebanov)"
|
||||
options:
|
||||
available:
|
||||
description:
|
||||
- During upgrade, reset versioned world dependencies and change logic to prefer replacing or downgrading packages (instead of holding them)
|
||||
if the currently installed package is no longer available from any repository.
|
||||
type: bool
|
||||
default: 'no'
|
||||
name:
|
||||
description:
|
||||
- A package name, like C(foo), or multiple packages, like C(foo, bar).
|
||||
type: list
|
||||
elements: str
|
||||
repository:
|
||||
description:
|
||||
- A package repository or multiple repositories.
|
||||
Unlike with the underlying apk command, this list will override the system repositories rather than supplement them.
|
||||
state:
|
||||
description:
|
||||
- Indicates the desired package(s) state.
|
||||
- C(present) ensures the package(s) is/are present.
|
||||
- C(absent) ensures the package(s) is/are absent.
|
||||
- C(latest) ensures the package(s) is/are present and the latest version(s).
|
||||
default: present
|
||||
choices: [ "present", "absent", "latest" ]
|
||||
update_cache:
|
||||
description:
|
||||
- Update repository indexes. Can be run with other steps or on it's own.
|
||||
type: bool
|
||||
default: 'no'
|
||||
upgrade:
|
||||
description:
|
||||
- Upgrade all installed packages to their latest version.
|
||||
type: bool
|
||||
default: 'no'
|
||||
notes:
|
||||
- '"name" and "upgrade" are mutually exclusive.'
|
||||
- When used with a `loop:` each package will be processed individually, it is much more efficient to pass the list directly to the `name` option.
|
||||
'''
|
||||
|
||||
EXAMPLES = '''
|
||||
# Update repositories and install "foo" package
|
||||
- apk:
|
||||
name: foo
|
||||
update_cache: yes
|
||||
|
||||
# Update repositories and install "foo" and "bar" packages
|
||||
- apk:
|
||||
name: foo,bar
|
||||
update_cache: yes
|
||||
|
||||
# Remove "foo" package
|
||||
- apk:
|
||||
name: foo
|
||||
state: absent
|
||||
|
||||
# Remove "foo" and "bar" packages
|
||||
- apk:
|
||||
name: foo,bar
|
||||
state: absent
|
||||
|
||||
# Install the package "foo"
|
||||
- apk:
|
||||
name: foo
|
||||
state: present
|
||||
|
||||
# Install the packages "foo" and "bar"
|
||||
- apk:
|
||||
name: foo,bar
|
||||
state: present
|
||||
|
||||
# Update repositories and update package "foo" to latest version
|
||||
- apk:
|
||||
name: foo
|
||||
state: latest
|
||||
update_cache: yes
|
||||
|
||||
# Update repositories and update packages "foo" and "bar" to latest versions
|
||||
- apk:
|
||||
name: foo,bar
|
||||
state: latest
|
||||
update_cache: yes
|
||||
|
||||
# Update all installed packages to the latest versions
|
||||
- apk:
|
||||
upgrade: yes
|
||||
|
||||
# Upgrade / replace / downgrade / uninstall all installed packages to the latest versions available
|
||||
- apk:
|
||||
available: yes
|
||||
upgrade: yes
|
||||
|
||||
# Update repositories as a separate step
|
||||
- apk:
|
||||
update_cache: yes
|
||||
|
||||
# Install package from a specific repository
|
||||
- apk:
|
||||
name: foo
|
||||
state: latest
|
||||
update_cache: yes
|
||||
repository: http://dl-3.alpinelinux.org/alpine/edge/main
|
||||
'''
|
||||
|
||||
RETURN = '''
|
||||
packages:
|
||||
description: a list of packages that have been changed
|
||||
returned: when packages have changed
|
||||
type: list
|
||||
sample: ['package', 'other-package']
|
||||
'''
|
||||
|
||||
import re
|
||||
# Import module snippets.
|
||||
from ansible.module_utils.basic import AnsibleModule
|
||||
|
||||
|
||||
def parse_for_packages(stdout):
    """Extract package names from apk progress output.

    apk prints one line per affected package, e.g.
    ``(1/3) Installing foo (1.2-r0)``; the token after the action word
    is the package name.
    """
    pattern = re.compile(r'^\(\d+/\d+\)\s+\S+\s+(\S+)')
    matches = (pattern.search(line) for line in stdout.split('\n'))
    return [m.group(1) for m in matches if m]
|
||||
|
||||
|
||||
def update_package_db(module, exit):
    """Refresh the apk repository indexes.

    Fails the module when ``apk update`` returns non-zero.  When
    *exit* is true (updating the cache was the only requested action)
    the module exits immediately reporting the refresh; otherwise
    True is returned so the caller can carry on.
    """
    rc, stdout, stderr = module.run_command("%s update" % (APK_PATH), check_rc=False)
    if rc != 0:
        module.fail_json(msg="could not update package db", stdout=stdout, stderr=stderr)
    elif exit:
        module.exit_json(changed=True, msg='updated repository indexes', stdout=stdout, stderr=stderr)
    else:
        return True
|
||||
|
||||
|
||||
def query_toplevel(module, name):
    """Return True if *name* is an explicitly installed (world) package.

    /etc/apk/world lists top-level packages separated by spaces or
    newlines; an entry may carry a repository (@) or version (=<>~)
    suffix, or start with a negation ``!``, so the bare name is matched
    with an optional suffix.
    """
    pattern = re.compile(r'^' + re.escape(name) + r'([@=<>~].+)?$')
    with open('/etc/apk/world') as world_file:
        entries = world_file.read().split()
    return any(pattern.search(entry) for entry in entries)
|
||||
|
||||
|
||||
def query_package(module, name):
    """Return True when package *name* is currently installed.

    ``apk -v info --installed`` exits 0 only for installed packages.
    """
    rc, dummy_out, dummy_err = module.run_command(
        "%s -v info --installed %s" % (APK_PATH, name), check_rc=False)
    return rc == 0
|
||||
|
||||
|
||||
def query_latest(module, name):
    """Return True if the installed *name* is already the newest available version.

    Parses the output of ``apk version <name>``, which prints a line of
    the form ``<pkg>-<ver>-<rel> <op> <candidate>-<rel>``; an operator
    of '<' means a newer candidate exists.
    """
    cmd = "%s version %s" % (APK_PATH, name)
    rc, stdout, stderr = module.run_command(cmd, check_rc=False)
    # group(1): the package name, group(2): the single-character
    # comparison operator between installed and candidate version.
    search_pattern = r"(%s)-[\d\.\w]+-[\d\w]+\s+(.)\s+[\d\.\w]+-[\d\w]+\s+" % (re.escape(name))
    match = re.search(search_pattern, stdout)
    if match and match.group(2) == "<":
        return False
    # No match (package line absent) is treated as "already latest".
    return True
|
||||
|
||||
|
||||
def query_virtual(module, name):
    """Return True when *name* is a virtual meta package.

    apk marks virtual packages with a fixed description line, which is
    matched against the ``info --description`` output.
    """
    rc, stdout, stderr = module.run_command(
        "%s -v info --description %s" % (APK_PATH, name), check_rc=False)
    search_pattern = r"^%s: virtual meta package" % (re.escape(name))
    return bool(re.search(search_pattern, stdout))
|
||||
|
||||
|
||||
def get_dependencies(module, name):
    """Return the dependency names of *name* (possibly empty).

    The first whitespace-separated token of ``apk -v info --depends``
    output is a header, so it is dropped.
    """
    rc, stdout, stderr = module.run_command(
        "%s -v info --depends %s" % (APK_PATH, name), check_rc=False)
    tokens = stdout.split()
    return tokens[1:] if len(tokens) > 1 else []
|
||||
|
||||
|
||||
def upgrade_packages(module, available):
    """Upgrade every installed package via ``apk upgrade`` and exit the module.

    :param available: when true, add ``--available`` so apk may
        replace/downgrade packages no longer present in any repository.

    Always exits: changed=False when apk reported nothing to do,
    changed=True otherwise; fails on a non-zero exit code.
    """
    # In check mode let apk simulate the upgrade instead of applying it.
    if module.check_mode:
        cmd = "%s upgrade --simulate" % (APK_PATH)
    else:
        cmd = "%s upgrade" % (APK_PATH)
    if available:
        cmd = "%s --available" % cmd
    rc, stdout, stderr = module.run_command(cmd, check_rc=False)
    packagelist = parse_for_packages(stdout)
    if rc != 0:
        module.fail_json(msg="failed to upgrade packages", stdout=stdout, stderr=stderr, packages=packagelist)
    # apk starts its output with "OK" when everything was already up to date.
    if re.search(r'^OK', stdout):
        module.exit_json(changed=False, msg="packages already upgraded", stdout=stdout, stderr=stderr, packages=packagelist)
    module.exit_json(changed=True, msg="upgraded packages", stdout=stdout, stderr=stderr, packages=packagelist)
|
||||
|
||||
|
||||
def install_packages(module, names, state):
    """Install (and, for state=latest, upgrade) the given packages via ``apk add``.

    Virtual meta packages are expanded to their dependencies so that
    state=latest can upgrade the members individually.  Always exits
    the module: changed=False when nothing needs doing, changed=True
    after a successful ``apk add``; fails on a non-zero exit code.
    """
    upgrade = False
    to_install = []
    to_upgrade = []
    for name in names:
        # Check if virtual package
        if query_virtual(module, name):
            # Get virtual package dependencies
            dependencies = get_dependencies(module, name)
            for dependency in dependencies:
                if state == 'latest' and not query_latest(module, dependency):
                    to_upgrade.append(dependency)
        else:
            if not query_toplevel(module, name):
                to_install.append(name)
            elif state == 'latest' and not query_latest(module, name):
                to_upgrade.append(name)
    if to_upgrade:
        upgrade = True
    if not to_install and not upgrade:
        module.exit_json(changed=False, msg="package(s) already installed")
    packages = " ".join(to_install + to_upgrade)
    # --upgrade is only needed when at least one package must move to a
    # newer version; --simulate keeps check mode side-effect free.
    if upgrade:
        if module.check_mode:
            cmd = "%s add --upgrade --simulate %s" % (APK_PATH, packages)
        else:
            cmd = "%s add --upgrade %s" % (APK_PATH, packages)
    else:
        if module.check_mode:
            cmd = "%s add --simulate %s" % (APK_PATH, packages)
        else:
            cmd = "%s add %s" % (APK_PATH, packages)
    rc, stdout, stderr = module.run_command(cmd, check_rc=False)
    packagelist = parse_for_packages(stdout)
    if rc != 0:
        module.fail_json(msg="failed to install %s" % (packages), stdout=stdout, stderr=stderr, packages=packagelist)
    module.exit_json(changed=True, msg="installed %s package(s)" % (packages), stdout=stdout, stderr=stderr, packages=packagelist)
|
||||
|
||||
|
||||
def remove_packages(module, names):
    """Remove the given packages via ``apk del --purge`` and exit the module.

    Exits with changed=False when none of *names* are installed.
    After removal each originally installed package is re-queried;
    if any is still present (e.g. kept as a dependency) the module fails.
    """
    installed = []
    for name in names:
        if query_package(module, name):
            installed.append(name)
    if not installed:
        module.exit_json(changed=False, msg="package(s) already removed")
    names = " ".join(installed)
    # --simulate keeps check mode side-effect free.
    if module.check_mode:
        cmd = "%s del --purge --simulate %s" % (APK_PATH, names)
    else:
        cmd = "%s del --purge %s" % (APK_PATH, names)
    rc, stdout, stderr = module.run_command(cmd, check_rc=False)
    packagelist = parse_for_packages(stdout)
    # Check to see if packages are still present because of dependencies
    for name in installed:
        if query_package(module, name):
            rc = 1
            break
    if rc != 0:
        module.fail_json(msg="failed to remove %s package(s)" % (names), stdout=stdout, stderr=stderr, packages=packagelist)
    module.exit_json(changed=True, msg="removed %s package(s)" % (names), stdout=stdout, stderr=stderr, packages=packagelist)
|
||||
|
||||
# ==========================================
|
||||
# Main control flow.
|
||||
|
||||
|
||||
def main():
    """Entry point: parse module options and dispatch to the apk actions."""
    module = AnsibleModule(
        argument_spec=dict(
            state=dict(default='present', choices=['present', 'installed', 'absent', 'removed', 'latest']),
            name=dict(type='list', elements='str'),
            repository=dict(type='list'),
            update_cache=dict(default='no', type='bool'),
            upgrade=dict(default='no', type='bool'),
            available=dict(default='no', type='bool'),
        ),
        required_one_of=[['name', 'update_cache', 'upgrade']],
        mutually_exclusive=[['name', 'upgrade']],
        supports_check_mode=True
    )

    # Set LANG env since we parse stdout
    module.run_command_environ_update = dict(LANG='C', LC_ALL='C', LC_MESSAGES='C', LC_CTYPE='C')

    # APK_PATH doubles as the base command string for all helpers.
    global APK_PATH
    APK_PATH = module.get_bin_path('apk', required=True)

    p = module.params

    # add repositories to the APK_PATH
    # --repositories-file /dev/null makes the given repositories
    # REPLACE the system ones rather than supplement them.
    if p['repository']:
        for r in p['repository']:
            APK_PATH = "%s --repository %s --repositories-file /dev/null" % (APK_PATH, r)

    # normalize the state parameter (installed/removed are aliases)
    if p['state'] in ['present', 'installed']:
        p['state'] = 'present'
    if p['state'] in ['absent', 'removed']:
        p['state'] = 'absent'

    if p['update_cache']:
        # Exit straight away only when updating the cache is the sole action.
        update_package_db(module, not p['name'] and not p['upgrade'])

    # upgrade_packages() exits the module, so it never falls through.
    if p['upgrade']:
        upgrade_packages(module, p['available'])

    if p['state'] in ['present', 'latest']:
        install_packages(module, p['name'], p['state'])
    elif p['state'] == 'absent':
        remove_packages(module, p['name'])
|
||||
175
plugins/modules/packaging/os/apt_rpm.py
Normal file
175
plugins/modules/packaging/os/apt_rpm.py
Normal file
@@ -0,0 +1,175 @@
|
||||
#!/usr/bin/python
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
# Copyright: (c) 2013, Evgenii Terechkov
|
||||
# Written by Evgenii Terechkov <evg@altlinux.org>
|
||||
# Based on urpmi module written by Philippe Makowski <philippem@mageia.org>
|
||||
|
||||
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||
|
||||
from __future__ import absolute_import, division, print_function
|
||||
__metaclass__ = type
|
||||
|
||||
ANSIBLE_METADATA = {'metadata_version': '1.1',
|
||||
'status': ['preview'],
|
||||
'supported_by': 'community'}
|
||||
|
||||
DOCUMENTATION = '''
|
||||
---
|
||||
module: apt_rpm
|
||||
short_description: apt_rpm package manager
|
||||
description:
|
||||
- Manages packages with I(apt-rpm). Both low-level (I(rpm)) and high-level (I(apt-get)) package manager binaries required.
|
||||
options:
|
||||
pkg:
|
||||
description:
|
||||
- name of package to install, upgrade or remove.
|
||||
required: true
|
||||
state:
|
||||
description:
|
||||
- Indicates the desired package state.
|
||||
choices: [ absent, present ]
|
||||
default: present
|
||||
update_cache:
|
||||
description:
|
||||
- update the package database first C(apt-get update).
|
||||
type: bool
|
||||
default: 'no'
|
||||
author:
|
||||
- Evgenii Terechkov (@evgkrsk)
|
||||
'''
|
||||
|
||||
EXAMPLES = '''
|
||||
- name: Install package foo
|
||||
apt_rpm:
|
||||
pkg: foo
|
||||
state: present
|
||||
|
||||
- name: Remove package foo
|
||||
apt_rpm:
|
||||
pkg: foo
|
||||
state: absent
|
||||
|
||||
- name: Remove packages foo and bar
|
||||
apt_rpm:
|
||||
pkg: foo,bar
|
||||
state: absent
|
||||
|
||||
# bar will be the updated if a newer version exists
|
||||
- name: Update the package database and install bar
|
||||
apt_rpm:
|
||||
name: bar
|
||||
state: present
|
||||
update_cache: yes
|
||||
'''
|
||||
|
||||
import json
|
||||
import os
|
||||
import shlex
|
||||
import sys
|
||||
|
||||
from ansible.module_utils.basic import AnsibleModule
|
||||
|
||||
APT_PATH = "/usr/bin/apt-get"
|
||||
RPM_PATH = "/usr/bin/rpm"
|
||||
|
||||
|
||||
def query_package(module, name):
    """Return True when *name* is installed.

    ``rpm -q`` exits 0 for an installed package and 1 otherwise.
    """
    rc, out, err = module.run_command("%s -q %s" % (RPM_PATH, name))
    return rc == 0
|
||||
|
||||
|
||||
def query_package_provides(module, name):
    """Return True when *name* (or a package providing it) is installed.

    ``rpm -q --provides`` exits 0 only for installed packages.
    """
    command = "%s -q --provides %s" % (RPM_PATH, name)
    rc, out, err = module.run_command(command)
    if rc == 0:
        return True
    return False
|
||||
|
||||
|
||||
def update_package_db(module):
    """Refresh the APT package database; fail the module on error."""
    returncode, dummy_out, err = module.run_command("%s update" % APT_PATH)
    if returncode != 0:
        module.fail_json(msg="could not update package db: %s" % err)
|
||||
|
||||
|
||||
def remove_packages(module, packages):
    """Remove each installed package in *packages* and exit the module.

    Exits changed=True with a count of removed packages, or
    changed=False when none of them were installed; fails on the first
    package whose removal returns non-zero.
    """
    remove_c = 0
    # Using a for loop in case of error, we can report the package that failed
    for package in packages:
        # Query the package first, to see if we even need to remove
        if not query_package(module, package):
            continue

        rc, out, err = module.run_command("%s -y remove %s" % (APT_PATH, package))

        if rc != 0:
            module.fail_json(msg="failed to remove %s: %s" % (package, err))

        remove_c += 1

    if remove_c > 0:
        module.exit_json(changed=True, msg="removed %s package(s)" % remove_c)

    module.exit_json(changed=False, msg="package(s) already absent")
|
||||
|
||||
|
||||
def install_packages(module, pkgspec):
    """Install every package in *pkgspec* that is not yet present; exit the module.

    Missing packages are installed in one ``apt-get -y install`` call.
    Because apt-rpm can exit 0 even on failure (e.g. with --force),
    success is verified by re-querying every requested package
    afterwards.  Exits changed=True on success, changed=False when
    nothing was missing; fails otherwise.
    """
    packages = ""
    for package in pkgspec:
        if not query_package_provides(module, package):
            packages += "'%s' " % package

    if len(packages) != 0:

        rc, out, err = module.run_command("%s -y install %s" % (APT_PATH, packages))

        # BUGFIX: the original looped `for packages in pkgspec`, which
        # clobbered the `packages` string and re-queried the stale
        # `package` variable (the last item of the first loop) on every
        # iteration, so installation was never verified per package.
        installed = True
        for package in pkgspec:
            if not query_package_provides(module, package):
                installed = False

        # apt-rpm always have 0 for exit code if --force is used
        if rc or not installed:
            module.fail_json(msg="'apt-get -y install %s' failed: %s" % (packages, err))
        else:
            module.exit_json(changed=True, msg="%s present(s)" % packages)
    else:
        module.exit_json(changed=False)
|
||||
|
||||
|
||||
def main():
    """Entry point: parse options and install/remove the listed packages."""
    module = AnsibleModule(
        argument_spec=dict(
            state=dict(type='str', default='installed', choices=['absent', 'installed', 'present', 'removed']),
            update_cache=dict(type='bool', default=False, aliases=['update-cache']),
            package=dict(type='str', required=True, aliases=['name', 'pkg']),
        ),
    )

    # Both command-line tools are hard requirements for this module.
    if not os.path.exists(APT_PATH) or not os.path.exists(RPM_PATH):
        module.fail_json(msg="cannot find /usr/bin/apt-get and/or /usr/bin/rpm")

    p = module.params

    if p['update_cache']:
        update_package_db(module)

    # The package option accepts a comma-separated list of names.
    packages = p['package'].split(',')

    if p['state'] in ['installed', 'present']:
        install_packages(module, packages)

    elif p['state'] in ['absent', 'removed']:
        remove_packages(module, packages)
|
||||
299
plugins/modules/packaging/os/flatpak.py
Normal file
299
plugins/modules/packaging/os/flatpak.py
Normal file
@@ -0,0 +1,299 @@
|
||||
#!/usr/bin/python
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
# Copyright: (c) 2017 John Kwiatkoski (@JayKayy) <jkwiat40@gmail.com>
|
||||
# Copyright: (c) 2018 Alexander Bethke (@oolongbrothers) <oolongbrothers@gmx.net>
|
||||
# Copyright: (c) 2017 Ansible Project
|
||||
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||
|
||||
|
||||
# ATTENTION CONTRIBUTORS!
|
||||
#
|
||||
# TL;DR: Run this module's integration tests manually before opening a pull request
|
||||
#
|
||||
# Long explanation:
|
||||
# The integration tests for this module are currently NOT run on the Ansible project's continuous
|
||||
# delivery pipeline. So please: When you make changes to this module, make sure that you run the
|
||||
# included integration tests manually for both Python 2 and Python 3:
|
||||
#
|
||||
# Python 2:
|
||||
# ansible-test integration -v --docker fedora28 --docker-privileged --allow-unsupported --python 2.7 flatpak
|
||||
# Python 3:
|
||||
# ansible-test integration -v --docker fedora28 --docker-privileged --allow-unsupported --python 3.6 flatpak
|
||||
#
|
||||
# Because of external dependencies, the current integration tests are somewhat too slow and brittle
|
||||
# to be included right now. I have plans to rewrite the integration tests based on a local flatpak
|
||||
# repository so that they can be included into the normal CI pipeline.
|
||||
# //oolongbrothers
|
||||
|
||||
|
||||
from __future__ import (absolute_import, division, print_function)
|
||||
__metaclass__ = type
|
||||
|
||||
ANSIBLE_METADATA = {'metadata_version': '1.1',
|
||||
'status': ['preview'],
|
||||
'supported_by': 'community'}
|
||||
|
||||
DOCUMENTATION = r'''
|
||||
---
|
||||
module: flatpak
|
||||
short_description: Manage flatpaks
|
||||
description:
|
||||
- Allows users to add or remove flatpaks.
|
||||
- See the M(flatpak_remote) module for managing flatpak remotes.
|
||||
author:
|
||||
- John Kwiatkoski (@JayKayy)
|
||||
- Alexander Bethke (@oolongbrothers)
|
||||
requirements:
|
||||
- flatpak
|
||||
options:
|
||||
executable:
|
||||
description:
|
||||
- The path to the C(flatpak) executable to use.
|
||||
- By default, this module looks for the C(flatpak) executable on the path.
|
||||
default: flatpak
|
||||
method:
|
||||
description:
|
||||
- The installation method to use.
|
||||
- Defines if the I(flatpak) is supposed to be installed globally for the whole C(system)
|
||||
or only for the current C(user).
|
||||
choices: [ system, user ]
|
||||
default: system
|
||||
name:
|
||||
description:
|
||||
- The name of the flatpak to manage.
|
||||
- When used with I(state=present), I(name) can be specified as an C(http(s)) URL to a
|
||||
C(flatpakref) file or the unique reverse DNS name that identifies a flatpak.
|
||||
- When supplying a reverse DNS name, you can use the I(remote) option to specify on what remote
|
||||
to look for the flatpak. An example for a reverse DNS name is C(org.gnome.gedit).
|
||||
- When used with I(state=absent), it is recommended to specify the name in the reverse DNS
|
||||
format.
|
||||
- When supplying an C(http(s)) URL with I(state=absent), the module will try to match the
|
||||
installed flatpak based on the name of the flatpakref to remove it. However, there is no
|
||||
guarantee that the names of the flatpakref file and the reverse DNS name of the installed
|
||||
flatpak do match.
|
||||
required: true
|
||||
remote:
|
||||
description:
|
||||
- The flatpak remote (repository) to install the flatpak from.
|
||||
- By default, C(flathub) is assumed, but you do need to add the flathub flatpak_remote before
|
||||
you can use this.
|
||||
- See the M(flatpak_remote) module for managing flatpak remotes.
|
||||
default: flathub
|
||||
state:
|
||||
description:
|
||||
- Indicates the desired package state.
|
||||
choices: [ absent, present ]
|
||||
default: present
|
||||
'''
|
||||
|
||||
EXAMPLES = r'''
|
||||
- name: Install the spotify flatpak
|
||||
flatpak:
|
||||
name: https://s3.amazonaws.com/alexlarsson/spotify-repo/spotify.flatpakref
|
||||
state: present
|
||||
|
||||
- name: Install the gedit flatpak package
|
||||
flatpak:
|
||||
name: https://git.gnome.org/browse/gnome-apps-nightly/plain/gedit.flatpakref
|
||||
state: present
|
||||
|
||||
- name: Install the gedit package from flathub for current user
|
||||
flatpak:
|
||||
name: org.gnome.gedit
|
||||
state: present
|
||||
method: user
|
||||
|
||||
- name: Install the Gnome Calendar flatpak from the gnome remote system-wide
|
||||
flatpak:
|
||||
name: org.gnome.Calendar
|
||||
state: present
|
||||
remote: gnome
|
||||
|
||||
- name: Remove the gedit flatpak
|
||||
flatpak:
|
||||
name: org.gnome.gedit
|
||||
state: absent
|
||||
'''
|
||||
|
||||
RETURN = r'''
|
||||
command:
|
||||
description: The exact flatpak command that was executed
|
||||
returned: When a flatpak command has been executed
|
||||
type: str
|
||||
sample: "/usr/bin/flatpak install --user -y flathub org.gnome.Calculator"
|
||||
msg:
|
||||
description: Module error message
|
||||
returned: failure
|
||||
type: str
|
||||
sample: "Executable '/usr/local/bin/flatpak' was not found on the system."
|
||||
rc:
|
||||
description: Return code from flatpak binary
|
||||
returned: When a flatpak command has been executed
|
||||
type: int
|
||||
sample: 0
|
||||
stderr:
|
||||
description: Error output from flatpak binary
|
||||
returned: When a flatpak command has been executed
|
||||
type: str
|
||||
sample: "error: Error searching remote flathub: Can't find ref org.gnome.KDE"
|
||||
stdout:
|
||||
description: Output from flatpak binary
|
||||
returned: When a flatpak command has been executed
|
||||
type: str
|
||||
sample: "org.gnome.Calendar/x86_64/stable\tcurrent\norg.gnome.gitg/x86_64/stable\tcurrent\n"
|
||||
'''
|
||||
|
||||
import subprocess
|
||||
from ansible.module_utils.six.moves.urllib.parse import urlparse
|
||||
from ansible.module_utils.basic import AnsibleModule
|
||||
from ansible.module_utils._text import to_native
|
||||
|
||||
OUTDATED_FLATPAK_VERSION_ERROR_MESSAGE = "Unknown option --columns=application"
|
||||
|
||||
|
||||
def install_flat(module, binary, remote, name, method):
    """Add a new flatpak, from a flatpakref URL or from a named remote."""
    global result
    is_url = name.startswith('http://') or name.startswith('https://')
    if is_url:
        # A flatpakref URL already carries its origin, so no remote is passed.
        command = "{0} install --{1} -y {2}".format(binary, method, name)
    else:
        command = "{0} install --{1} -y {2} {3}".format(binary, method, remote, name)
    _flatpak_command(module, module.check_mode, command)
    result['changed'] = True
|
||||
|
||||
|
||||
def uninstall_flat(module, binary, name, method):
    """Remove an existing flatpak, resolving *name* to its installed ref."""
    global result
    # The user-supplied name (possibly a URL) may not equal the installed
    # name, so match it against the list of installed flatpaks first.
    matched_name = _match_installed_flat_name(module, binary, name, method)
    command = "{0} uninstall -y --{1} {2}".format(binary, method, matched_name)
    _flatpak_command(module, module.check_mode, command)
    result['changed'] = True
|
||||
|
||||
|
||||
def flatpak_exists(module, binary, name, method):
    """Return True when the flatpak *name* appears in the installed app list."""
    listing = _flatpak_command(module, False, "{0} list --{1} --app".format(binary, method))
    # Case-insensitive substring match against the raw list output.
    return _parse_flatpak_name(name).lower() in listing.lower()
|
||||
|
||||
|
||||
def _match_installed_flat_name(module, binary, name, method):
    """Resolve a user-supplied name/URL to the name of an installed flatpak.

    Fails the module when no installed flatpak matches.
    """
    # This is a difficult function, since if the user supplies a flatpakref url,
    # we have to rely on a naming convention:
    # The flatpakref file name needs to match the flatpak name
    global result
    parsed_name = _parse_flatpak_name(name)
    # Probe whether this flatpak supports --columns (added in flatpak 1.2);
    # ignore_failure lets us inspect rc/stderr instead of failing here.
    # NOTE(review): the two helper names below look swapped relative to
    # their implementations (the "column_feature" helper does not pass
    # --columns, while the "outdated_flatpak_format" one does).  The
    # control flow appears behaviorally consistent as written — confirm
    # before renaming anything.
    command = "{0} list --{1} --app --columns=application".format(binary, method)
    _flatpak_command(module, False, command, ignore_failure=True)
    if result['rc'] != 0 and OUTDATED_FLATPAK_VERSION_ERROR_MESSAGE in result['stderr']:
        # Probably flatpak before 1.2
        matched_flatpak_name = \
            _match_flat_using_flatpak_column_feature(module, binary, parsed_name, method)
    else:
        # Probably flatpak >= 1.2
        matched_flatpak_name = \
            _match_flat_using_outdated_flatpak_format(module, binary, parsed_name, method)

    if matched_flatpak_name:
        return matched_flatpak_name
    else:
        result['msg'] = "Flatpak removal failed: Could not match any installed flatpaks to " +\
                        "the name `{0}`. ".format(_parse_flatpak_name(name)) +\
                        "If you used a URL, try using the reverse DNS name of the flatpak"
        module.fail_json(**result)
|
||||
|
||||
|
||||
def _match_flat_using_outdated_flatpak_format(module, binary, parsed_name, method):
    """Find *parsed_name* in ``flatpak list`` output using --columns=application.

    Returns the matching application id, or None (implicitly) when no
    row equals the name.
    NOTE(review): despite the function's name, this variant DOES pass
    --columns=application (the modern, >=1.2 format) — confirm intent
    before renaming.
    """
    global result
    command = "{0} list --{1} --app --columns=application".format(binary, method)
    output = _flatpak_command(module, False, command)
    # Exact (case-insensitive) match: each row is just the application id.
    for row in output.split('\n'):
        if parsed_name.lower() == row.lower():
            return row
|
||||
|
||||
|
||||
def _match_flat_using_flatpak_column_feature(module, binary, parsed_name, method):
    """Find *parsed_name* in plain ``flatpak list`` output (no --columns).

    Returns the first whitespace-separated field of the first row that
    contains the name, or None (implicitly) when nothing matches.
    NOTE(review): despite the function's name, this variant does NOT use
    the --columns feature — it parses the legacy multi-column output.
    Confirm intent before renaming.
    """
    global result
    command = "{0} list --{1} --app".format(binary, method)
    output = _flatpak_command(module, False, command)
    # Substring (case-insensitive) match; the installed name is the first column.
    for row in output.split('\n'):
        if parsed_name.lower() in row.lower():
            return row.split()[0]
|
||||
|
||||
|
||||
def _parse_flatpak_name(name):
|
||||
if name.startswith('http://') or name.startswith('https://'):
|
||||
file_name = urlparse(name).path.split('/')[-1]
|
||||
file_name_without_extension = file_name.split('.')[0:-1]
|
||||
common_name = ".".join(file_name_without_extension)
|
||||
else:
|
||||
common_name = name
|
||||
return common_name
|
||||
|
||||
|
||||
def _flatpak_command(module, noop, command, ignore_failure=False):
    """Run *command*, record rc/command/stdout/stderr in the global result.

    Returns the decoded stdout.  When *noop* is true (check mode) no
    process is started and an empty string is returned.  Unless
    *ignore_failure* is set, a non-zero exit code fails the module.
    """
    global result
    if noop:
        result['rc'] = 0
        result['command'] = command
        return ""

    process = subprocess.Popen(
        command.split(), stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    stdout_data, stderr_data = process.communicate()
    result['rc'] = process.returncode
    result['command'] = command
    result['stdout'] = to_native(stdout_data)
    result['stderr'] = to_native(stderr_data)
    if result['rc'] != 0 and not ignore_failure:
        module.fail_json(msg="Failed to execute flatpak command", **result)
    return to_native(stdout_data)
|
||||
|
||||
|
||||
def main():
    """Entry point: ensure the requested flatpak is present or absent."""
    # This module supports check mode
    module = AnsibleModule(
        argument_spec=dict(
            name=dict(type='str', required=True),
            remote=dict(type='str', default='flathub'),
            method=dict(type='str', default='system',
                        choices=['user', 'system']),
            state=dict(type='str', default='present',
                       choices=['absent', 'present']),
            executable=dict(type='path', default='flatpak')
        ),
        supports_check_mode=True,
    )

    name = module.params['name']
    state = module.params['state']
    remote = module.params['remote']
    method = module.params['method']
    executable = module.params['executable']
    binary = module.get_bin_path(executable, None)

    # Shared mutable result dict populated by the helper functions.
    global result
    result = dict(
        changed=False
    )

    # If the binary was not found, fail the operation
    if not binary:
        module.fail_json(msg="Executable '%s' was not found on the system." % executable, **result)

    # Act only when the current state differs from the desired state.
    if state == 'present' and not flatpak_exists(module, binary, name, method):
        install_flat(module, binary, remote, name, method)
    elif state == 'absent' and flatpak_exists(module, binary, name, method):
        uninstall_flat(module, binary, name, method)

    module.exit_json(**result)
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
main()
|
||||
242
plugins/modules/packaging/os/flatpak_remote.py
Normal file
242
plugins/modules/packaging/os/flatpak_remote.py
Normal file
@@ -0,0 +1,242 @@
|
||||
#!/usr/bin/python
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
# Copyright: (c) 2017 John Kwiatkoski (@JayKayy) <jkwiat40@gmail.com>
|
||||
# Copyright: (c) 2018 Alexander Bethke (@oolongbrothers) <oolongbrothers@gmx.net>
|
||||
# Copyright: (c) 2017 Ansible Project
|
||||
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||
|
||||
|
||||
# ATTENTION CONTRIBUTORS!
|
||||
#
|
||||
# TL;DR: Run this module's integration tests manually before opening a pull request
|
||||
#
|
||||
# Long explanation:
|
||||
# The integration tests for this module are currently NOT run on the Ansible project's continuous
|
||||
# delivery pipeline. So please: When you make changes to this module, make sure that you run the
|
||||
# included integration tests manually for both Python 2 and Python 3:
|
||||
#
|
||||
# Python 2:
|
||||
# ansible-test integration -v --docker fedora28 --docker-privileged --allow-unsupported --python 2.7 flatpak_remote
|
||||
# Python 3:
|
||||
# ansible-test integration -v --docker fedora28 --docker-privileged --allow-unsupported --python 3.6 flatpak_remote
|
||||
#
|
||||
# Because of external dependencies, the current integration tests are somewhat too slow and brittle
|
||||
# to be included right now. I have plans to rewrite the integration tests based on a local flatpak
|
||||
# repository so that they can be included into the normal CI pipeline.
|
||||
# //oolongbrothers
|
||||
|
||||
|
||||
from __future__ import (absolute_import, division, print_function)
|
||||
__metaclass__ = type
|
||||
|
||||
ANSIBLE_METADATA = {'metadata_version': '1.1',
|
||||
'status': ['preview'],
|
||||
'supported_by': 'community'}
|
||||
|
||||
DOCUMENTATION = r'''
|
||||
---
|
||||
module: flatpak_remote
|
||||
short_description: Manage flatpak repository remotes
|
||||
description:
|
||||
- Allows users to add or remove flatpak remotes.
|
||||
- The flatpak remotes concept is comparable to what is called repositories in other packaging
|
||||
formats.
|
||||
- Currently, remote addition is only supported via I(flatpakrepo) file URLs.
|
||||
- Existing remotes will not be updated.
|
||||
- See the M(flatpak) module for managing flatpaks.
|
||||
author:
|
||||
- John Kwiatkoski (@JayKayy)
|
||||
- Alexander Bethke (@oolongbrothers)
|
||||
requirements:
|
||||
- flatpak
|
||||
options:
|
||||
executable:
|
||||
description:
|
||||
- The path to the C(flatpak) executable to use.
|
||||
- By default, this module looks for the C(flatpak) executable on the path.
|
||||
default: flatpak
|
||||
flatpakrepo_url:
|
||||
description:
|
||||
- The URL to the I(flatpakrepo) file representing the repository remote to add.
|
||||
- When used with I(state=present), the flatpak remote specified under the I(flatpakrepo_url)
|
||||
is added using the specified installation C(method).
|
||||
- When used with I(state=absent), this is not required.
|
||||
- Required when I(state=present).
|
||||
method:
|
||||
description:
|
||||
- The installation method to use.
|
||||
- Defines if the I(flatpak) is supposed to be installed globally for the whole C(system)
|
||||
or only for the current C(user).
|
||||
choices: [ system, user ]
|
||||
default: system
|
||||
name:
|
||||
description:
|
||||
- The desired name for the flatpak remote to be registered under on the managed host.
|
||||
- When used with I(state=present), the remote will be added to the managed host under
|
||||
the specified I(name).
|
||||
- When used with I(state=absent) the remote with that name will be removed.
|
||||
required: true
|
||||
state:
|
||||
description:
|
||||
- Indicates the desired package state.
|
||||
choices: [ absent, present ]
|
||||
default: present
|
||||
'''
|
||||
|
||||
EXAMPLES = r'''
|
||||
- name: Add the Gnome flatpak remote to the system installation
|
||||
flatpak_remote:
|
||||
name: gnome
|
||||
state: present
|
||||
flatpakrepo_url: https://sdk.gnome.org/gnome-apps.flatpakrepo
|
||||
|
||||
- name: Add the flathub flatpak repository remote to the user installation
|
||||
flatpak_remote:
|
||||
name: flathub
|
||||
state: present
|
||||
flatpakrepo_url: https://dl.flathub.org/repo/flathub.flatpakrepo
|
||||
method: user
|
||||
|
||||
- name: Remove the Gnome flatpak remote from the user installation
|
||||
flatpak_remote:
|
||||
name: gnome
|
||||
state: absent
|
||||
method: user
|
||||
|
||||
- name: Remove the flathub remote from the system installation
|
||||
flatpak_remote:
|
||||
name: flathub
|
||||
state: absent
|
||||
'''
|
||||
|
||||
RETURN = r'''
|
||||
command:
|
||||
description: The exact flatpak command that was executed
|
||||
returned: When a flatpak command has been executed
|
||||
type: str
|
||||
sample: "/usr/bin/flatpak remote-add --system flatpak-test https://dl.flathub.org/repo/flathub.flatpakrepo"
|
||||
msg:
|
||||
description: Module error message
|
||||
returned: failure
|
||||
type: str
|
||||
sample: "Executable '/usr/local/bin/flatpak' was not found on the system."
|
||||
rc:
|
||||
description: Return code from flatpak binary
|
||||
returned: When a flatpak command has been executed
|
||||
type: int
|
||||
sample: 0
|
||||
stderr:
|
||||
description: Error output from flatpak binary
|
||||
returned: When a flatpak command has been executed
|
||||
type: str
|
||||
sample: "error: GPG verification enabled, but no summary found (check that the configured URL in remote config is correct)\n"
|
||||
stdout:
|
||||
description: Output from flatpak binary
|
||||
returned: When a flatpak command has been executed
|
||||
type: str
|
||||
sample: "flathub\tFlathub\thttps://dl.flathub.org/repo/\t1\t\n"
|
||||
'''
|
||||
|
||||
import subprocess
|
||||
from ansible.module_utils.basic import AnsibleModule
|
||||
from ansible.module_utils._text import to_bytes, to_native
|
||||
|
||||
|
||||
def add_remote(module, binary, name, flatpakrepo_url, method):
    """Register a new flatpak remote via ``flatpak remote-add``."""
    global result
    # Assemble the exact command line; --{method} selects whether the
    # remote lands in the system-wide or the per-user installation.
    command = " ".join(
        [binary, "remote-add", "--" + method, name, flatpakrepo_url])
    # In check mode _flatpak_command only records the command without
    # executing anything.
    _flatpak_command(module, module.check_mode, command)
    result['changed'] = True
|
||||
|
||||
|
||||
def remove_remote(module, binary, name, method):
    """Remove an existing flatpak remote via ``flatpak remote-delete``.

    ``--force`` removes the remote even when applications installed
    from it are still present.
    """
    global result
    # No trailing whitespace in the format string, so the command line
    # recorded in result['command'] is reported cleanly.
    command = "{0} remote-delete --{1} --force {2}".format(
        binary, method, name)
    _flatpak_command(module, module.check_mode, command)
    result['changed'] = True
|
||||
|
||||
|
||||
def remote_exists(module, binary, name, method):
    """Return True if a remote called *name* is already configured."""
    command = "{0} remote-list -d --{1}".format(binary, method)
    # The query has to run for real even in check mode, otherwise we
    # could not tell whether a change would be required.
    output = _flatpak_command(module, False, command)
    wanted = to_native(name)
    # Each non-empty output line starts with the remote's name.
    for line in output.splitlines():
        fields = line.split()
        if fields and fields[0] == wanted:
            return True
    return False
|
||||
|
||||
|
||||
def _flatpak_command(module, noop, command):
    """Run *command* and record its outcome in the global result dict.

    Returns the command's stdout as native text.  When *noop* is true
    (check mode) nothing is executed and an empty string is returned.
    Fails the module if the command exits non-zero.
    """
    global result
    if noop:
        result['rc'] = 0
        result['command'] = command
        return ""

    process = subprocess.Popen(
        command.split(), stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    stdout_data, stderr_data = process.communicate()
    result['rc'] = process.returncode
    result['command'] = command
    # communicate() yields bytes on Python 3; decode to native text so
    # the JSON result does not carry raw bytes objects.
    result['stdout'] = to_native(stdout_data)
    result['stderr'] = to_native(stderr_data)
    if result['rc'] != 0:
        module.fail_json(msg="Failed to execute flatpak command", **result)
    return to_native(stdout_data)
|
||||
|
||||
|
||||
def main():
    """Module entry point: parse parameters and converge the remote state."""
    module = AnsibleModule(
        argument_spec=dict(
            name=dict(type='str', required=True),
            flatpakrepo_url=dict(type='str'),
            method=dict(type='str', default='system',
                        choices=['user', 'system']),
            state=dict(type='str', default="present",
                       choices=['absent', 'present']),
            executable=dict(type='str', default="flatpak")
        ),
        # This module supports check mode
        supports_check_mode=True,
    )

    name = module.params['name']
    flatpakrepo_url = module.params['flatpakrepo_url']
    method = module.params['method']
    state = module.params['state']
    executable = module.params['executable']
    # Resolve the flatpak executable on the target's PATH; None if missing.
    binary = module.get_bin_path(executable, None)

    # flatpakrepo_url is only needed for state=present; normalise to ''.
    if flatpakrepo_url is None:
        flatpakrepo_url = ''

    # Shared result dict mutated by the helper functions above.
    global result
    result = dict(
        changed=False
    )

    # If the binary was not found, fail the operation
    if not binary:
        module.fail_json(msg="Executable '%s' was not found on the system." % executable, **result)

    remote_already_exists = remote_exists(module, binary, to_bytes(name), method)

    # Only act when the current state differs from the requested one.
    if state == 'present' and not remote_already_exists:
        add_remote(module, binary, name, flatpakrepo_url, method)
    elif state == 'absent' and remote_already_exists:
        remove_remote(module, binary, name, method)

    module.exit_json(**result)
|
||||
|
||||
|
||||
# Run the module only when this file is executed directly by Ansible.
if __name__ == '__main__':
    main()
|
||||
904
plugins/modules/packaging/os/homebrew.py
Normal file
904
plugins/modules/packaging/os/homebrew.py
Normal file
@@ -0,0 +1,904 @@
|
||||
#!/usr/bin/python
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
# (c) 2013, Andrew Dunham <andrew@du.nham.ca>
|
||||
# (c) 2013, Daniel Jaouen <dcj24@cornell.edu>
|
||||
# (c) 2015, Indrajit Raychaudhuri <irc+code@indrajit.com>
|
||||
#
|
||||
# Based on macports (Jimmy Tang <jcftang@gmail.com>)
|
||||
#
|
||||
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||
|
||||
from __future__ import absolute_import, division, print_function
|
||||
__metaclass__ = type
|
||||
|
||||
|
||||
ANSIBLE_METADATA = {'metadata_version': '1.1',
|
||||
'status': ['preview'],
|
||||
'supported_by': 'community'}
|
||||
|
||||
|
||||
DOCUMENTATION = '''
|
||||
---
|
||||
module: homebrew
|
||||
author:
|
||||
- "Indrajit Raychaudhuri (@indrajitr)"
|
||||
- "Daniel Jaouen (@danieljaouen)"
|
||||
- "Andrew Dunham (@andrew-d)"
|
||||
requirements:
|
||||
- "python >= 2.6"
|
||||
- homebrew must already be installed on the target system
|
||||
short_description: Package manager for Homebrew
|
||||
description:
|
||||
- Manages Homebrew packages
|
||||
options:
|
||||
name:
|
||||
description:
|
||||
- list of names of packages to install/remove
|
||||
aliases: ['pkg', 'package', 'formula']
|
||||
type: list
|
||||
elements: str
|
||||
path:
|
||||
description:
|
||||
- "A ':' separated list of paths to search for 'brew' executable.
|
||||
Since a package (I(formula) in homebrew parlance) location is prefixed relative to the actual path of I(brew) command,
|
||||
providing an alternative I(brew) path enables managing different set of packages in an alternative location in the system."
|
||||
default: '/usr/local/bin'
|
||||
state:
|
||||
description:
|
||||
- state of the package
|
||||
choices: [ 'head', 'latest', 'present', 'absent', 'linked', 'unlinked' ]
|
||||
default: present
|
||||
update_homebrew:
|
||||
description:
|
||||
- update homebrew itself first
|
||||
type: bool
|
||||
default: 'no'
|
||||
aliases: ['update-brew']
|
||||
upgrade_all:
|
||||
description:
|
||||
- upgrade all homebrew packages
|
||||
type: bool
|
||||
default: 'no'
|
||||
aliases: ['upgrade']
|
||||
install_options:
|
||||
description:
|
||||
- options flags to install a package
|
||||
aliases: ['options']
|
||||
notes:
|
||||
- When used with a `loop:` each package will be processed individually,
|
||||
it is much more efficient to pass the list directly to the `name` option.
|
||||
'''
|
||||
EXAMPLES = '''
|
||||
# Install formula foo with 'brew' in default path (C(/usr/local/bin))
|
||||
- homebrew:
|
||||
name: foo
|
||||
state: present
|
||||
|
||||
# Install formula foo with 'brew' in alternate path C(/my/other/location/bin)
|
||||
- homebrew:
|
||||
name: foo
|
||||
path: /my/other/location/bin
|
||||
state: present
|
||||
|
||||
# Update homebrew first and install formula foo with 'brew' in default path
|
||||
- homebrew:
|
||||
name: foo
|
||||
state: present
|
||||
update_homebrew: yes
|
||||
|
||||
# Update homebrew first and upgrade formula foo to latest available with 'brew' in default path
|
||||
- homebrew:
|
||||
name: foo
|
||||
state: latest
|
||||
update_homebrew: yes
|
||||
|
||||
# Update homebrew and upgrade all packages
|
||||
- homebrew:
|
||||
update_homebrew: yes
|
||||
upgrade_all: yes
|
||||
|
||||
# Miscellaneous other examples
|
||||
- homebrew:
|
||||
name: foo
|
||||
state: head
|
||||
|
||||
- homebrew:
|
||||
name: foo
|
||||
state: linked
|
||||
|
||||
- homebrew:
|
||||
name: foo
|
||||
state: absent
|
||||
|
||||
- homebrew:
|
||||
name: foo,bar
|
||||
state: absent
|
||||
|
||||
- homebrew:
|
||||
name: foo
|
||||
state: present
|
||||
install_options: with-baz,enable-debug
|
||||
'''
|
||||
|
||||
import os.path
|
||||
import re
|
||||
|
||||
from ansible.module_utils.basic import AnsibleModule
|
||||
from ansible.module_utils.six import iteritems, string_types
|
||||
|
||||
|
||||
# exceptions -------------------------------------------------------------- {{{
|
||||
class HomebrewException(Exception):
|
||||
pass
|
||||
# /exceptions ------------------------------------------------------------- }}}
|
||||
|
||||
|
||||
# utils ------------------------------------------------------------------- {{{
|
||||
def _create_regex_group(s):
|
||||
lines = (line.strip() for line in s.split('\n') if line.strip())
|
||||
chars = filter(None, (line.split('#')[0].strip() for line in lines))
|
||||
group = r'[^' + r''.join(chars) + r']'
|
||||
return re.compile(group)
|
||||
# /utils ------------------------------------------------------------------ }}}
|
||||
|
||||
|
||||
class Homebrew(object):
|
||||
'''A class to manage Homebrew packages.'''
|
||||
|
||||
# class regexes ------------------------------------------------ {{{
|
||||
VALID_PATH_CHARS = r'''
|
||||
\w # alphanumeric characters (i.e., [a-zA-Z0-9_])
|
||||
\s # spaces
|
||||
: # colons
|
||||
{sep} # the OS-specific path separator
|
||||
. # dots
|
||||
- # dashes
|
||||
'''.format(sep=os.path.sep)
|
||||
|
||||
VALID_BREW_PATH_CHARS = r'''
|
||||
\w # alphanumeric characters (i.e., [a-zA-Z0-9_])
|
||||
\s # spaces
|
||||
{sep} # the OS-specific path separator
|
||||
. # dots
|
||||
- # dashes
|
||||
'''.format(sep=os.path.sep)
|
||||
|
||||
VALID_PACKAGE_CHARS = r'''
|
||||
\w # alphanumeric characters (i.e., [a-zA-Z0-9_])
|
||||
. # dots
|
||||
/ # slash (for taps)
|
||||
\+ # plusses
|
||||
- # dashes
|
||||
: # colons (for URLs)
|
||||
@ # at-sign
|
||||
'''
|
||||
|
||||
INVALID_PATH_REGEX = _create_regex_group(VALID_PATH_CHARS)
|
||||
INVALID_BREW_PATH_REGEX = _create_regex_group(VALID_BREW_PATH_CHARS)
|
||||
INVALID_PACKAGE_REGEX = _create_regex_group(VALID_PACKAGE_CHARS)
|
||||
# /class regexes ----------------------------------------------- }}}
|
||||
|
||||
# class validations -------------------------------------------- {{{
|
||||
@classmethod
|
||||
def valid_path(cls, path):
|
||||
'''
|
||||
`path` must be one of:
|
||||
- list of paths
|
||||
- a string containing only:
|
||||
- alphanumeric characters
|
||||
- dashes
|
||||
- dots
|
||||
- spaces
|
||||
- colons
|
||||
- os.path.sep
|
||||
'''
|
||||
|
||||
if isinstance(path, string_types):
|
||||
return not cls.INVALID_PATH_REGEX.search(path)
|
||||
|
||||
try:
|
||||
iter(path)
|
||||
except TypeError:
|
||||
return False
|
||||
else:
|
||||
paths = path
|
||||
return all(cls.valid_brew_path(path_) for path_ in paths)
|
||||
|
||||
@classmethod
|
||||
def valid_brew_path(cls, brew_path):
|
||||
'''
|
||||
`brew_path` must be one of:
|
||||
- None
|
||||
- a string containing only:
|
||||
- alphanumeric characters
|
||||
- dashes
|
||||
- dots
|
||||
- spaces
|
||||
- os.path.sep
|
||||
'''
|
||||
|
||||
if brew_path is None:
|
||||
return True
|
||||
|
||||
return (
|
||||
isinstance(brew_path, string_types)
|
||||
and not cls.INVALID_BREW_PATH_REGEX.search(brew_path)
|
||||
)
|
||||
|
||||
@classmethod
|
||||
def valid_package(cls, package):
|
||||
'''A valid package is either None or alphanumeric.'''
|
||||
|
||||
if package is None:
|
||||
return True
|
||||
|
||||
return (
|
||||
isinstance(package, string_types)
|
||||
and not cls.INVALID_PACKAGE_REGEX.search(package)
|
||||
)
|
||||
|
||||
@classmethod
|
||||
def valid_state(cls, state):
|
||||
'''
|
||||
A valid state is one of:
|
||||
- None
|
||||
- installed
|
||||
- upgraded
|
||||
- head
|
||||
- linked
|
||||
- unlinked
|
||||
- absent
|
||||
'''
|
||||
|
||||
if state is None:
|
||||
return True
|
||||
else:
|
||||
return (
|
||||
isinstance(state, string_types)
|
||||
and state.lower() in (
|
||||
'installed',
|
||||
'upgraded',
|
||||
'head',
|
||||
'linked',
|
||||
'unlinked',
|
||||
'absent',
|
||||
)
|
||||
)
|
||||
|
||||
@classmethod
|
||||
def valid_module(cls, module):
|
||||
'''A valid module is an instance of AnsibleModule.'''
|
||||
|
||||
return isinstance(module, AnsibleModule)
|
||||
|
||||
# /class validations ------------------------------------------- }}}
|
||||
|
||||
# class properties --------------------------------------------- {{{
|
||||
@property
|
||||
def module(self):
|
||||
return self._module
|
||||
|
||||
@module.setter
|
||||
def module(self, module):
|
||||
if not self.valid_module(module):
|
||||
self._module = None
|
||||
self.failed = True
|
||||
self.message = 'Invalid module: {0}.'.format(module)
|
||||
raise HomebrewException(self.message)
|
||||
|
||||
else:
|
||||
self._module = module
|
||||
return module
|
||||
|
||||
@property
|
||||
def path(self):
|
||||
return self._path
|
||||
|
||||
@path.setter
|
||||
def path(self, path):
|
||||
if not self.valid_path(path):
|
||||
self._path = []
|
||||
self.failed = True
|
||||
self.message = 'Invalid path: {0}.'.format(path)
|
||||
raise HomebrewException(self.message)
|
||||
|
||||
else:
|
||||
if isinstance(path, string_types):
|
||||
self._path = path.split(':')
|
||||
else:
|
||||
self._path = path
|
||||
|
||||
return path
|
||||
|
||||
@property
|
||||
def brew_path(self):
|
||||
return self._brew_path
|
||||
|
||||
@brew_path.setter
|
||||
def brew_path(self, brew_path):
|
||||
if not self.valid_brew_path(brew_path):
|
||||
self._brew_path = None
|
||||
self.failed = True
|
||||
self.message = 'Invalid brew_path: {0}.'.format(brew_path)
|
||||
raise HomebrewException(self.message)
|
||||
|
||||
else:
|
||||
self._brew_path = brew_path
|
||||
return brew_path
|
||||
|
||||
@property
|
||||
def params(self):
|
||||
return self._params
|
||||
|
||||
@params.setter
|
||||
def params(self, params):
|
||||
self._params = self.module.params
|
||||
return self._params
|
||||
|
||||
@property
|
||||
def current_package(self):
|
||||
return self._current_package
|
||||
|
||||
@current_package.setter
|
||||
def current_package(self, package):
|
||||
if not self.valid_package(package):
|
||||
self._current_package = None
|
||||
self.failed = True
|
||||
self.message = 'Invalid package: {0}.'.format(package)
|
||||
raise HomebrewException(self.message)
|
||||
|
||||
else:
|
||||
self._current_package = package
|
||||
return package
|
||||
# /class properties -------------------------------------------- }}}
|
||||
|
||||
def __init__(self, module, path, packages=None, state=None,
|
||||
update_homebrew=False, upgrade_all=False,
|
||||
install_options=None):
|
||||
if not install_options:
|
||||
install_options = list()
|
||||
self._setup_status_vars()
|
||||
self._setup_instance_vars(module=module, path=path, packages=packages,
|
||||
state=state, update_homebrew=update_homebrew,
|
||||
upgrade_all=upgrade_all,
|
||||
install_options=install_options, )
|
||||
|
||||
self._prep()
|
||||
|
||||
# prep --------------------------------------------------------- {{{
|
||||
def _setup_status_vars(self):
|
||||
self.failed = False
|
||||
self.changed = False
|
||||
self.changed_count = 0
|
||||
self.unchanged_count = 0
|
||||
self.message = ''
|
||||
|
||||
def _setup_instance_vars(self, **kwargs):
|
||||
for key, val in iteritems(kwargs):
|
||||
setattr(self, key, val)
|
||||
|
||||
def _prep(self):
|
||||
self._prep_brew_path()
|
||||
|
||||
def _prep_brew_path(self):
|
||||
if not self.module:
|
||||
self.brew_path = None
|
||||
self.failed = True
|
||||
self.message = 'AnsibleModule not set.'
|
||||
raise HomebrewException(self.message)
|
||||
|
||||
self.brew_path = self.module.get_bin_path(
|
||||
'brew',
|
||||
required=True,
|
||||
opt_dirs=self.path,
|
||||
)
|
||||
if not self.brew_path:
|
||||
self.brew_path = None
|
||||
self.failed = True
|
||||
self.message = 'Unable to locate homebrew executable.'
|
||||
raise HomebrewException('Unable to locate homebrew executable.')
|
||||
|
||||
return self.brew_path
|
||||
|
||||
def _status(self):
|
||||
return (self.failed, self.changed, self.message)
|
||||
# /prep -------------------------------------------------------- }}}
|
||||
|
||||
def run(self):
|
||||
try:
|
||||
self._run()
|
||||
except HomebrewException:
|
||||
pass
|
||||
|
||||
if not self.failed and (self.changed_count + self.unchanged_count > 1):
|
||||
self.message = "Changed: %d, Unchanged: %d" % (
|
||||
self.changed_count,
|
||||
self.unchanged_count,
|
||||
)
|
||||
(failed, changed, message) = self._status()
|
||||
|
||||
return (failed, changed, message)
|
||||
|
||||
# checks ------------------------------------------------------- {{{
|
||||
def _current_package_is_installed(self):
|
||||
if not self.valid_package(self.current_package):
|
||||
self.failed = True
|
||||
self.message = 'Invalid package: {0}.'.format(self.current_package)
|
||||
raise HomebrewException(self.message)
|
||||
|
||||
cmd = [
|
||||
"{brew_path}".format(brew_path=self.brew_path),
|
||||
"info",
|
||||
self.current_package,
|
||||
]
|
||||
rc, out, err = self.module.run_command(cmd)
|
||||
for line in out.split('\n'):
|
||||
if (
|
||||
re.search(r'Built from source', line)
|
||||
or re.search(r'Poured from bottle', line)
|
||||
):
|
||||
return True
|
||||
|
||||
return False
|
||||
|
||||
    def _current_package_is_outdated(self):
        # Report whether `brew outdated <pkg>` considers the current
        # package out of date.  Invalid package names are treated as
        # "not outdated" rather than raising.
        if not self.valid_package(self.current_package):
            return False

        rc, out, err = self.module.run_command([
            self.brew_path,
            'outdated',
            self.current_package,
        ])

        # NOTE(review): assumes `brew outdated <pkg>` exits non-zero
        # exactly when the package is outdated -- confirm against the
        # brew version targeted by this module.
        return rc != 0
|
||||
|
||||
def _current_package_is_installed_from_head(self):
|
||||
if not Homebrew.valid_package(self.current_package):
|
||||
return False
|
||||
elif not self._current_package_is_installed():
|
||||
return False
|
||||
|
||||
rc, out, err = self.module.run_command([
|
||||
self.brew_path,
|
||||
'info',
|
||||
self.current_package,
|
||||
])
|
||||
|
||||
try:
|
||||
version_info = [line for line in out.split('\n') if line][0]
|
||||
except IndexError:
|
||||
return False
|
||||
|
||||
return version_info.split(' ')[-1] == 'HEAD'
|
||||
# /checks ------------------------------------------------------ }}}
|
||||
|
||||
# commands ----------------------------------------------------- {{{
|
||||
def _run(self):
|
||||
if self.update_homebrew:
|
||||
self._update_homebrew()
|
||||
|
||||
if self.upgrade_all:
|
||||
self._upgrade_all()
|
||||
|
||||
if self.packages:
|
||||
if self.state == 'installed':
|
||||
return self._install_packages()
|
||||
elif self.state == 'upgraded':
|
||||
return self._upgrade_packages()
|
||||
elif self.state == 'head':
|
||||
return self._install_packages()
|
||||
elif self.state == 'linked':
|
||||
return self._link_packages()
|
||||
elif self.state == 'unlinked':
|
||||
return self._unlink_packages()
|
||||
elif self.state == 'absent':
|
||||
return self._uninstall_packages()
|
||||
|
||||
# updated -------------------------------- {{{
|
||||
def _update_homebrew(self):
|
||||
if self.module.check_mode:
|
||||
self.changed = True
|
||||
self.message = 'Homebrew would be updated.'
|
||||
raise HomebrewException(self.message)
|
||||
rc, out, err = self.module.run_command([
|
||||
self.brew_path,
|
||||
'update',
|
||||
])
|
||||
if rc == 0:
|
||||
if out and isinstance(out, string_types):
|
||||
already_updated = any(
|
||||
re.search(r'Already up-to-date.', s.strip(), re.IGNORECASE)
|
||||
for s in out.split('\n')
|
||||
if s
|
||||
)
|
||||
if not already_updated:
|
||||
self.changed = True
|
||||
self.message = 'Homebrew updated successfully.'
|
||||
else:
|
||||
self.message = 'Homebrew already up-to-date.'
|
||||
|
||||
return True
|
||||
else:
|
||||
self.failed = True
|
||||
self.message = err.strip()
|
||||
raise HomebrewException(self.message)
|
||||
# /updated ------------------------------- }}}
|
||||
|
||||
# _upgrade_all --------------------------- {{{
|
||||
def _upgrade_all(self):
|
||||
if self.module.check_mode:
|
||||
self.changed = True
|
||||
self.message = 'Homebrew packages would be upgraded.'
|
||||
raise HomebrewException(self.message)
|
||||
rc, out, err = self.module.run_command([
|
||||
self.brew_path,
|
||||
'upgrade',
|
||||
])
|
||||
if rc == 0:
|
||||
if not out:
|
||||
self.message = 'Homebrew packages already upgraded.'
|
||||
|
||||
else:
|
||||
self.changed = True
|
||||
self.message = 'Homebrew upgraded.'
|
||||
|
||||
return True
|
||||
else:
|
||||
self.failed = True
|
||||
self.message = err.strip()
|
||||
raise HomebrewException(self.message)
|
||||
# /_upgrade_all -------------------------- }}}
|
||||
|
||||
# installed ------------------------------ {{{
|
||||
def _install_current_package(self):
|
||||
if not self.valid_package(self.current_package):
|
||||
self.failed = True
|
||||
self.message = 'Invalid package: {0}.'.format(self.current_package)
|
||||
raise HomebrewException(self.message)
|
||||
|
||||
if self._current_package_is_installed():
|
||||
self.unchanged_count += 1
|
||||
self.message = 'Package already installed: {0}'.format(
|
||||
self.current_package,
|
||||
)
|
||||
return True
|
||||
|
||||
if self.module.check_mode:
|
||||
self.changed = True
|
||||
self.message = 'Package would be installed: {0}'.format(
|
||||
self.current_package
|
||||
)
|
||||
raise HomebrewException(self.message)
|
||||
|
||||
if self.state == 'head':
|
||||
head = '--HEAD'
|
||||
else:
|
||||
head = None
|
||||
|
||||
opts = (
|
||||
[self.brew_path, 'install']
|
||||
+ self.install_options
|
||||
+ [self.current_package, head]
|
||||
)
|
||||
cmd = [opt for opt in opts if opt]
|
||||
rc, out, err = self.module.run_command(cmd)
|
||||
|
||||
if self._current_package_is_installed():
|
||||
self.changed_count += 1
|
||||
self.changed = True
|
||||
self.message = 'Package installed: {0}'.format(self.current_package)
|
||||
return True
|
||||
else:
|
||||
self.failed = True
|
||||
self.message = err.strip()
|
||||
raise HomebrewException(self.message)
|
||||
|
||||
def _install_packages(self):
|
||||
for package in self.packages:
|
||||
self.current_package = package
|
||||
self._install_current_package()
|
||||
|
||||
return True
|
||||
# /installed ----------------------------- }}}
|
||||
|
||||
# upgraded ------------------------------- {{{
|
||||
def _upgrade_current_package(self):
|
||||
command = 'upgrade'
|
||||
|
||||
if not self.valid_package(self.current_package):
|
||||
self.failed = True
|
||||
self.message = 'Invalid package: {0}.'.format(self.current_package)
|
||||
raise HomebrewException(self.message)
|
||||
|
||||
if not self._current_package_is_installed():
|
||||
command = 'install'
|
||||
|
||||
if self._current_package_is_installed() and not self._current_package_is_outdated():
|
||||
self.message = 'Package is already upgraded: {0}'.format(
|
||||
self.current_package,
|
||||
)
|
||||
self.unchanged_count += 1
|
||||
return True
|
||||
|
||||
if self.module.check_mode:
|
||||
self.changed = True
|
||||
self.message = 'Package would be upgraded: {0}'.format(
|
||||
self.current_package
|
||||
)
|
||||
raise HomebrewException(self.message)
|
||||
|
||||
opts = (
|
||||
[self.brew_path, command]
|
||||
+ self.install_options
|
||||
+ [self.current_package]
|
||||
)
|
||||
cmd = [opt for opt in opts if opt]
|
||||
rc, out, err = self.module.run_command(cmd)
|
||||
|
||||
if self._current_package_is_installed() and not self._current_package_is_outdated():
|
||||
self.changed_count += 1
|
||||
self.changed = True
|
||||
self.message = 'Package upgraded: {0}'.format(self.current_package)
|
||||
return True
|
||||
else:
|
||||
self.failed = True
|
||||
self.message = err.strip()
|
||||
raise HomebrewException(self.message)
|
||||
|
||||
def _upgrade_all_packages(self):
|
||||
opts = (
|
||||
[self.brew_path, 'upgrade']
|
||||
+ self.install_options
|
||||
)
|
||||
cmd = [opt for opt in opts if opt]
|
||||
rc, out, err = self.module.run_command(cmd)
|
||||
|
||||
if rc == 0:
|
||||
self.changed = True
|
||||
self.message = 'All packages upgraded.'
|
||||
return True
|
||||
else:
|
||||
self.failed = True
|
||||
self.message = err.strip()
|
||||
raise HomebrewException(self.message)
|
||||
|
||||
def _upgrade_packages(self):
|
||||
if not self.packages:
|
||||
self._upgrade_all_packages()
|
||||
else:
|
||||
for package in self.packages:
|
||||
self.current_package = package
|
||||
self._upgrade_current_package()
|
||||
return True
|
||||
# /upgraded ------------------------------ }}}
|
||||
|
||||
# uninstalled ---------------------------- {{{
|
||||
def _uninstall_current_package(self):
|
||||
if not self.valid_package(self.current_package):
|
||||
self.failed = True
|
||||
self.message = 'Invalid package: {0}.'.format(self.current_package)
|
||||
raise HomebrewException(self.message)
|
||||
|
||||
if not self._current_package_is_installed():
|
||||
self.unchanged_count += 1
|
||||
self.message = 'Package already uninstalled: {0}'.format(
|
||||
self.current_package,
|
||||
)
|
||||
return True
|
||||
|
||||
if self.module.check_mode:
|
||||
self.changed = True
|
||||
self.message = 'Package would be uninstalled: {0}'.format(
|
||||
self.current_package
|
||||
)
|
||||
raise HomebrewException(self.message)
|
||||
|
||||
opts = (
|
||||
[self.brew_path, 'uninstall', '--force']
|
||||
+ self.install_options
|
||||
+ [self.current_package]
|
||||
)
|
||||
cmd = [opt for opt in opts if opt]
|
||||
rc, out, err = self.module.run_command(cmd)
|
||||
|
||||
if not self._current_package_is_installed():
|
||||
self.changed_count += 1
|
||||
self.changed = True
|
||||
self.message = 'Package uninstalled: {0}'.format(self.current_package)
|
||||
return True
|
||||
else:
|
||||
self.failed = True
|
||||
self.message = err.strip()
|
||||
raise HomebrewException(self.message)
|
||||
|
||||
def _uninstall_packages(self):
|
||||
for package in self.packages:
|
||||
self.current_package = package
|
||||
self._uninstall_current_package()
|
||||
|
||||
return True
|
||||
# /uninstalled ----------------------------- }}}
|
||||
|
||||
# linked --------------------------------- {{{
|
||||
def _link_current_package(self):
|
||||
if not self.valid_package(self.current_package):
|
||||
self.failed = True
|
||||
self.message = 'Invalid package: {0}.'.format(self.current_package)
|
||||
raise HomebrewException(self.message)
|
||||
|
||||
if not self._current_package_is_installed():
|
||||
self.failed = True
|
||||
self.message = 'Package not installed: {0}.'.format(self.current_package)
|
||||
raise HomebrewException(self.message)
|
||||
|
||||
if self.module.check_mode:
|
||||
self.changed = True
|
||||
self.message = 'Package would be linked: {0}'.format(
|
||||
self.current_package
|
||||
)
|
||||
raise HomebrewException(self.message)
|
||||
|
||||
opts = (
|
||||
[self.brew_path, 'link']
|
||||
+ self.install_options
|
||||
+ [self.current_package]
|
||||
)
|
||||
cmd = [opt for opt in opts if opt]
|
||||
rc, out, err = self.module.run_command(cmd)
|
||||
|
||||
if rc == 0:
|
||||
self.changed_count += 1
|
||||
self.changed = True
|
||||
self.message = 'Package linked: {0}'.format(self.current_package)
|
||||
|
||||
return True
|
||||
else:
|
||||
self.failed = True
|
||||
self.message = 'Package could not be linked: {0}.'.format(self.current_package)
|
||||
raise HomebrewException(self.message)
|
||||
|
||||
def _link_packages(self):
|
||||
for package in self.packages:
|
||||
self.current_package = package
|
||||
self._link_current_package()
|
||||
|
||||
return True
|
||||
# /linked -------------------------------- }}}
|
||||
|
||||
# unlinked ------------------------------- {{{
|
||||
def _unlink_current_package(self):
|
||||
if not self.valid_package(self.current_package):
|
||||
self.failed = True
|
||||
self.message = 'Invalid package: {0}.'.format(self.current_package)
|
||||
raise HomebrewException(self.message)
|
||||
|
||||
if not self._current_package_is_installed():
|
||||
self.failed = True
|
||||
self.message = 'Package not installed: {0}.'.format(self.current_package)
|
||||
raise HomebrewException(self.message)
|
||||
|
||||
if self.module.check_mode:
|
||||
self.changed = True
|
||||
self.message = 'Package would be unlinked: {0}'.format(
|
||||
self.current_package
|
||||
)
|
||||
raise HomebrewException(self.message)
|
||||
|
||||
opts = (
|
||||
[self.brew_path, 'unlink']
|
||||
+ self.install_options
|
||||
+ [self.current_package]
|
||||
)
|
||||
cmd = [opt for opt in opts if opt]
|
||||
rc, out, err = self.module.run_command(cmd)
|
||||
|
||||
if rc == 0:
|
||||
self.changed_count += 1
|
||||
self.changed = True
|
||||
self.message = 'Package unlinked: {0}'.format(self.current_package)
|
||||
|
||||
return True
|
||||
else:
|
||||
self.failed = True
|
||||
self.message = 'Package could not be unlinked: {0}.'.format(self.current_package)
|
||||
raise HomebrewException(self.message)
|
||||
|
||||
def _unlink_packages(self):
    '''Unlink each package in self.packages in turn; raises on first failure.'''
    for pkg in self.packages:
        self.current_package = pkg
        self._unlink_current_package()
    return True
|
||||
# /unlinked ------------------------------ }}}
|
||||
# /commands ---------------------------------------------------- }}}
|
||||
|
||||
|
||||
def main():
    '''Module entry point: parse arguments and drive the Homebrew helper.'''
    module = AnsibleModule(
        argument_spec=dict(
            name=dict(
                aliases=["pkg", "package", "formula"],
                required=False,
                type='list',
                elements='str',
            ),
            path=dict(
                default="/usr/local/bin",
                required=False,
                type='path',
            ),
            state=dict(
                default="present",
                choices=[
                    "present", "installed",
                    "latest", "upgraded", "head",
                    "linked", "unlinked",
                    "absent", "removed", "uninstalled",
                ],
            ),
            update_homebrew=dict(
                default=False,
                aliases=["update-brew"],
                type='bool',
            ),
            upgrade_all=dict(
                default=False,
                aliases=["upgrade"],
                type='bool',
            ),
            install_options=dict(
                default=None,
                aliases=['options'],
                type='list',
                # FIX: declare element type, for validation/coercion and
                # consistency with the 'name' option above.
                elements='str',
            ),
        ),
        supports_check_mode=True,
    )

    # Force a C locale so brew's output is parseable regardless of host locale.
    module.run_command_environ_update = dict(LANG='C', LC_ALL='C', LC_MESSAGES='C', LC_CTYPE='C')

    p = module.params

    packages = p['name'] or None

    # 'path' is a colon-separated search list for the brew executable.
    path = p['path']
    if path:
        path = path.split(':')

    # Map state aliases onto canonical internal names.  The original code
    # also "mapped" head->head, linked->linked and unlinked->unlinked,
    # which were no-ops and have been dropped.
    state = p['state']
    if state in ('present', 'installed'):
        state = 'installed'
    elif state in ('latest', 'upgraded'):
        state = 'upgraded'
    elif state in ('absent', 'removed', 'uninstalled'):
        state = 'absent'

    update_homebrew = p['update_homebrew']
    upgrade_all = p['upgrade_all']
    p['install_options'] = p['install_options'] or []
    # Each bare option name becomes a '--option' flag for brew.
    install_options = ['--{0}'.format(install_option)
                       for install_option in p['install_options']]

    brew = Homebrew(module=module, path=path, packages=packages,
                    state=state, update_homebrew=update_homebrew,
                    upgrade_all=upgrade_all, install_options=install_options)
    (failed, changed, message) = brew.run()
    if failed:
        module.fail_json(msg=message)
    else:
        module.exit_json(changed=changed, msg=message)
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
main()
|
||||
846
plugins/modules/packaging/os/homebrew_cask.py
Normal file
846
plugins/modules/packaging/os/homebrew_cask.py
Normal file
@@ -0,0 +1,846 @@
|
||||
#!/usr/bin/python
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
# Copyright: (c) 2013, Daniel Jaouen <dcj24@cornell.edu>
|
||||
# Copyright: (c) 2016, Indrajit Raychaudhuri <irc+code@indrajit.com>
|
||||
#
|
||||
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||
|
||||
from __future__ import absolute_import, division, print_function
|
||||
__metaclass__ = type
|
||||
|
||||
|
||||
ANSIBLE_METADATA = {
|
||||
'metadata_version': '1.1',
|
||||
'status': ['preview'],
|
||||
'supported_by': 'community'
|
||||
}
|
||||
|
||||
|
||||
DOCUMENTATION = '''
|
||||
---
|
||||
module: homebrew_cask
|
||||
author:
|
||||
- "Indrajit Raychaudhuri (@indrajitr)"
|
||||
- "Daniel Jaouen (@danieljaouen)"
|
||||
- "Enric Lluelles (@enriclluelles)"
|
||||
requirements:
|
||||
- "python >= 2.6"
|
||||
short_description: Install and uninstall homebrew casks.
|
||||
description:
|
||||
- Manages Homebrew casks.
|
||||
options:
|
||||
name:
|
||||
description:
|
||||
- Name of cask to install or remove.
|
||||
required: true
|
||||
aliases: ['pkg', 'package', 'cask']
|
||||
type: list
|
||||
path:
|
||||
description:
|
||||
- "':' separated list of paths to search for 'brew' executable."
|
||||
default: '/usr/local/bin'
|
||||
type: path
|
||||
state:
|
||||
description:
|
||||
- State of the cask.
|
||||
choices: [ 'present', 'absent', 'upgraded' ]
|
||||
default: present
|
||||
type: str
|
||||
sudo_password:
|
||||
description:
|
||||
- The sudo password to be passed to SUDO_ASKPASS.
|
||||
required: false
|
||||
type: str
|
||||
update_homebrew:
|
||||
description:
|
||||
- Update homebrew itself first.
|
||||
- Note that C(brew cask update) is a synonym for C(brew update).
|
||||
type: bool
|
||||
default: 'no'
|
||||
aliases: ['update-brew']
|
||||
install_options:
|
||||
description:
|
||||
- Options flags to install a package.
|
||||
aliases: ['options']
|
||||
type: list
|
||||
accept_external_apps:
|
||||
description:
|
||||
- Allow external apps.
|
||||
type: bool
|
||||
default: 'no'
|
||||
upgrade_all:
|
||||
description:
|
||||
- Upgrade all casks.
|
||||
- Mutually exclusive with C(upgraded) state.
|
||||
type: bool
|
||||
default: 'no'
|
||||
aliases: ['upgrade']
|
||||
greedy:
|
||||
description:
|
||||
- Upgrade casks that auto update.
|
||||
- Passes --greedy to brew cask outdated when checking
|
||||
if an installed cask has a newer version available.
|
||||
type: bool
|
||||
default: 'no'
|
||||
'''
|
||||
EXAMPLES = '''
|
||||
- name: Install cask
|
||||
homebrew_cask:
|
||||
name: alfred
|
||||
state: present
|
||||
|
||||
- name: Remove cask
|
||||
homebrew_cask:
|
||||
name: alfred
|
||||
state: absent
|
||||
|
||||
- name: Install cask with install options
|
||||
homebrew_cask:
|
||||
name: alfred
|
||||
state: present
|
||||
install_options: 'appdir=/Applications'
|
||||
|
||||
- name: Install cask with install options
|
||||
homebrew_cask:
|
||||
name: alfred
|
||||
state: present
|
||||
install_options: 'debug,appdir=/Applications'
|
||||
|
||||
- name: Allow external app
|
||||
homebrew_cask:
|
||||
name: alfred
|
||||
state: present
|
||||
accept_external_apps: True
|
||||
|
||||
- name: Remove cask with force option
|
||||
homebrew_cask:
|
||||
name: alfred
|
||||
state: absent
|
||||
install_options: force
|
||||
|
||||
- name: Upgrade all casks
|
||||
homebrew_cask:
|
||||
upgrade_all: true
|
||||
|
||||
- name: Upgrade given cask with force option
|
||||
homebrew_cask:
|
||||
name: alfred
|
||||
state: upgraded
|
||||
install_options: force
|
||||
|
||||
- name: Upgrade cask with greedy option
|
||||
homebrew_cask:
|
||||
name: 1password
|
||||
state: upgraded
|
||||
greedy: True
|
||||
|
||||
- name: Using sudo password for installing cask
|
||||
homebrew_cask:
|
||||
name: wireshark
|
||||
state: present
|
||||
sudo_password: "{{ ansible_become_pass }}"
|
||||
'''
|
||||
|
||||
import os
|
||||
import re
|
||||
import tempfile
|
||||
|
||||
from ansible.module_utils._text import to_bytes
|
||||
from ansible.module_utils.basic import AnsibleModule
|
||||
from ansible.module_utils.six import iteritems, string_types
|
||||
|
||||
|
||||
# exceptions -------------------------------------------------------------- {{{
|
||||
class HomebrewCaskException(Exception):
|
||||
pass
|
||||
# /exceptions ------------------------------------------------------------- }}}
|
||||
|
||||
|
||||
# utils ------------------------------------------------------------------- {{{
|
||||
def _create_regex_group(s):
|
||||
lines = (line.strip() for line in s.split('\n') if line.strip())
|
||||
chars = filter(None, (line.split('#')[0].strip() for line in lines))
|
||||
group = r'[^' + r''.join(chars) + r']'
|
||||
return re.compile(group)
|
||||
# /utils ------------------------------------------------------------------ }}}
|
||||
|
||||
|
||||
class HomebrewCask(object):
|
||||
'''A class to manage Homebrew casks.'''
|
||||
|
||||
# class regexes ------------------------------------------------ {{{
|
||||
VALID_PATH_CHARS = r'''
|
||||
\w # alphanumeric characters (i.e., [a-zA-Z0-9_])
|
||||
\s # spaces
|
||||
: # colons
|
||||
{sep} # the OS-specific path separator
|
||||
. # dots
|
||||
- # dashes
|
||||
'''.format(sep=os.path.sep)
|
||||
|
||||
VALID_BREW_PATH_CHARS = r'''
|
||||
\w # alphanumeric characters (i.e., [a-zA-Z0-9_])
|
||||
\s # spaces
|
||||
{sep} # the OS-specific path separator
|
||||
. # dots
|
||||
- # dashes
|
||||
'''.format(sep=os.path.sep)
|
||||
|
||||
VALID_CASK_CHARS = r'''
|
||||
\w # alphanumeric characters (i.e., [a-zA-Z0-9_])
|
||||
. # dots
|
||||
/ # slash (for taps)
|
||||
- # dashes
|
||||
'''
|
||||
|
||||
INVALID_PATH_REGEX = _create_regex_group(VALID_PATH_CHARS)
|
||||
INVALID_BREW_PATH_REGEX = _create_regex_group(VALID_BREW_PATH_CHARS)
|
||||
INVALID_CASK_REGEX = _create_regex_group(VALID_CASK_CHARS)
|
||||
# /class regexes ----------------------------------------------- }}}
|
||||
|
||||
# class validations -------------------------------------------- {{{
|
||||
@classmethod
def valid_path(cls, path):
    '''True when *path* is a string of allowed path characters
    (alphanumerics, spaces, colons, dots, dashes, os.path.sep) or an
    iterable whose members are each a valid brew path.'''
    if isinstance(path, (string_types)):
        return not cls.INVALID_PATH_REGEX.search(path)

    try:
        iter(path)
    except TypeError:
        return False
    return all(cls.valid_brew_path(candidate) for candidate in path)

@classmethod
def valid_brew_path(cls, brew_path):
    '''True when *brew_path* is None or a string limited to
    alphanumerics, spaces, dots, dashes and os.path.sep.'''
    if brew_path is None:
        return True
    if not isinstance(brew_path, string_types):
        return False
    return not cls.INVALID_BREW_PATH_REGEX.search(brew_path)

@classmethod
def valid_cask(cls, cask):
    '''True when *cask* is None or a string of word characters, dots,
    slashes (for taps) and dashes.'''
    if cask is None:
        return True
    if not isinstance(cask, string_types):
        return False
    return not cls.INVALID_CASK_REGEX.search(cask)

@classmethod
def valid_state(cls, state):
    '''True when *state* is None, 'installed' or 'absent' (case-insensitive).'''
    if state is None:
        return True
    return (
        isinstance(state, string_types)
        and state.lower() in ('installed', 'absent')
    )

@classmethod
def valid_module(cls, module):
    '''A valid module is an instance of AnsibleModule.'''
    return isinstance(module, AnsibleModule)
|
||||
# /class validations ------------------------------------------- }}}
|
||||
|
||||
# class properties --------------------------------------------- {{{
|
||||
@property
def module(self):
    # The AnsibleModule driving this helper.
    return self._module

@module.setter
def module(self, module):
    if self.valid_module(module):
        self._module = module
        return module
    self._module = None
    self.failed = True
    self.message = 'Invalid module: {0}.'.format(module)
    raise HomebrewCaskException(self.message)

@property
def path(self):
    # Search path (list of directories) for the brew executable.
    return self._path

@path.setter
def path(self, path):
    if not self.valid_path(path):
        self._path = []
        self.failed = True
        self.message = 'Invalid path: {0}.'.format(path)
        raise HomebrewCaskException(self.message)
    # A colon-separated string is normalised into a list.
    self._path = path.split(':') if isinstance(path, string_types) else path
    return path

@property
def brew_path(self):
    # Absolute path of the brew executable, once resolved.
    return self._brew_path

@brew_path.setter
def brew_path(self, brew_path):
    if self.valid_brew_path(brew_path):
        self._brew_path = brew_path
        return brew_path
    self._brew_path = None
    self.failed = True
    self.message = 'Invalid brew_path: {0}.'.format(brew_path)
    raise HomebrewCaskException(self.message)

@property
def params(self):
    return self._params

@params.setter
def params(self, params):
    # NOTE: mirrors the module's params regardless of the value assigned.
    self._params = self.module.params
    return self._params

@property
def current_cask(self):
    # The cask currently being operated on.
    return self._current_cask

@current_cask.setter
def current_cask(self, cask):
    if self.valid_cask(cask):
        self._current_cask = cask
        return cask
    self._current_cask = None
    self.failed = True
    self.message = 'Invalid cask: {0}.'.format(cask)
    raise HomebrewCaskException(self.message)
|
||||
# /class properties -------------------------------------------- }}}
|
||||
|
||||
def __init__(self, module, path=None, casks=None, state=None,
             sudo_password=None, update_homebrew=False,
             install_options=None, accept_external_apps=False,
             upgrade_all=False, greedy=False):
    '''Initialise status vars, copy the arguments onto the instance
    (triggering the validating property setters) and locate brew.

    BUG FIX: the default for ``path`` used to be ``path=path``, which
    evaluated at class-body time and bound the class-level ``path``
    *property object* as the default — never a usable value.  ``None``
    is used instead; callers (main()) always pass an explicit path.
    '''
    if not install_options:
        install_options = list()
    self._setup_status_vars()
    self._setup_instance_vars(module=module, path=path, casks=casks,
                              state=state, sudo_password=sudo_password,
                              update_homebrew=update_homebrew,
                              install_options=install_options,
                              accept_external_apps=accept_external_apps,
                              upgrade_all=upgrade_all,
                              greedy=greedy, )

    self._prep()
|
||||
|
||||
# prep --------------------------------------------------------- {{{
|
||||
def _setup_status_vars(self):
    # Reset all result-reporting state before a run.
    self.failed = False
    self.changed = False
    self.changed_count = 0
    self.unchanged_count = 0
    self.message = ''

def _setup_instance_vars(self, **kwargs):
    # Copy every keyword argument verbatim onto the instance; validating
    # property setters fire where one is defined for the attribute.
    for attr_name, attr_value in iteritems(kwargs):
        setattr(self, attr_name, attr_value)

def _prep(self):
    self._prep_brew_path()

def _prep_brew_path(self):
    '''Resolve the brew executable via the module's bin-path search.

    Raises HomebrewCaskException when no module is set or brew cannot
    be located.
    '''
    if not self.module:
        self.brew_path = None
        self.failed = True
        self.message = 'AnsibleModule not set.'
        raise HomebrewCaskException(self.message)

    self.brew_path = self.module.get_bin_path(
        'brew',
        required=True,
        opt_dirs=self.path,
    )
    if not self.brew_path:
        self.brew_path = None
        self.failed = True
        self.message = 'Unable to locate homebrew executable.'
        raise HomebrewCaskException('Unable to locate homebrew executable.')

    return self.brew_path

def _status(self):
    # The (failed, changed, message) triple consumed by main().
    return (self.failed, self.changed, self.message)
|
||||
# /prep -------------------------------------------------------- }}}
|
||||
|
||||
def run(self):
    '''Execute the requested operation, swallowing the helper exception
    (failure state is carried in the status vars), and report the
    (failed, changed, message) triple.'''
    try:
        self._run()
    except HomebrewCaskException:
        pass

    # Summarise when more than one cask was processed.
    if not self.failed and (self.changed_count + self.unchanged_count > 1):
        self.message = "Changed: %d, Unchanged: %d" % (
            self.changed_count,
            self.unchanged_count,
        )
    return self._status()
|
||||
|
||||
# checks ------------------------------------------------------- {{{
|
||||
def _current_cask_is_outdated(self):
    '''True when ``brew cask outdated`` prints anything for the cask.'''
    if not self.valid_cask(self.current_cask):
        return False

    outdated_cmd = [self.brew_path, 'cask', 'outdated']
    if self.greedy:
        # --greedy also reports casks that auto-update themselves.
        outdated_cmd.append('--greedy')
    outdated_cmd.append(self.current_cask)

    dummy_rc, out, dummy_err = self.module.run_command(outdated_cmd)

    return out != ""

def _current_cask_is_installed(self):
    '''True when ``brew cask list <cask>`` exits successfully.'''
    if not self.valid_cask(self.current_cask):
        self.failed = True
        self.message = 'Invalid cask: {0}.'.format(self.current_cask)
        raise HomebrewCaskException(self.message)

    list_cmd = [self.brew_path, "cask", "list", self.current_cask]
    rc, dummy_out, dummy_err = self.module.run_command(list_cmd)

    return rc == 0
|
||||
# /checks ------------------------------------------------------ }}}
|
||||
|
||||
# commands ----------------------------------------------------- {{{
|
||||
def _run(self):
    '''Dispatch to the handler for the requested state.'''
    if self.upgrade_all:
        return self._upgrade_all()

    if self.casks:
        handlers = {
            'installed': self._install_casks,
            'upgraded': self._upgrade_casks,
            'absent': self._uninstall_casks,
        }
        handler = handlers.get(self.state)
        if handler is not None:
            return handler()

    self.failed = True
    self.message = "You must select a cask to install."
    raise HomebrewCaskException(self.message)
|
||||
|
||||
# sudo_password fix ---------------------- {{{
|
||||
def _run_command_with_sudo_password(self, cmd):
    '''Run *cmd* with SUDO_ASKPASS pointing at a throw-away shell script
    that echoes the configured sudo password.

    Returns the (rc, out, err) triple from run_command.
    '''
    with tempfile.NamedTemporaryFile() as sudo_askpass_file:
        sudo_askpass_file.write(b"#!/bin/sh\n\necho '%s'\n" % to_bytes(self.sudo_password))
        # Must be executable for sudo to invoke it.
        os.chmod(sudo_askpass_file.name, 0o700)
        sudo_askpass_file.file.close()

        rc, out, err = self.module.run_command(
            cmd,
            environ_update={'SUDO_ASKPASS': sudo_askpass_file.name}
        )

        # Ansible removes the script when the module exits.
        self.module.add_cleanup_file(sudo_askpass_file.name)

    return (rc, out, err)
|
||||
# /sudo_password fix --------------------- }}}
|
||||
|
||||
# updated -------------------------------- {{{
|
||||
def _update_homebrew(self):
    '''Run ``brew update``; mark changed only when something was fetched.'''
    rc, out, err = self.module.run_command([
        self.brew_path,
        'update',
    ])
    if rc != 0:
        self.failed = True
        self.message = err.strip()
        raise HomebrewCaskException(self.message)

    if out and isinstance(out, string_types):
        already_updated = any(
            re.search(r'Already up-to-date.', chunk.strip(), re.IGNORECASE)
            for chunk in out.split('\n')
            if chunk
        )
        if already_updated:
            self.message = 'Homebrew already up-to-date.'
        else:
            self.changed = True
            self.message = 'Homebrew updated successfully.'

    return True
|
||||
# /updated ------------------------------- }}}
|
||||
|
||||
# _upgrade_all --------------------------- {{{
|
||||
def _upgrade_all(self):
    '''Run ``brew cask upgrade`` for every installed cask.'''
    if self.module.check_mode:
        self.changed = True
        self.message = 'Casks would be upgraded.'
        raise HomebrewCaskException(self.message)

    cmd = [opt for opt in [self.brew_path, 'cask', 'upgrade'] if opt]

    if self.sudo_password:
        rc, out, err = self._run_command_with_sudo_password(cmd)
    else:
        rc, out, err = self.module.run_command(cmd)

    if rc != 0:
        self.failed = True
        self.message = err.strip()
        raise HomebrewCaskException(self.message)

    if re.search(r'==> No Casks to upgrade', out.strip(), re.IGNORECASE):
        self.message = 'Homebrew casks already upgraded.'
    else:
        self.changed = True
        self.message = 'Homebrew casks upgraded.'
    return True
|
||||
# /_upgrade_all -------------------------- }}}
|
||||
|
||||
# installed ------------------------------ {{{
|
||||
def _install_current_cask(self):
    '''Install the current cask unless it is already present.'''
    if not self.valid_cask(self.current_cask):
        self.failed = True
        self.message = 'Invalid cask: {0}.'.format(self.current_cask)
        raise HomebrewCaskException(self.message)

    if self._current_cask_is_installed():
        self.unchanged_count += 1
        self.message = 'Cask already installed: {0}'.format(
            self.current_cask,
        )
        return True

    if self.module.check_mode:
        self.changed = True
        self.message = 'Cask would be installed: {0}'.format(
            self.current_cask
        )
        raise HomebrewCaskException(self.message)

    install_cmd = [
        part
        for part in [self.brew_path, 'cask', 'install', self.current_cask] + self.install_options
        if part
    ]

    if self.sudo_password:
        rc, out, err = self._run_command_with_sudo_password(install_cmd)
    else:
        rc, out, err = self.module.run_command(install_cmd)

    # Success is judged by re-checking the installed state, not by rc.
    if self._current_cask_is_installed():
        self.changed_count += 1
        self.changed = True
        self.message = 'Cask installed: {0}'.format(self.current_cask)
        return True

    if self.accept_external_apps and re.search(r"Error: It seems there is already an App at", err):
        self.unchanged_count += 1
        self.message = 'Cask already installed: {0}'.format(
            self.current_cask,
        )
        return True

    self.failed = True
    self.message = err.strip()
    raise HomebrewCaskException(self.message)

def _install_casks(self):
    '''Install every cask in self.casks, one at a time.'''
    for cask in self.casks:
        self.current_cask = cask
        self._install_current_cask()
    return True
|
||||
# /installed ----------------------------- }}}
|
||||
|
||||
# upgraded ------------------------------- {{{
|
||||
def _upgrade_current_cask(self):
    '''Upgrade the current cask, or install it when it is missing.'''
    command = 'upgrade'

    if not self.valid_cask(self.current_cask):
        self.failed = True
        self.message = 'Invalid cask: {0}.'.format(self.current_cask)
        raise HomebrewCaskException(self.message)

    # Fall back to install when the cask is absent.
    if not self._current_cask_is_installed():
        command = 'install'

    if self._current_cask_is_installed() and not self._current_cask_is_outdated():
        self.message = 'Cask is already upgraded: {0}'.format(
            self.current_cask,
        )
        self.unchanged_count += 1
        return True

    if self.module.check_mode:
        self.changed = True
        self.message = 'Cask would be upgraded: {0}'.format(
            self.current_cask
        )
        raise HomebrewCaskException(self.message)

    upgrade_cmd = [
        part
        for part in [self.brew_path, 'cask', command] + self.install_options + [self.current_cask]
        if part
    ]

    if self.sudo_password:
        rc, out, err = self._run_command_with_sudo_password(upgrade_cmd)
    else:
        rc, out, err = self.module.run_command(upgrade_cmd)

    # Success is judged by re-checking installed + up-to-date, not by rc.
    if self._current_cask_is_installed() and not self._current_cask_is_outdated():
        self.changed_count += 1
        self.changed = True
        self.message = 'Cask upgraded: {0}'.format(self.current_cask)
        return True

    self.failed = True
    self.message = err.strip()
    raise HomebrewCaskException(self.message)

def _upgrade_casks(self):
    '''Upgrade every cask in self.casks, one at a time.'''
    for cask in self.casks:
        self.current_cask = cask
        self._upgrade_current_cask()
    return True
|
||||
# /upgraded ------------------------------ }}}
|
||||
|
||||
# uninstalled ---------------------------- {{{
|
||||
def _uninstall_current_cask(self):
    '''Uninstall the current cask unless it is already absent.'''
    if not self.valid_cask(self.current_cask):
        self.failed = True
        self.message = 'Invalid cask: {0}.'.format(self.current_cask)
        raise HomebrewCaskException(self.message)

    if not self._current_cask_is_installed():
        self.unchanged_count += 1
        self.message = 'Cask already uninstalled: {0}'.format(
            self.current_cask,
        )
        return True

    if self.module.check_mode:
        self.changed = True
        self.message = 'Cask would be uninstalled: {0}'.format(
            self.current_cask
        )
        raise HomebrewCaskException(self.message)

    uninstall_cmd = [
        part
        for part in [self.brew_path, 'cask', 'uninstall', self.current_cask] + self.install_options
        if part
    ]

    if self.sudo_password:
        rc, out, err = self._run_command_with_sudo_password(uninstall_cmd)
    else:
        rc, out, err = self.module.run_command(uninstall_cmd)

    # Success is judged by re-checking the installed state, not by rc.
    if not self._current_cask_is_installed():
        self.changed_count += 1
        self.changed = True
        self.message = 'Cask uninstalled: {0}'.format(self.current_cask)
        return True

    self.failed = True
    self.message = err.strip()
    raise HomebrewCaskException(self.message)

def _uninstall_casks(self):
    '''Uninstall every cask in self.casks, one at a time.'''
    for cask in self.casks:
        self.current_cask = cask
        self._uninstall_current_cask()
    return True
|
||||
# /uninstalled --------------------------- }}}
|
||||
# /commands ---------------------------------------------------- }}}
|
||||
|
||||
|
||||
def main():
    '''Module entry point: parse arguments and drive the HomebrewCask helper.'''
    module = AnsibleModule(
        argument_spec=dict(
            name=dict(
                aliases=["pkg", "package", "cask"],
                required=False,
                type='list',
                # FIX: declare element type, matching the homebrew module's
                # 'name' option; previously elements were unvalidated.
                elements='str',
            ),
            path=dict(
                default="/usr/local/bin",
                required=False,
                type='path',
            ),
            state=dict(
                default="present",
                choices=[
                    "present", "installed",
                    "latest", "upgraded",
                    "absent", "removed", "uninstalled",
                ],
            ),
            sudo_password=dict(
                type="str",
                required=False,
                no_log=True,
            ),
            update_homebrew=dict(
                default=False,
                aliases=["update-brew"],
                type='bool',
            ),
            install_options=dict(
                default=None,
                aliases=['options'],
                type='list',
                elements='str',  # FIX: was untyped
            ),
            accept_external_apps=dict(
                default=False,
                type='bool',
            ),
            upgrade_all=dict(
                default=False,
                aliases=["upgrade"],
                type='bool',
            ),
            greedy=dict(
                default=False,
                type='bool',
            ),
        ),
        supports_check_mode=True,
    )

    # Force a C locale so brew's output is parseable regardless of host locale.
    module.run_command_environ_update = dict(LANG='C', LC_ALL='C', LC_MESSAGES='C', LC_CTYPE='C')

    p = module.params

    casks = p['name'] or None

    # 'path' is a colon-separated search list for the brew executable.
    path = p['path']
    if path:
        path = path.split(':')

    # Map state aliases onto canonical internal names.
    state = p['state']
    if state in ('present', 'installed'):
        state = 'installed'
    elif state in ('latest', 'upgraded'):
        state = 'upgraded'
    elif state in ('absent', 'removed', 'uninstalled'):
        state = 'absent'

    sudo_password = p['sudo_password']

    update_homebrew = p['update_homebrew']
    upgrade_all = p['upgrade_all']
    greedy = p['greedy']
    p['install_options'] = p['install_options'] or []
    # Each bare option name becomes a '--option' flag for brew.
    install_options = ['--{0}'.format(install_option)
                       for install_option in p['install_options']]

    accept_external_apps = p['accept_external_apps']

    brew_cask = HomebrewCask(module=module, path=path, casks=casks,
                             state=state, sudo_password=sudo_password,
                             update_homebrew=update_homebrew,
                             install_options=install_options,
                             accept_external_apps=accept_external_apps,
                             upgrade_all=upgrade_all,
                             greedy=greedy,
                             )
    (failed, changed, message) = brew_cask.run()
    if failed:
        module.fail_json(msg=message)
    else:
        module.exit_json(changed=changed, msg=message)
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
main()
|
||||
253
plugins/modules/packaging/os/homebrew_tap.py
Normal file
253
plugins/modules/packaging/os/homebrew_tap.py
Normal file
@@ -0,0 +1,253 @@
|
||||
#!/usr/bin/python
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
# (c) 2013, Daniel Jaouen <dcj24@cornell.edu>
|
||||
# (c) 2016, Indrajit Raychaudhuri <irc+code@indrajit.com>
|
||||
#
|
||||
# Based on homebrew (Andrew Dunham <andrew@du.nham.ca>)
|
||||
#
|
||||
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||
|
||||
from __future__ import absolute_import, division, print_function
|
||||
__metaclass__ = type
|
||||
|
||||
|
||||
ANSIBLE_METADATA = {'metadata_version': '1.1',
|
||||
'status': ['preview'],
|
||||
'supported_by': 'community'}
|
||||
|
||||
|
||||
DOCUMENTATION = '''
|
||||
---
|
||||
module: homebrew_tap
|
||||
author:
|
||||
- "Indrajit Raychaudhuri (@indrajitr)"
|
||||
- "Daniel Jaouen (@danieljaouen)"
|
||||
short_description: Tap a Homebrew repository.
|
||||
description:
|
||||
- Tap external Homebrew repositories.
|
||||
options:
|
||||
name:
|
||||
description:
|
||||
- The GitHub user/organization repository to tap.
|
||||
required: true
|
||||
aliases: ['tap']
|
||||
url:
|
||||
description:
|
||||
- The optional git URL of the repository to tap. The URL is not
|
||||
assumed to be on GitHub, and the protocol doesn't have to be HTTP.
|
||||
Any location and protocol that git can handle is fine.
|
||||
- I(name) option may not be a list of multiple taps (but a single
|
||||
tap instead) when this option is provided.
|
||||
required: false
|
||||
state:
|
||||
description:
|
||||
- state of the repository.
|
||||
choices: [ 'present', 'absent' ]
|
||||
required: false
|
||||
default: 'present'
|
||||
requirements: [ homebrew ]
|
||||
'''
|
||||
|
||||
EXAMPLES = '''
|
||||
- homebrew_tap:
|
||||
name: homebrew/dupes
|
||||
|
||||
- homebrew_tap:
|
||||
name: homebrew/dupes
|
||||
state: absent
|
||||
|
||||
- homebrew_tap:
|
||||
name: homebrew/dupes,homebrew/science
|
||||
state: present
|
||||
|
||||
- homebrew_tap:
|
||||
name: telemachus/brew
|
||||
url: 'https://bitbucket.org/telemachus/brew'
|
||||
'''
|
||||
|
||||
import re
|
||||
|
||||
from ansible.module_utils.basic import AnsibleModule
|
||||
|
||||
|
||||
def a_valid_tap(tap):
    '''Return a truthy match object when *tap* looks like
    ``user/[homebrew-]repo``, else None.'''
    return re.match(r'^([\w-]+)/(homebrew-)?([\w-]+)$', tap)
|
||||
|
||||
|
||||
def already_tapped(module, brew_path, tap):
    '''Return True when *tap* already appears in ``brew tap`` output.

    Comparison is case-insensitive and ignores an optional 'homebrew-'
    repo prefix in the requested tap name.
    '''
    dummy_rc, out, dummy_err = module.run_command([
        brew_path,
        'tap',
    ])

    installed = [entry.strip().lower() for entry in out.split('\n') if entry]
    wanted = re.sub('homebrew-', '', tap.lower())

    return wanted in installed
|
||||
|
||||
|
||||
def add_tap(module, brew_path, tap, url=None):
    '''Add a single tap.

    Returns a (failed, changed, msg) triple.  In check mode the module
    exits early reporting a would-be change.
    '''
    failed, changed, msg = False, False, ''

    if not a_valid_tap(tap):
        failed = True
        msg = 'not a valid tap: %s' % tap

    elif not already_tapped(module, brew_path, tap):
        if module.check_mode:
            module.exit_json(changed=True)

        # BUG FIX: the URL is optional; the old code always appended it,
        # passing a literal None through to run_command's argv when no
        # URL was given.  Only append it when provided.
        cmd = [brew_path, 'tap', tap]
        if url is not None:
            cmd.append(url)
        rc, dummy_out, dummy_err = module.run_command(cmd)
        if rc == 0:
            changed = True
            msg = 'successfully tapped: %s' % tap
        else:
            failed = True
            msg = 'failed to tap: %s' % tap

    else:
        msg = 'already tapped: %s' % tap

    return (failed, changed, msg)
|
||||
|
||||
|
||||
def add_taps(module, brew_path, taps):
    '''Add one or more taps; returns a (failed, changed, msg) triple.

    Stops at the first failing tap and reports counts of added and
    unchanged taps.
    '''
    # BUG FIX: 'changed' was previously only assigned inside the loop, so
    # an empty 'taps' list raised NameError at the final return.  It is
    # now initialised up front.
    failed, changed, unchanged, added, msg = False, False, 0, 0, ''

    for tap in taps:
        (failed, changed, msg) = add_tap(module, brew_path, tap)
        if failed:
            break
        if changed:
            added += 1
        else:
            unchanged += 1

    if failed:
        msg = 'added: %d, unchanged: %d, error: ' + msg
        msg = msg % (added, unchanged)
    elif added:
        changed = True
        msg = 'added: %d, unchanged: %d' % (added, unchanged)
    else:
        msg = 'added: %d, unchanged: %d' % (added, unchanged)

    return (failed, changed, msg)
|
||||
|
||||
|
||||
def remove_tap(module, brew_path, tap):
    '''Removes a single tap.'''
    # Guard clauses for the two no-op outcomes.
    if not a_valid_tap(tap):
        return (True, False, 'not a valid tap: %s' % tap)

    if not already_tapped(module, brew_path, tap):
        return (False, False, 'already untapped: %s' % tap)

    if module.check_mode:
        module.exit_json(changed=True)

    # The command's rc is ignored; success is judged by re-querying the
    # tap list afterwards (as in the rest of this module).
    module.run_command([
        brew_path,
        'untap',
        tap,
    ])

    if already_tapped(module, brew_path, tap):
        return (True, False, 'failed to untap: %s' % tap)

    return (False, True, 'successfully untapped: %s' % tap)
|
||||
|
||||
|
||||
def remove_taps(module, brew_path, taps):
    '''Removes one or more taps.

    :param taps: iterable of tap names.
    :return: (failed, changed, msg) summarising the whole batch.
    '''
    # `changed` must be pre-initialised: with an empty taps list the loop
    # never runs and the original code hit a NameError at the return.
    failed, changed, unchanged, removed, msg = False, False, 0, 0, ''

    for tap in taps:
        (failed, changed, msg) = remove_tap(module, brew_path, tap)
        if failed:
            # Stop at the first failure; counts reflect work done so far.
            break
        if changed:
            removed += 1
        else:
            unchanged += 1

    if failed:
        msg = 'removed: %d, unchanged: %d, error: ' + msg
        msg = msg % (removed, unchanged)
    elif removed:
        changed = True
        msg = 'removed: %d, unchanged: %d' % (removed, unchanged)
    else:
        msg = 'removed: %d, unchanged: %d' % (removed, unchanged)

    return (failed, changed, msg)
|
||||
|
||||
|
||||
def main():
    """Entry point: tap or untap Homebrew repositories per module params."""
    module = AnsibleModule(
        argument_spec=dict(
            name=dict(aliases=['tap'], type='list', required=True),
            url=dict(default=None, required=False),
            state=dict(default='present', choices=['present', 'absent']),
        ),
        supports_check_mode=True,
    )

    # brew is commonly outside the default PATH on macOS.
    brew_path = module.get_bin_path(
        'brew',
        required=True,
        opt_dirs=['/usr/local/bin']
    )

    taps = module.params['name']
    url = module.params['url']

    if module.params['state'] == 'present':
        if url is None:
            # No tap URL provided explicitly, continue with bulk addition
            # of all the taps.
            failed, changed, msg = add_taps(module, brew_path, taps)
        else:
            # When a tap URL is provided explicitly, we allow adding a
            # *single* tap only. Validate and proceed to add the single tap.
            if len(taps) > 1:
                msg = "List of multiple taps may not be provided with 'url' option."
                module.fail_json(msg=msg)
            else:
                failed, changed, msg = add_tap(module, brew_path, taps[0], url)

        if failed:
            module.fail_json(msg=msg)
        else:
            module.exit_json(changed=changed, msg=msg)

    elif module.params['state'] == 'absent':
        failed, changed, msg = remove_taps(module, brew_path, taps)

        if failed:
            module.fail_json(msg=msg)
        else:
            module.exit_json(changed=changed, msg=msg)
|
||||
|
||||
|
||||
# Standard Ansible module entry point.
if __name__ == '__main__':
    main()
|
||||
295
plugins/modules/packaging/os/installp.py
Normal file
295
plugins/modules/packaging/os/installp.py
Normal file
@@ -0,0 +1,295 @@
|
||||
#!/usr/bin/python
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
# Copyright: (c) 2017, Kairo Araujo <kairo@kairo.eti.br>
|
||||
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||
|
||||
from __future__ import absolute_import, division, print_function
|
||||
__metaclass__ = type
|
||||
|
||||
ANSIBLE_METADATA = {'metadata_version': '1.1',
|
||||
'status': ['preview'],
|
||||
'supported_by': 'community'}
|
||||
|
||||
DOCUMENTATION = r'''
|
||||
---
|
||||
module: installp
|
||||
author:
|
||||
- Kairo Araujo (@kairoaraujo)
|
||||
short_description: Manage packages on AIX
|
||||
description:
|
||||
- Manage packages using 'installp' on AIX
|
||||
options:
|
||||
accept_license:
|
||||
description:
|
||||
- Whether to accept the license for the package(s).
|
||||
type: bool
|
||||
default: no
|
||||
name:
|
||||
description:
|
||||
- One or more packages to install or remove.
|
||||
- Use C(all) to install all packages available on informed C(repository_path).
|
||||
type: list
|
||||
required: true
|
||||
aliases: [ pkg ]
|
||||
repository_path:
|
||||
description:
|
||||
- Path with AIX packages (required to install).
|
||||
type: path
|
||||
state:
|
||||
description:
|
||||
- Whether the package needs to be present on or absent from the system.
|
||||
type: str
|
||||
choices: [ absent, present ]
|
||||
default: present
|
||||
notes:
|
||||
- If the package is already installed, even the package/fileset is new, the module will not install it.
|
||||
'''
|
||||
|
||||
EXAMPLES = r'''
|
||||
- name: Install package foo
|
||||
installp:
|
||||
name: foo
|
||||
repository_path: /repository/AIX71/installp/base
|
||||
accept_license: yes
|
||||
state: present
|
||||
|
||||
- name: Install bos.sysmgt that includes bos.sysmgt.nim.master, bos.sysmgt.nim.spot
|
||||
installp:
|
||||
name: bos.sysmgt
|
||||
repository_path: /repository/AIX71/installp/base
|
||||
accept_license: yes
|
||||
state: present
|
||||
|
||||
- name: Install bos.sysmgt.nim.master only
|
||||
installp:
|
||||
name: bos.sysmgt.nim.master
|
||||
repository_path: /repository/AIX71/installp/base
|
||||
accept_license: yes
|
||||
state: present
|
||||
|
||||
- name: Install bos.sysmgt.nim.master and bos.sysmgt.nim.spot
|
||||
installp:
|
||||
name: bos.sysmgt.nim.master, bos.sysmgt.nim.spot
|
||||
repository_path: /repository/AIX71/installp/base
|
||||
accept_license: yes
|
||||
state: present
|
||||
|
||||
- name: Remove packages bos.sysmgt.nim.master
|
||||
installp:
|
||||
name: bos.sysmgt.nim.master
|
||||
state: absent
|
||||
'''
|
||||
|
||||
RETURN = r''' # '''
|
||||
|
||||
import os
|
||||
import re
|
||||
|
||||
from ansible.module_utils.basic import AnsibleModule
|
||||
|
||||
|
||||
def _check_new_pkg(module, package, repository_path):
    """
    Check if the package or fileset name is available at the repository path.

    :param module: Ansible module arguments spec.
    :param package: Package/fileset name (or 'all').
    :param repository_path: Repository package path.
    :return: (bool, package information) — (False, None) if not found.
    """
    if not os.path.isdir(repository_path):
        module.fail_json(msg="Repository path %s is not valid." % repository_path)

    installp_cmd = module.get_bin_path('installp', True)
    # List every installable package/fileset at the repository path.
    rc, package_result, err = module.run_command("%s -l -MR -d %s" % (installp_cmd, repository_path))
    if rc != 0:
        module.fail_json(msg="Failed to run installp.", rc=rc, err=err)

    if package == 'all':
        pkg_info = "All packages on dir"
        return True, pkg_info

    pkg_info = {}
    for line in package_result.splitlines():
        # Escape the user-supplied name: AIX fileset names contain dots
        # (e.g. "bos.sysmgt") which would otherwise act as regex wildcards
        # and match unrelated lines.
        if re.findall(re.escape(package), line):
            pkg_name = line.split()[0].strip()
            pkg_version = line.split()[1].strip()
            pkg_info[pkg_name] = pkg_version
            return True, pkg_info

    return False, None
|
||||
|
||||
|
||||
def _check_installed_pkg(module, package, repository_path):
    """
    Check the package on AIX.
    It verifies if the package is installed and informations

    :param module: Ansible module parameters spec.
    :param package: Package/fileset name.
    :param repository_path: Repository package path.
    :return: Bool, package data.
    """
    lslpp_cmd = module.get_bin_path('lslpp', True)
    rc, lslpp_result, err = module.run_command("%s -lcq %s*" % (lslpp_cmd, package))

    # rc 1 with a "not installed." tail on stderr means "absent", not an error.
    if rc == 1:
        if ' '.join(err.split()[-2:]) == 'not installed.':
            return False, None
        module.fail_json(msg="Failed to run lslpp.", rc=rc, err=err)

    if rc != 0:
        module.fail_json(msg="Failed to run lslpp.", rc=rc, err=err)

    # Colon-separated output: package:fileset:level:...
    pkg_data = {}
    for record in lslpp_result.splitlines():
        pkg_name, fileset, level = record.split(':')[0:3]
        pkg_data[pkg_name] = fileset, level

    return True, pkg_data
|
||||
|
||||
|
||||
def remove(module, installp_cmd, packages):
    """Uninstall each installed package in *packages*.

    :param module: AnsibleModule instance.
    :param installp_cmd: path to the installp binary.
    :param packages: list of package/fileset names.
    :return: (changed, msg) tuple.
    """
    repository_path = None
    remove_count = 0
    removed_pkgs = []
    not_found_pkg = []
    for package in packages:
        pkg_check, dummy = _check_installed_pkg(module, package, repository_path)

        if pkg_check:
            if not module.check_mode:
                rc, remove_out, err = module.run_command("%s -u %s" % (installp_cmd, package))
                if rc != 0:
                    module.fail_json(msg="Failed to run installp.", rc=rc, err=err)
            # In check mode the removal is only simulated but still counted.
            remove_count += 1
            removed_pkgs.append(package)

        else:
            not_found_pkg.append(package)

    if remove_count > 0:
        # Was `> 1`: with exactly one missing package the
        # "Package(s) not found:" label was silently omitted.
        if len(not_found_pkg) > 0:
            not_found_pkg.insert(0, "Package(s) not found: ")

        changed = True
        msg = "Packages removed: %s. %s " % (' '.join(removed_pkgs), ' '.join(not_found_pkg))

    else:
        changed = False
        msg = ("No packages removed, all packages not found: %s" % ' '.join(not_found_pkg))

    return changed, msg
|
||||
|
||||
|
||||
def install(module, installp_cmd, packages, repository_path, accept_license):
    """Install each package in *packages* from *repository_path*.

    :param module: AnsibleModule instance.
    :param installp_cmd: path to the installp binary.
    :param packages: list of package/fileset names (or 'all').
    :param repository_path: directory holding the installable packages.
    :param accept_license: bool, pass -Y to installp to accept licenses.
    :return: (changed, msg) tuple.
    """
    installed_pkgs = []
    not_found_pkgs = []
    already_installed_pkgs = {}

    # Maps the boolean module option onto the installp flag.
    accept_license_param = {
        True: '-Y',
        False: '',
    }

    # Validate if package exists on repository path.
    for package in packages:
        pkg_check, pkg_data = _check_new_pkg(module, package, repository_path)

        # If package exists on repository path, check if package is installed.
        if pkg_check:
            pkg_check_current, pkg_info = _check_installed_pkg(module, package, repository_path)

            # If package is already installed.
            if pkg_check_current:
                # Check if package is a package and not a fileset, get version
                # and add the package into already installed list
                if package in pkg_info.keys():
                    already_installed_pkgs[package] = pkg_info[package][1]

                else:
                    # If the package is not a package but a fileset, confirm
                    # and add the fileset/package into already installed list
                    for key in pkg_info.keys():
                        if package in pkg_info[key]:
                            already_installed_pkgs[package] = pkg_info[key][1]

            else:
                # Not installed yet: run installp (skipped in check mode,
                # but the package is still counted as installed for the msg).
                if not module.check_mode:
                    rc, out, err = module.run_command("%s -a %s -X -d %s %s" % (installp_cmd, accept_license_param[accept_license], repository_path, package))
                    if rc != 0:
                        module.fail_json(msg="Failed to run installp", rc=rc, err=err)
                installed_pkgs.append(package)

        else:
            not_found_pkgs.append(package)

    # Assemble the result message from the three outcome buckets.
    if len(installed_pkgs) > 0:
        installed_msg = (" Installed: %s." % ' '.join(installed_pkgs))
    else:
        installed_msg = ''

    if len(not_found_pkgs) > 0:
        not_found_msg = (" Not found: %s." % ' '.join(not_found_pkgs))
    else:
        not_found_msg = ''

    if len(already_installed_pkgs) > 0:
        already_installed_msg = (" Already installed: %s." % already_installed_pkgs)
    else:
        already_installed_msg = ''

    if len(installed_pkgs) > 0:
        changed = True
        msg = ("%s%s%s" % (installed_msg, not_found_msg, already_installed_msg))
    else:
        changed = False
        msg = ("No packages installed.%s%s%s" % (installed_msg, not_found_msg, already_installed_msg))

    return changed, msg
|
||||
|
||||
|
||||
def main():
    """Entry point: install or remove AIX packages via installp."""
    module = AnsibleModule(
        argument_spec=dict(
            name=dict(type='list', required=True, aliases=['pkg']),
            repository_path=dict(type='path'),
            accept_license=dict(type='bool', default=False),
            state=dict(type='str', default='present', choices=['absent', 'present']),
        ),
        supports_check_mode=True,
    )

    name = module.params['name']
    repository_path = module.params['repository_path']
    accept_license = module.params['accept_license']
    state = module.params['state']

    installp_cmd = module.get_bin_path('installp', True)

    if state == 'present':
        # Installing needs a source directory; removal does not.
        if repository_path is None:
            module.fail_json(msg="repository_path is required to install package")

        changed, msg = install(module, installp_cmd, name, repository_path, accept_license)

    elif state == 'absent':
        changed, msg = remove(module, installp_cmd, name)

    else:
        # Unreachable in practice: argument_spec restricts the choices.
        module.fail_json(changed=False, msg="Unexpected state.")

    module.exit_json(changed=changed, msg=msg)
|
||||
|
||||
|
||||
# Standard Ansible module entry point.
if __name__ == '__main__':
    main()
|
||||
269
plugins/modules/packaging/os/layman.py
Normal file
269
plugins/modules/packaging/os/layman.py
Normal file
@@ -0,0 +1,269 @@
|
||||
#!/usr/bin/python
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
# (c) 2014, Jakub Jirutka <jakub@jirutka.cz>
|
||||
#
|
||||
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||
|
||||
from __future__ import absolute_import, division, print_function
|
||||
__metaclass__ = type
|
||||
|
||||
|
||||
ANSIBLE_METADATA = {'metadata_version': '1.1',
|
||||
'status': ['preview'],
|
||||
'supported_by': 'community'}
|
||||
|
||||
|
||||
DOCUMENTATION = '''
|
||||
---
|
||||
module: layman
|
||||
author: "Jakub Jirutka (@jirutka)"
|
||||
short_description: Manage Gentoo overlays
|
||||
description:
|
||||
- Uses Layman to manage an additional repositories for the Portage package manager on Gentoo Linux.
|
||||
Please note that Layman must be installed on a managed node prior using this module.
|
||||
requirements:
|
||||
- "python >= 2.6"
|
||||
- layman python module
|
||||
options:
|
||||
name:
|
||||
description:
|
||||
- The overlay id to install, synchronize, or uninstall.
|
||||
Use 'ALL' to sync all of the installed overlays (can be used only when C(state=updated)).
|
||||
required: true
|
||||
list_url:
|
||||
description:
|
||||
- An URL of the alternative overlays list that defines the overlay to install.
|
||||
This list will be fetched and saved under C(${overlay_defs})/${name}.xml), where
|
||||
C(overlay_defs) is readed from the Layman's configuration.
|
||||
state:
|
||||
description:
|
||||
- Whether to install (C(present)), sync (C(updated)), or uninstall (C(absent)) the overlay.
|
||||
default: present
|
||||
choices: [present, absent, updated]
|
||||
validate_certs:
|
||||
description:
|
||||
- If C(no), SSL certificates will not be validated. This should only be
|
||||
set to C(no) when no other option exists. Prior to 1.9.3 the code
|
||||
defaulted to C(no).
|
||||
type: bool
|
||||
default: 'yes'
|
||||
'''
|
||||
|
||||
EXAMPLES = '''
|
||||
# Install the overlay 'mozilla' which is on the central overlays list.
|
||||
- layman:
|
||||
name: mozilla
|
||||
|
||||
# Install the overlay 'cvut' from the specified alternative list.
|
||||
- layman:
|
||||
name: cvut
|
||||
list_url: 'http://raw.github.com/cvut/gentoo-overlay/master/overlay.xml'
|
||||
|
||||
# Update (sync) the overlay 'cvut', or install if not installed yet.
|
||||
- layman:
|
||||
name: cvut
|
||||
list_url: 'http://raw.github.com/cvut/gentoo-overlay/master/overlay.xml'
|
||||
state: updated
|
||||
|
||||
# Update (sync) all of the installed overlays.
|
||||
- layman:
|
||||
name: ALL
|
||||
state: updated
|
||||
|
||||
# Uninstall the overlay 'cvut'.
|
||||
- layman:
|
||||
name: cvut
|
||||
state: absent
|
||||
'''
|
||||
|
||||
import shutil
|
||||
import traceback
|
||||
|
||||
from os import path
|
||||
|
||||
LAYMAN_IMP_ERR = None
|
||||
try:
|
||||
from layman.api import LaymanAPI
|
||||
from layman.config import BareConfig
|
||||
HAS_LAYMAN_API = True
|
||||
except ImportError:
|
||||
LAYMAN_IMP_ERR = traceback.format_exc()
|
||||
HAS_LAYMAN_API = False
|
||||
|
||||
from ansible.module_utils.basic import AnsibleModule, missing_required_lib
|
||||
from ansible.module_utils.urls import fetch_url
|
||||
|
||||
|
||||
USERAGENT = 'ansible-httpget'
|
||||
|
||||
|
||||
class ModuleError(Exception):
    """Error raised by this module's helpers; reported via fail_json in main()."""
    pass
|
||||
|
||||
|
||||
def init_layman(config=None):
    '''Returns the initialized ``LaymanAPI``.

    :param config: the layman's configuration to use (optional)
    '''
    # Fall back to the system configuration file when none was supplied.
    cfg = BareConfig(read_configfile=True, quietness=1) if config is None else config
    return LaymanAPI(cfg)
|
||||
|
||||
|
||||
def download_url(module, url, dest):
    '''Download *url* and write the body to *dest*.

    :param url: the URL to download
    :param dest: the absolute path of where to save the downloaded content to;
        it must be writable and not a directory

    :raises ModuleError
    '''
    # Hack to add params in the form that fetch_url expects
    module.params['http_agent'] = USERAGENT
    response, info = fetch_url(module, url)
    if info['status'] != 200:
        raise ModuleError("Failed to get %s: %s" % (url, info['msg']))

    try:
        # Stream the response to disk instead of buffering it in memory.
        with open(dest, 'w') as f:
            shutil.copyfileobj(response, f)
    except IOError as e:
        raise ModuleError("Failed to write: %s" % str(e))
|
||||
|
||||
|
||||
def install_overlay(module, name, list_url=None):
    '''Installs the overlay repository. If not on the central overlays list,
    then :list_url of an alternative list must be provided. The list will be
    fetched and saved under ``%(overlay_defs)/%(name.xml)`` (location of the
    ``overlay_defs`` is read from the Layman's configuration).

    :param name: the overlay id
    :param list_url: the URL of the remote repositories list to look for the overlay
        definition (optional, default: None)

    :returns: True if the overlay was installed, or False if already exists
        (i.e. nothing has changed)
    :raises ModuleError
    '''
    # read Layman configuration
    layman_conf = BareConfig(read_configfile=True)
    layman = init_layman(layman_conf)

    if layman.is_installed(name):
        return False

    # Report-only exit before any side effects in check mode.
    if module.check_mode:
        mymsg = 'Would add layman repo \'' + name + '\''
        module.exit_json(changed=True, msg=mymsg)

    if not layman.is_repo(name):
        if not list_url:
            raise ModuleError("Overlay '%s' is not on the list of known "
                              "overlays and URL of the remote list was not provided." % name)

        # Save the alternative overlay list where layman discovers definitions.
        overlay_defs = layman_conf.get_option('overlay_defs')
        dest = path.join(overlay_defs, name + '.xml')

        download_url(module, list_url, dest)

        # reload config so the freshly saved definition is picked up
        layman = init_layman()

    if not layman.add_repos(name):
        raise ModuleError(layman.get_errors())

    return True
|
||||
|
||||
|
||||
def uninstall_overlay(module, name):
    '''Uninstalls the given overlay repository from the system.

    :param name: the overlay id to uninstall

    :returns: True if the overlay was uninstalled, or False if doesn't exist
        (i.e. nothing has changed)
    :raises ModuleError
    '''
    layman = init_layman()

    if not layman.is_installed(name):
        return False

    # Report-only exit before any side effects in check mode.
    if module.check_mode:
        mymsg = 'Would remove layman repo \'' + name + '\''
        module.exit_json(changed=True, msg=mymsg)

    # delete_repos() reports problems via get_errors() rather than a
    # return value, so check that explicitly.
    layman.delete_repos(name)
    if layman.get_errors():
        raise ModuleError(layman.get_errors())

    return True
|
||||
|
||||
|
||||
def sync_overlay(name):
    '''Synchronizes the specified overlay repository.

    :param name: the overlay repository id to sync
    :raises ModuleError
    '''
    layman = init_layman()

    if layman.sync(name):
        return

    # Collect the per-repo error messages from the sync results.
    raise ModuleError([str(item[1]) for item in layman.sync_results[2]])
|
||||
|
||||
|
||||
def sync_overlays():
    '''Synchronize all of the installed overlays.

    :raises ModuleError
    '''
    layman = init_layman()
    # Sync each installed overlay in turn; the first failure aborts.
    for overlay_id in layman.get_installed():
        sync_overlay(overlay_id)
|
||||
|
||||
|
||||
def main():
    """Entry point: install, sync, or remove a Gentoo overlay via layman."""
    # define module
    module = AnsibleModule(
        argument_spec=dict(
            name=dict(required=True),
            list_url=dict(aliases=['url']),
            state=dict(default="present", choices=['present', 'absent', 'updated']),
            validate_certs=dict(required=False, default=True, type='bool'),
        ),
        supports_check_mode=True
    )

    if not HAS_LAYMAN_API:
        module.fail_json(msg=missing_required_lib('Layman'), exception=LAYMAN_IMP_ERR)

    state, name, url = (module.params[key] for key in ['state', 'name', 'list_url'])

    changed = False
    try:
        if state == 'present':
            changed = install_overlay(module, name, url)

        elif state == 'updated':
            if name == 'ALL':
                sync_overlays()
            elif install_overlay(module, name, url):
                changed = True
            else:
                sync_overlay(name)
        else:
            changed = uninstall_overlay(module, name)

    except ModuleError as e:
        # `e.message` was removed in Python 3 and raised AttributeError
        # here; str(e) is correct on both Python 2 and 3.
        module.fail_json(msg=str(e))
    else:
        module.exit_json(changed=changed, name=name)
|
||||
|
||||
|
||||
# Standard Ansible module entry point.
if __name__ == '__main__':
    main()
|
||||
309
plugins/modules/packaging/os/macports.py
Normal file
309
plugins/modules/packaging/os/macports.py
Normal file
@@ -0,0 +1,309 @@
|
||||
#!/usr/bin/python
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
# (c) 2013, Jimmy Tang <jcftang@gmail.com>
|
||||
# Based on okpg (Patrick Pelletier <pp.pelletier@gmail.com>), pacman
|
||||
# (Afterburn) and pkgin (Shaun Zinck) modules
|
||||
#
|
||||
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||
|
||||
from __future__ import absolute_import, division, print_function
|
||||
__metaclass__ = type
|
||||
|
||||
|
||||
ANSIBLE_METADATA = {'metadata_version': '1.1',
|
||||
'status': ['preview'],
|
||||
'supported_by': 'community'}
|
||||
|
||||
|
||||
DOCUMENTATION = '''
|
||||
---
|
||||
module: macports
|
||||
author: "Jimmy Tang (@jcftang)"
|
||||
short_description: Package manager for MacPorts
|
||||
description:
|
||||
- Manages MacPorts packages (ports)
|
||||
options:
|
||||
name:
|
||||
description:
|
||||
- A list of port names.
|
||||
aliases: ['port']
|
||||
type: list
|
||||
elements: str
|
||||
selfupdate:
|
||||
description:
|
||||
- Update Macports and the ports tree, either prior to installing ports or as a separate step.
|
||||
- Equivalent to running C(port selfupdate).
|
||||
aliases: ['update_cache', 'update_ports']
|
||||
default: "no"
|
||||
type: bool
|
||||
state:
|
||||
description:
|
||||
- Indicates the desired state of the port.
|
||||
choices: [ 'present', 'absent', 'active', 'inactive' ]
|
||||
default: present
|
||||
upgrade:
|
||||
description:
|
||||
- Upgrade all outdated ports, either prior to installing ports or as a separate step.
|
||||
- Equivalent to running C(port upgrade outdated).
|
||||
default: "no"
|
||||
type: bool
|
||||
variant:
|
||||
description:
|
||||
- A port variant specification.
|
||||
- 'C(variant) is only supported with state: I(installed)/I(present).'
|
||||
aliases: ['variants']
|
||||
'''
|
||||
EXAMPLES = '''
|
||||
- name: Install the foo port
|
||||
macports:
|
||||
name: foo
|
||||
|
||||
- name: Install the universal, x11 variant of the foo port
|
||||
macports:
|
||||
name: foo
|
||||
variant: +universal+x11
|
||||
|
||||
- name: Install a list of ports
|
||||
macports:
|
||||
name: "{{ ports }}"
|
||||
vars:
|
||||
ports:
|
||||
- foo
|
||||
- foo-tools
|
||||
|
||||
- name: Update Macports and the ports tree, then upgrade all outdated ports
|
||||
macports:
|
||||
selfupdate: yes
|
||||
upgrade: yes
|
||||
|
||||
- name: Update Macports and the ports tree, then install the foo port
|
||||
macports:
|
||||
name: foo
|
||||
selfupdate: yes
|
||||
|
||||
- name: Remove the foo port
|
||||
macports:
|
||||
name: foo
|
||||
state: absent
|
||||
|
||||
- name: Activate the foo port
|
||||
macports:
|
||||
name: foo
|
||||
state: active
|
||||
|
||||
- name: Deactivate the foo port
|
||||
macports:
|
||||
name: foo
|
||||
state: inactive
|
||||
'''
|
||||
|
||||
import re
|
||||
|
||||
from ansible.module_utils.basic import AnsibleModule
|
||||
from ansible.module_utils.six.moves import shlex_quote
|
||||
|
||||
|
||||
def selfupdate(module, port_path):
    """ Update Macports and the ports tree. """
    rc, out, err = module.run_command("%s -v selfupdate" % port_path)

    if rc != 0:
        module.fail_json(msg="Failed to update Macports", stdout=out, stderr=err)

    # The verbose output mentions a non-zero parsed-ports count or a new
    # MacPorts release only when something actually changed.
    for line in out.split('\n'):
        if not line:
            continue
        stripped = line.strip()
        if (re.search(r'Total number of ports parsed:\s+[^0]', stripped) or
                re.search(r'Installing new Macports release', stripped)):
            return (True, "Macports updated successfully")

    return (False, "Macports already up-to-date")
|
||||
|
||||
|
||||
def upgrade(module, port_path):
    """ Upgrade outdated ports. """
    rc, out, err = module.run_command("%s upgrade outdated" % port_path)

    # rc is 1 when nothing to upgrade so check stdout first.
    if out.strip() == "Nothing to upgrade.":
        return (False, "Ports already upgraded")

    if rc == 0:
        return (True, "Outdated ports upgraded successfully")

    module.fail_json(msg="Failed to upgrade outdated ports", stdout=out, stderr=err)
|
||||
|
||||
|
||||
def query_port(module, port_path, name, state="present"):
    """ Returns whether a port is installed or not.

    :param state: "present" checks installation, "active" checks activation.
        NOTE(review): any other state falls through and implicitly returns
        None (falsy) — confirm callers only ever pass these two values.
    """
    if state == "present":

        # NOTE(review): grep matches the name as a substring of the
        # installed list, so "foo" also matches "foo-tools" — confirm
        # whether exact-name matching is intended here.
        rc, out, err = module.run_command("%s installed | grep -q ^.*%s" % (shlex_quote(port_path), shlex_quote(name)), use_unsafe_shell=True)
        if rc == 0:
            return True

        return False

    elif state == "active":

        # grep -q succeeds iff the named port's entry is marked "active".
        rc, out, err = module.run_command("%s installed %s | grep -q active" % (shlex_quote(port_path), shlex_quote(name)), use_unsafe_shell=True)

        if rc == 0:
            return True

        return False
|
||||
|
||||
|
||||
def remove_ports(module, port_path, ports):
    """ Uninstalls one or more ports if installed. """
    removed = 0
    # Process ports one at a time so a failure can name the offending port.
    for port in ports:
        # Query the port first, to see if we even need to remove.
        if not query_port(module, port_path, port):
            continue

        rc, out, err = module.run_command("%s uninstall %s" % (port_path, port))

        # Success is judged by re-querying rather than by rc.
        if query_port(module, port_path, port):
            module.fail_json(msg="Failed to remove %s: %s" % (port, err))

        removed += 1

    if removed > 0:
        module.exit_json(changed=True, msg="Removed %s port(s)" % removed)

    module.exit_json(changed=False, msg="Port(s) already absent")
|
||||
|
||||
|
||||
def install_ports(module, port_path, ports, variant):
    """ Installs one or more ports if not already installed.

    :param ports: list of port names.
    :param variant: optional variant spec (e.g. "+universal"); may be None.
    """
    install_c = 0

    for port in ports:
        # Skip ports that are already present.
        if query_port(module, port_path, port):
            continue

        # A missing variant must become an empty string: interpolating the
        # None default directly would pass a literal "None" argument to port.
        rc, out, err = module.run_command("%s install %s %s" % (port_path, port, variant if variant else ''))

        # Success is judged by re-querying rather than by rc.
        if not query_port(module, port_path, port):
            module.fail_json(msg="Failed to install %s: %s" % (port, err))

        install_c += 1

    if install_c > 0:
        module.exit_json(changed=True, msg="Installed %s port(s)" % (install_c))

    module.exit_json(changed=False, msg="Port(s) already present")
|
||||
|
||||
|
||||
def activate_ports(module, port_path, ports):
    """ Activate a port if it's inactive. """
    activated = 0

    for port in ports:
        # An uninstalled port cannot be activated.
        if not query_port(module, port_path, port):
            module.fail_json(msg="Failed to activate %s, port(s) not present" % (port))

        if query_port(module, port_path, port, state="active"):
            continue

        rc, out, err = module.run_command("%s activate %s" % (port_path, port))

        # Success is judged by re-querying rather than by rc.
        if not query_port(module, port_path, port, state="active"):
            module.fail_json(msg="Failed to activate %s: %s" % (port, err))

        activated += 1

    if activated > 0:
        module.exit_json(changed=True, msg="Activated %s port(s)" % (activated))

    module.exit_json(changed=False, msg="Port(s) already active")
|
||||
|
||||
|
||||
def deactivate_ports(module, port_path, ports):
    """ Deactivate a port if it's active. """
    deactivated = 0

    for port in ports:
        # An uninstalled port cannot be deactivated.
        if not query_port(module, port_path, port):
            module.fail_json(msg="Failed to deactivate %s, port(s) not present" % (port))

        if not query_port(module, port_path, port, state="active"):
            continue

        rc, out, err = module.run_command("%s deactivate %s" % (port_path, port))

        # Success is judged by re-querying rather than by rc.
        if query_port(module, port_path, port, state="active"):
            module.fail_json(msg="Failed to deactivate %s: %s" % (port, err))

        deactivated += 1

    if deactivated > 0:
        module.exit_json(changed=True, msg="Deactivated %s port(s)" % (deactivated))

    module.exit_json(changed=False, msg="Port(s) already inactive")
|
||||
|
||||
|
||||
def main():
    """Entry point: manage MacPorts packages per module params."""
    module = AnsibleModule(
        argument_spec=dict(
            name=dict(type='list', elements='str', aliases=["port"]),
            selfupdate=dict(aliases=["update_cache", "update_ports"], default=False, type='bool'),
            state=dict(default="present", choices=["present", "installed", "absent", "removed", "active", "inactive"]),
            upgrade=dict(default=False, type='bool'),
            variant=dict(aliases=["variants"], default=None, type='str')
        )
    )

    # port lives under /opt/local/bin on standard MacPorts installs.
    port_path = module.get_bin_path('port', True, ['/opt/local/bin'])

    p = module.params

    if p["selfupdate"]:
        (changed, msg) = selfupdate(module, port_path)
        # selfupdate alone (no ports, no upgrade) is a complete run.
        if not (p["name"] or p["upgrade"]):
            module.exit_json(changed=changed, msg=msg)

    if p["upgrade"]:
        (changed, msg) = upgrade(module, port_path)
        # upgrade without a port list is a complete run.
        if not p["name"]:
            module.exit_json(changed=changed, msg=msg)

    pkgs = p["name"]

    variant = p["variant"]

    # Each handler below exits the module itself via exit_json/fail_json.
    if p["state"] in ["present", "installed"]:
        install_ports(module, port_path, pkgs, variant)

    elif p["state"] in ["absent", "removed"]:
        remove_ports(module, port_path, pkgs)

    elif p["state"] == "active":
        activate_ports(module, port_path, pkgs)

    elif p["state"] == "inactive":
        deactivate_ports(module, port_path, pkgs)
|
||||
|
||||
|
||||
# Standard Ansible module entry point.
if __name__ == '__main__':
    main()
|
||||
289
plugins/modules/packaging/os/mas.py
Normal file
289
plugins/modules/packaging/os/mas.py
Normal file
@@ -0,0 +1,289 @@
|
||||
#!/usr/bin/python
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
# Copyright: (c) 2020, Lukas Bestle <project-ansible@lukasbestle.com>
|
||||
# Copyright: (c) 2017, Michael Heap <m@michaelheap.com>
|
||||
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||
|
||||
from __future__ import absolute_import, division, print_function
|
||||
__metaclass__ = type
|
||||
|
||||
|
||||
ANSIBLE_METADATA = {'metadata_version': '1.1',
|
||||
'status': ['preview'],
|
||||
'supported_by': 'community'}
|
||||
|
||||
DOCUMENTATION = '''
|
||||
module: mas
|
||||
short_description: Manage Mac App Store applications with mas-cli
|
||||
description:
|
||||
- Installs, uninstalls and updates macOS applications from the Mac App Store using the C(mas-cli).
|
||||
author:
|
||||
- Michael Heap (@mheap)
|
||||
- Lukas Bestle (@lukasbestle)
|
||||
options:
|
||||
id:
|
||||
description:
|
||||
- The Mac App Store identifier of the app(s) you want to manage.
|
||||
- This can be found by running C(mas search APP_NAME) on your machine.
|
||||
type: list
|
||||
elements: int
|
||||
state:
|
||||
description:
|
||||
- Desired state of the app installation.
|
||||
- The C(absent) value requires root permissions, also see the examples.
|
||||
type: str
|
||||
choices:
|
||||
- absent
|
||||
- latest
|
||||
- present
|
||||
default: present
|
||||
upgrade_all:
|
||||
description:
|
||||
- Upgrade all installed Mac App Store apps.
|
||||
type: bool
|
||||
default: "no"
|
||||
aliases: ["upgrade"]
|
||||
requirements:
|
||||
- macOS 10.11+
|
||||
- "mas-cli (U(https://github.com/mas-cli/mas)) 1.5.0+ available as C(mas) in the bin path"
|
||||
- The Apple ID to use already needs to be signed in to the Mac App Store (check with C(mas account)).
|
||||
notes:
|
||||
- This module supports C(check_mode).
|
||||
'''
|
||||
|
||||
EXAMPLES = '''
|
||||
- name: Install Keynote
|
||||
mas:
|
||||
id: 409183694
|
||||
state: present
|
||||
|
||||
- name: Install a list of apps
|
||||
mas:
|
||||
id:
|
||||
- 409183694 # Keynote
|
||||
- 413857545 # Divvy
|
||||
state: present
|
||||
|
||||
- name: Ensure the latest Keynote version is installed
|
||||
mas:
|
||||
id: 409183694
|
||||
state: latest
|
||||
|
||||
- name: Upgrade all installed Mac App Store apps
|
||||
mas:
|
||||
upgrade_all: yes
|
||||
|
||||
- name: Install specific apps and also upgrade all others
|
||||
mas:
|
||||
id:
|
||||
- 409183694 # Keynote
|
||||
- 413857545 # Divvy
|
||||
state: present
|
||||
upgrade_all: yes
|
||||
|
||||
- name: Uninstall Divvy
|
||||
mas:
|
||||
id: 413857545
|
||||
state: absent
|
||||
become: yes # Uninstallation requires root permissions
|
||||
'''
|
||||
|
||||
RETURN = r''' # '''
|
||||
|
||||
from ansible.module_utils.basic import AnsibleModule
|
||||
from ansible.module_utils._text import to_native
|
||||
from distutils.version import StrictVersion
|
||||
import os
|
||||
|
||||
|
||||
class Mas(object):
    """Wrapper around the ``mas`` CLI that tracks install/upgrade/uninstall
    counts across a module run and produces the final module result."""

    def __init__(self, module):
        self.module = module

        # Initialize data properties
        self.mas_path = self.module.get_bin_path('mas')
        self._checked_signin = False
        self._installed = None  # Populated only if needed
        self._outdated = None  # Populated only if needed
        self.count_install = 0
        self.count_upgrade = 0
        self.count_uninstall = 0
        self.result = {
            'changed': False
        }

        # Fail early if `mas` is missing or too old.
        self.check_mas_tool()

    def app_command(self, command, id):
        ''' Runs a `mas` command on a given app; command can be 'install', 'upgrade' or 'uninstall' '''

        if not self.module.check_mode:
            # Uninstalling works without being signed in; install/upgrade do not.
            if command != 'uninstall':
                self.check_signin()

            rc, out, err = self.run([command, str(id)])
            if rc != 0:
                self.module.fail_json(
                    msg="Error running command '{0}' on app '{1}': {2}".format(command, str(id), out.rstrip())
                )

        # No error or dry run
        # Bump the matching counter (count_install / count_upgrade / count_uninstall).
        self.__dict__['count_' + command] += 1

    def check_mas_tool(self):
        ''' Verifies that the `mas` tool is available in a recent version '''

        # Is the `mas` tool available at all?
        if not self.mas_path:
            self.module.fail_json(msg='Required `mas` tool is not installed')

        # Is the version recent enough?
        rc, out, err = self.run(['version'])
        if rc != 0 or not out.strip() or StrictVersion(out.strip()) < StrictVersion('1.5.0'):
            self.module.fail_json(msg='`mas` tool in version 1.5.0+ needed, got ' + out.strip())

    def check_signin(self):
        ''' Verifies that the user is signed in to the Mac App Store '''

        # Only check this once per execution
        if self._checked_signin:
            return

        rc, out, err = self.run(['account'])
        if out.split("\n", 1)[0].rstrip() == 'Not signed in':
            self.module.fail_json(msg='You must be signed in to the Mac App Store')

        self._checked_signin = True

    def exit(self):
        ''' Exit with the data we have collected over time '''

        msgs = []
        if self.count_install > 0:
            msgs.append('Installed {0} app(s)'.format(self.count_install))
        if self.count_upgrade > 0:
            msgs.append('Upgraded {0} app(s)'.format(self.count_upgrade))
        if self.count_uninstall > 0:
            msgs.append('Uninstalled {0} app(s)'.format(self.count_uninstall))

        # Any counted operation means the run changed something.
        if msgs:
            self.result['changed'] = True
            self.result['msg'] = ', '.join(msgs)

        self.module.exit_json(**self.result)

    def get_current_state(self, command):
        ''' Returns the list of all app IDs; command can either be 'list' or 'outdated' '''

        rc, raw_apps, err = self.run([command])
        rows = raw_apps.split("\n")
        apps = []
        for r in rows:
            # Format: "123456789 App Name"
            r = r.split(' ', 1)
            if len(r) == 2:
                apps.append(int(r[0]))

        return apps

    def installed(self):
        ''' Returns the list of installed apps '''

        # Populate cache if not already done
        if self._installed is None:
            self._installed = self.get_current_state('list')

        return self._installed

    def is_installed(self, id):
        ''' Checks whether the given app is installed '''

        return int(id) in self.installed()

    def is_outdated(self, id):
        ''' Checks whether the given app is installed, but outdated '''

        return int(id) in self.outdated()

    def outdated(self):
        ''' Returns the list of installed, but outdated apps '''

        # Populate cache if not already done
        if self._outdated is None:
            self._outdated = self.get_current_state('outdated')

        return self._outdated

    def run(self, cmd):
        ''' Runs a command of the `mas` tool '''

        cmd.insert(0, self.mas_path)
        # Second positional argument is check_rc=False: callers inspect rc themselves.
        return self.module.run_command(cmd, False)

    def upgrade_all(self):
        ''' Upgrades all installed apps and sets the correct result data '''

        # Snapshot the outdated list first so we know how many apps the
        # upgrade will touch (also counts them in check mode).
        outdated = self.outdated()

        if not self.module.check_mode:
            self.check_signin()

            rc, out, err = self.run(['upgrade'])
            if rc != 0:
                self.module.fail_json(msg='Could not upgrade all apps: ' + out.rstrip())

        self.count_upgrade += len(outdated)
|
||||
|
||||
|
||||
def main():
    """Entry point: apply the requested state to each app ID, then optionally upgrade everything."""
    module = AnsibleModule(
        argument_spec=dict(
            id=dict(type='list', elements='int'),
            state=dict(type='str', default='present', choices=['absent', 'latest', 'present']),
            upgrade_all=dict(type='bool', default=False, aliases=['upgrade']),
        ),
        supports_check_mode=True
    )
    mas = Mas(module)

    apps = module.params['id'] or []
    state = module.params['state']
    upgrade = module.params['upgrade_all']

    # Process each requested app exactly once, in deterministic order.
    for app in sorted(set(apps)):
        installed = mas.is_installed(app)

        if state == 'present':
            if not installed:
                mas.app_command('install', app)

        elif state == 'absent':
            if installed:
                # Uninstalling through `mas` only works as root.
                if os.getuid() != 0:
                    module.fail_json(msg="Uninstalling apps requires root permissions ('become: yes')")

                mas.app_command('uninstall', app)

        elif state == 'latest':
            if not installed:
                mas.app_command('install', app)
            elif mas.is_outdated(app):
                mas.app_command('upgrade', app)

    # Upgrade all apps if requested; invalidate the cached outdated list
    # first, because the per-app operations above may have changed it.
    mas._outdated = None
    if upgrade and mas.outdated():
        mas.upgrade_all()

    # Exit with the collected data
    mas.exit()


if __name__ == '__main__':
    main()
|
||||
634
plugins/modules/packaging/os/openbsd_pkg.py
Normal file
634
plugins/modules/packaging/os/openbsd_pkg.py
Normal file
@@ -0,0 +1,634 @@
|
||||
#!/usr/bin/python
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
# (c) 2013, Patrik Lundin <patrik@sigterm.se>
|
||||
#
|
||||
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||
|
||||
from __future__ import absolute_import, division, print_function
|
||||
__metaclass__ = type
|
||||
|
||||
|
||||
ANSIBLE_METADATA = {'metadata_version': '1.1',
|
||||
'status': ['preview'],
|
||||
'supported_by': 'community'}
|
||||
|
||||
DOCUMENTATION = '''
|
||||
---
|
||||
module: openbsd_pkg
|
||||
author:
|
||||
- Patrik Lundin (@eest)
|
||||
short_description: Manage packages on OpenBSD
|
||||
description:
|
||||
- Manage packages on OpenBSD using the pkg tools.
|
||||
requirements:
|
||||
- python >= 2.5
|
||||
options:
|
||||
name:
|
||||
description:
|
||||
- A name or a list of names of the packages.
|
||||
required: yes
|
||||
state:
|
||||
description:
|
||||
- C(present) will make sure the package is installed.
|
||||
C(latest) will make sure the latest version of the package is installed.
|
||||
C(absent) will make sure the specified package is not installed.
|
||||
choices: [ absent, latest, present ]
|
||||
default: present
|
||||
build:
|
||||
description:
|
||||
- Build the package from source instead of downloading and installing
|
||||
a binary. Requires that the port source tree is already installed.
|
||||
Automatically builds and installs the 'sqlports' package, if it is
|
||||
not already installed.
|
||||
type: bool
|
||||
default: 'no'
|
||||
ports_dir:
|
||||
description:
|
||||
- When used in combination with the C(build) option, allows overriding
|
||||
the default ports source directory.
|
||||
default: /usr/ports
|
||||
clean:
|
||||
description:
|
||||
- When updating or removing packages, delete the extra configuration
|
||||
file(s) in the old packages which are annotated with @extra in
|
||||
the packaging-list.
|
||||
type: bool
|
||||
default: 'no'
|
||||
quick:
|
||||
description:
|
||||
- Replace or delete packages quickly; do not bother with checksums
|
||||
before removing normal files.
|
||||
type: bool
|
||||
default: 'no'
|
||||
notes:
|
||||
- When used with a `loop:` each package will be processed individually,
|
||||
it is much more efficient to pass the list directly to the `name` option.
|
||||
'''
|
||||
|
||||
EXAMPLES = '''
|
||||
- name: Make sure nmap is installed
|
||||
openbsd_pkg:
|
||||
name: nmap
|
||||
state: present
|
||||
|
||||
- name: Make sure nmap is the latest version
|
||||
openbsd_pkg:
|
||||
name: nmap
|
||||
state: latest
|
||||
|
||||
- name: Make sure nmap is not installed
|
||||
openbsd_pkg:
|
||||
name: nmap
|
||||
state: absent
|
||||
|
||||
- name: Make sure nmap is installed, build it from source if it is not
|
||||
openbsd_pkg:
|
||||
name: nmap
|
||||
state: present
|
||||
build: yes
|
||||
|
||||
- name: Specify a pkg flavour with '--'
|
||||
openbsd_pkg:
|
||||
name: vim--no_x11
|
||||
state: present
|
||||
|
||||
- name: Specify the default flavour to avoid ambiguity errors
|
||||
openbsd_pkg:
|
||||
name: vim--
|
||||
state: present
|
||||
|
||||
- name: Specify a package branch (requires at least OpenBSD 6.0)
|
||||
openbsd_pkg:
|
||||
name: python%3.5
|
||||
state: present
|
||||
|
||||
- name: Update all packages on the system
|
||||
openbsd_pkg:
|
||||
name: '*'
|
||||
state: latest
|
||||
|
||||
- name: Purge a package and it's configuration files
|
||||
openbsd_pkg:
|
||||
name: mpd
|
||||
clean: yes
|
||||
state: absent
|
||||
|
||||
- name: Quickly remove a package without checking checksums
|
||||
openbsd_pkg:
|
||||
name: qt5
|
||||
quick: yes
|
||||
state: absent
|
||||
'''
|
||||
|
||||
import os
|
||||
import platform
|
||||
import re
|
||||
import shlex
|
||||
import sqlite3
|
||||
|
||||
from distutils.version import StrictVersion
|
||||
|
||||
from ansible.module_utils.basic import AnsibleModule
|
||||
|
||||
|
||||
# Function used for executing commands.
def execute_command(cmd, module):
    """Run *cmd* through the module without shell interpretation.

    The command string is split into an argv list here, which makes
    run_command() use shell=False and thereby prevents shell expansion
    of special characters like '*'.
    """
    argv = shlex.split(cmd)
    return module.run_command(argv)
|
||||
|
||||
|
||||
# Function used to find out if a package is currently installed.
def get_package_state(names, pkg_spec, module):
    """Record installed_state (and installed_names) in *pkg_spec* for every name."""
    for name in names:
        rc, stdout, stderr = execute_command("pkg_info -Iq inst:%s" % name, module)

        if stderr:
            module.fail_json(msg="failed in get_package_state(): " + stderr)

        if not stdout:
            pkg_spec[name]['installed_state'] = False
            continue

        # If the requested package name is just a stem, like "python",
        # several installed packages may match it.
        pkg_spec[name]['installed_names'] = stdout.splitlines()
        module.debug("get_package_state(): installed_names = %s" % pkg_spec[name]['installed_names'])
        pkg_spec[name]['installed_state'] = True
|
||||
|
||||
|
||||
# Function used to make sure a package is present.
def package_present(names, pkg_spec, module):
    """Install every not-yet-installed package in *names*.

    Per-name results (rc, stdout, stderr, changed) are stored in *pkg_spec*.
    When module.params['build'] is true the package is compiled from the
    ports tree instead of fetched as a binary package.
    """
    build = module.params['build']

    for name in names:
        # It is possible package_present() has been called from package_latest().
        # In that case we do not want to operate on the whole list of names,
        # only the leftovers.
        if pkg_spec['package_latest_leftovers']:
            if name not in pkg_spec['package_latest_leftovers']:
                module.debug("package_present(): ignoring '%s' which is not a package_latest() leftover" % name)
                continue
            else:
                module.debug("package_present(): handling package_latest() leftovers, installing '%s'" % name)

        if module.check_mode:
            install_cmd = 'pkg_add -Imn'
        else:
            if build is True:
                port_dir = "%s/%s" % (module.params['ports_dir'], get_package_source_path(name, pkg_spec, module))
                if os.path.isdir(port_dir):
                    if pkg_spec[name]['flavor']:
                        flavors = pkg_spec[name]['flavor'].replace('-', ' ')
                        install_cmd = "cd %s && make clean=depends && FLAVOR=\"%s\" make install && make clean=depends" % (port_dir, flavors)
                    elif pkg_spec[name]['subpackage']:
                        install_cmd = "cd %s && make clean=depends && SUBPACKAGE=\"%s\" make install && make clean=depends" % (port_dir,
                                                                                                                              pkg_spec[name]['subpackage'])
                    else:
                        install_cmd = "cd %s && make install && make clean=depends" % (port_dir)
                else:
                    module.fail_json(msg="the port source directory %s does not exist" % (port_dir))
            else:
                install_cmd = 'pkg_add -Im'

        if pkg_spec[name]['installed_state'] is False:

            # Attempt to install the package.
            if build is True and not module.check_mode:
                # The build command is a shell pipeline ("cd ... && make ..."),
                # so it needs use_unsafe_shell. BUGFIX: the previous code passed
                # the module object as a stray second positional argument, which
                # run_command() interprets as check_rc=True — a failed build
                # would then fail_json immediately, bypassing the rc/stderr
                # handling below.
                (pkg_spec[name]['rc'], pkg_spec[name]['stdout'], pkg_spec[name]['stderr']) = module.run_command(install_cmd, use_unsafe_shell=True)
            else:
                (pkg_spec[name]['rc'], pkg_spec[name]['stdout'], pkg_spec[name]['stderr']) = execute_command("%s %s" % (install_cmd, name), module)

            # The behaviour of pkg_add is a bit different depending on if a
            # specific version is supplied or not.
            #
            # When a specific version is supplied the return code will be 0 when
            # a package is found and 1 when it is not. If a version is not
            # supplied the tool will exit 0 in both cases.
            #
            # It is important to note that "version" relates to the
            # packages-specs(7) notion of a version. If using the branch syntax
            # (like "python%3.5") even though a branch name may look like a
            # version string it is not used an one by pkg_add.
            if pkg_spec[name]['version'] or build is True:
                # Depend on the return code.
                module.debug("package_present(): depending on return code for name '%s'" % name)
                if pkg_spec[name]['rc']:
                    pkg_spec[name]['changed'] = False
            else:
                # Depend on stderr instead.
                module.debug("package_present(): depending on stderr for name '%s'" % name)
                if pkg_spec[name]['stderr']:
                    # There is a corner case where having an empty directory in
                    # installpath prior to the right location will result in a
                    # "file:/local/package/directory/ is empty" message on stderr
                    # while still installing the package, so we need to look for
                    # for a message like "packagename-1.0: ok" just in case.
                    match = re.search(r"\W%s-[^:]+: ok\W" % pkg_spec[name]['stem'], pkg_spec[name]['stdout'])

                    if match:
                        # It turns out we were able to install the package.
                        module.debug("package_present(): we were able to install package for name '%s'" % name)
                    else:
                        # We really did fail, fake the return code.
                        module.debug("package_present(): we really did fail for name '%s'" % name)
                        pkg_spec[name]['rc'] = 1
                        pkg_spec[name]['changed'] = False
                else:
                    module.debug("package_present(): stderr was not set for name '%s'" % name)

            if pkg_spec[name]['rc'] == 0:
                pkg_spec[name]['changed'] = True

        else:
            # Already installed: report a clean no-op.
            pkg_spec[name]['rc'] = 0
            pkg_spec[name]['stdout'] = ''
            pkg_spec[name]['stderr'] = ''
            pkg_spec[name]['changed'] = False
|
||||
|
||||
|
||||
# Function used to make sure a package is the latest available version.
def package_latest(names, pkg_spec, module):
    """Upgrade every already-installed package in *names*.

    Names that are not installed at all are collected in
    pkg_spec['package_latest_leftovers'] and handed to package_present()
    at the end. Not supported together with build=true.
    """
    if module.params['build'] is True:
        module.fail_json(msg="the combination of build=%s and state=latest is not supported" % module.params['build'])

    # pkg_add flags: -u upgrade, -m show progress; extended below by options.
    upgrade_cmd = 'pkg_add -um'

    if module.check_mode:
        upgrade_cmd += 'n'

    if module.params['clean']:
        upgrade_cmd += 'c'

    if module.params['quick']:
        upgrade_cmd += 'q'

    for name in names:
        if pkg_spec[name]['installed_state'] is True:

            # Attempt to upgrade the package.
            (pkg_spec[name]['rc'], pkg_spec[name]['stdout'], pkg_spec[name]['stderr']) = execute_command("%s %s" % (upgrade_cmd, name), module)

            # Look for output looking something like "nmap-6.01->6.25: ok" to see if
            # something changed (or would have changed). Use \W to delimit the match
            # from progress meter output.
            pkg_spec[name]['changed'] = False
            for installed_name in pkg_spec[name]['installed_names']:
                module.debug("package_latest(): checking for pre-upgrade package name: %s" % installed_name)
                match = re.search(r"\W%s->.+: ok\W" % installed_name, pkg_spec[name]['stdout'])
                if match:
                    module.debug("package_latest(): pre-upgrade package name match: %s" % installed_name)

                    pkg_spec[name]['changed'] = True
                    break

            # FIXME: This part is problematic. Based on the issues mentioned (and
            # handled) in package_present() it is not safe to blindly trust stderr
            # as an indicator that the command failed, and in the case with
            # empty installpath directories this will break.
            #
            # For now keep this safeguard here, but ignore it if we managed to
            # parse out a successful update above. This way we will report a
            # successful run when we actually modify something but fail
            # otherwise.
            if pkg_spec[name]['changed'] is not True:
                if pkg_spec[name]['stderr']:
                    pkg_spec[name]['rc'] = 1

        else:
            # Note packages that need to be handled by package_present
            module.debug("package_latest(): package '%s' is not installed, will be handled by package_present()" % name)
            pkg_spec['package_latest_leftovers'].append(name)

    # If there were any packages that were not installed we call
    # package_present() which will handle those.
    if pkg_spec['package_latest_leftovers']:
        module.debug("package_latest(): calling package_present() to handle leftovers")
        package_present(names, pkg_spec, module)
|
||||
|
||||
|
||||
# Function used to make sure a package is not installed.
def package_absent(names, pkg_spec, module):
    """Remove every installed package in *names*, recording results in *pkg_spec*."""
    remove_cmd = 'pkg_delete -I'
    if module.check_mode:
        remove_cmd += 'n'   # dry run only
    if module.params['clean']:
        remove_cmd += 'c'   # also delete @extra configuration files
    if module.params['quick']:
        remove_cmd += 'q'   # skip checksums before removing files

    for name in names:
        entry = pkg_spec[name]

        if entry['installed_state'] is not True:
            # Not installed: report a clean no-op.
            entry['rc'] = 0
            entry['stdout'] = ''
            entry['stderr'] = ''
            entry['changed'] = False
            continue

        # Attempt to remove the package.
        (entry['rc'], entry['stdout'], entry['stderr']) = execute_command("%s %s" % (remove_cmd, name), module)
        entry['changed'] = entry['rc'] == 0
|
||||
|
||||
|
||||
# Function used to parse the package name based on packages-specs(7).
# The general name structure is "stem-version[-flavors]".
#
# Names containing "%" are a special variation not part of the
# packages-specs(7) syntax. See pkg_add(1) on OpenBSD 6.0 or later for a
# description.
def parse_package_name(names, pkg_spec, module):
    """Parse each name into stem/version/flavor/branch parts stored in *pkg_spec*.

    Each name is classified as one of three styles: 'version' (stem-1.2),
    'versionless' (stem--[flavor]) or 'stem' (bare stem). Fails the module
    on names that cannot be parsed.
    """

    # Initialize empty list of package_latest() leftovers.
    pkg_spec['package_latest_leftovers'] = []

    for name in names:
        module.debug("parse_package_name(): parsing name: %s" % name)
        # Do some initial matches so we can base the more advanced regex on that.
        version_match = re.search("-[0-9]", name)
        versionless_match = re.search("--", name)

        # Stop if someone is giving us a name that both has a version and is
        # version-less at the same time.
        if version_match and versionless_match:
            module.fail_json(msg="package name both has a version and is version-less: " + name)

        # All information for a given name is kept in the pkg_spec keyed by that name.
        pkg_spec[name] = {}

        # If name includes a version.
        if version_match:
            match = re.search("^(?P<stem>[^%]+)-(?P<version>[0-9][^-]*)(?P<flavor_separator>-)?(?P<flavor>[a-z].*)?(%(?P<branch>.+))?$", name)
            if match:
                pkg_spec[name]['stem'] = match.group('stem')
                pkg_spec[name]['version_separator'] = '-'
                pkg_spec[name]['version'] = match.group('version')
                pkg_spec[name]['flavor_separator'] = match.group('flavor_separator')
                pkg_spec[name]['flavor'] = match.group('flavor')
                pkg_spec[name]['branch'] = match.group('branch')
                pkg_spec[name]['style'] = 'version'
                module.debug("version_match: stem: %(stem)s, version: %(version)s, flavor_separator: %(flavor_separator)s, "
                             "flavor: %(flavor)s, branch: %(branch)s, style: %(style)s" % pkg_spec[name])
            else:
                module.fail_json(msg="unable to parse package name at version_match: " + name)

        # If name includes no version but is version-less ("--").
        elif versionless_match:
            match = re.search("^(?P<stem>[^%]+)--(?P<flavor>[a-z].*)?(%(?P<branch>.+))?$", name)
            if match:
                pkg_spec[name]['stem'] = match.group('stem')
                pkg_spec[name]['version_separator'] = '-'
                pkg_spec[name]['version'] = None
                pkg_spec[name]['flavor_separator'] = '-'
                pkg_spec[name]['flavor'] = match.group('flavor')
                pkg_spec[name]['branch'] = match.group('branch')
                pkg_spec[name]['style'] = 'versionless'
                module.debug("versionless_match: stem: %(stem)s, flavor: %(flavor)s, branch: %(branch)s, style: %(style)s" % pkg_spec[name])
            else:
                module.fail_json(msg="unable to parse package name at versionless_match: " + name)

        # If name includes no version, and is not version-less, it is all a
        # stem, possibly with a branch (%branchname) tacked on at the
        # end.
        else:
            match = re.search("^(?P<stem>[^%]+)(%(?P<branch>.+))?$", name)
            if match:
                pkg_spec[name]['stem'] = match.group('stem')
                pkg_spec[name]['version_separator'] = None
                pkg_spec[name]['version'] = None
                pkg_spec[name]['flavor_separator'] = None
                pkg_spec[name]['flavor'] = None
                pkg_spec[name]['branch'] = match.group('branch')
                pkg_spec[name]['style'] = 'stem'
                module.debug("stem_match: stem: %(stem)s, branch: %(branch)s, style: %(style)s" % pkg_spec[name])
            else:
                module.fail_json(msg="unable to parse package name at else: " + name)

        # Verify that the managed host is new enough to support branch syntax.
        if pkg_spec[name]['branch']:
            branch_release = "6.0"

            if StrictVersion(platform.release()) < StrictVersion(branch_release):
                module.fail_json(msg="package name using 'branch' syntax requires at least OpenBSD %s: %s" % (branch_release, name))

        # Sanity check that there are no trailing dashes in flavor.
        # Try to stop strange stuff early so we can be strict later.
        if pkg_spec[name]['flavor']:
            match = re.search("-$", pkg_spec[name]['flavor'])
            if match:
                module.fail_json(msg="trailing dash in flavor: " + pkg_spec[name]['flavor'])
|
||||
|
||||
|
||||
# Function used for figuring out the port path.
def get_package_source_path(name, pkg_spec, module):
    """Return the ports-tree path (e.g. 'databases/sqlports') for *name*.

    Looks the package up in the sqlports database, first by exact
    fullpkgname match and then by a fuzzier LIKE pattern. Also records any
    subpackage suffix in pkg_spec[name]['subpackage']. Fails the module
    when no match or more than one match is found.
    """
    pkg_spec[name]['subpackage'] = None
    # Special case: sqlports itself must be resolvable before its database exists.
    if pkg_spec[name]['stem'] == 'sqlports':
        return 'databases/sqlports'
    else:
        # try for an exact match first
        sqlports_db_file = '/usr/local/share/sqlports'
        if not os.path.isfile(sqlports_db_file):
            module.fail_json(msg="sqlports file '%s' is missing" % sqlports_db_file)

        conn = sqlite3.connect(sqlports_db_file)
        first_part_of_query = 'SELECT fullpkgpath, fullpkgname FROM ports WHERE fullpkgname'
        query = first_part_of_query + ' = ?'
        module.debug("package_package_source_path(): exact query: %s" % query)
        cursor = conn.execute(query, (name,))
        results = cursor.fetchall()

        # next, try for a fuzzier match
        if len(results) < 1:
            # Build a LIKE pattern from stem + version (or '%' wildcard when no version).
            looking_for = pkg_spec[name]['stem'] + (pkg_spec[name]['version_separator'] or '-') + (pkg_spec[name]['version'] or '%')
            query = first_part_of_query + ' LIKE ?'
            if pkg_spec[name]['flavor']:
                looking_for += pkg_spec[name]['flavor_separator'] + pkg_spec[name]['flavor']
                module.debug("package_package_source_path(): fuzzy flavor query: %s" % query)
                cursor = conn.execute(query, (looking_for,))
            elif pkg_spec[name]['style'] == 'versionless':
                # Versionless names must not accidentally match versioned package names.
                query += ' AND fullpkgname NOT LIKE ?'
                module.debug("package_package_source_path(): fuzzy versionless query: %s" % query)
                cursor = conn.execute(query, (looking_for, "%s-%%" % looking_for,))
            else:
                module.debug("package_package_source_path(): fuzzy query: %s" % query)
                cursor = conn.execute(query, (looking_for,))
            results = cursor.fetchall()

        # error if we don't find exactly 1 match
        conn.close()
        if len(results) < 1:
            module.fail_json(msg="could not find a port by the name '%s'" % name)
        if len(results) > 1:
            matches = map(lambda x: x[1], results)
            module.fail_json(msg="too many matches, unsure which to build: %s" % ' OR '.join(matches))

        # there's exactly 1 match, so figure out the subpackage, if any, then return
        fullpkgpath = results[0][0]
        parts = fullpkgpath.split(',')
        if len(parts) > 1 and parts[1][0] == '-':
            pkg_spec[name]['subpackage'] = parts[1]
        return parts[0]
|
||||
|
||||
|
||||
# Function used for upgrading all installed packages.
def upgrade_packages(pkg_spec, module):
    """Upgrade every installed package and record the outcome under pkg_spec['*']."""
    upgrade_cmd = 'pkg_add -Imnu' if module.check_mode else 'pkg_add -Imu'

    # Create a minimal pkg_spec entry for '*' to store return values.
    result = pkg_spec['*'] = {}

    # Attempt to upgrade all packages.
    result['rc'], result['stdout'], result['stderr'] = execute_command(upgrade_cmd, module)

    # Any occurrence of a package changing version looks like
    # "bzip2-1.0.6->1.0.6p0: ok" in the output.
    result['changed'] = bool(re.search(r"\W\w.+->.+: ok\W", result['stdout']))

    # It seems we can not trust the return value, so depend on the presence of
    # stderr to know if something failed.
    result['rc'] = 1 if result['stderr'] else 0
|
||||
|
||||
|
||||
# ===========================================
# Main control flow.
def main():
    """Entry point: parse module arguments, perform the requested package
    action(s) and report the combined changed/failed status.

    Exits via module.exit_json / module.fail_json in every path.
    """
    module = AnsibleModule(
        argument_spec=dict(
            name=dict(type='list', required=True),
            state=dict(type='str', default='present', choices=['absent', 'installed', 'latest', 'present', 'removed']),
            build=dict(type='bool', default=False),
            ports_dir=dict(type='path', default='/usr/ports'),
            quick=dict(type='bool', default=False),
            clean=dict(type='bool', default=False),
        ),
        supports_check_mode=True
    )

    name = module.params['name']
    state = module.params['state']
    build = module.params['build']
    ports_dir = module.params['ports_dir']

    # NOTE: the original block also initialized unused rc/stdout/stderr
    # locals here; they were never read and have been removed.
    result = {}
    result['name'] = name
    result['state'] = state
    result['build'] = build

    # The data structure used to keep track of package information.
    pkg_spec = {}

    if build is True:
        if not os.path.isdir(ports_dir):
            module.fail_json(msg="the ports source directory %s does not exist" % (ports_dir))

        # build sqlports if its not installed yet
        parse_package_name(['sqlports'], pkg_spec, module)
        get_package_state(['sqlports'], pkg_spec, module)
        if not pkg_spec['sqlports']['installed_state']:
            module.debug("main(): installing 'sqlports' because build=%s" % module.params['build'])
            package_present(['sqlports'], pkg_spec, module)

    # '*' means "upgrade everything" and may not be combined with other names.
    asterisk_name = False
    for n in name:
        if n == '*':
            if len(name) != 1:
                module.fail_json(msg="the package name '*' can not be mixed with other names")

            asterisk_name = True

    if asterisk_name:
        if state != 'latest':
            module.fail_json(msg="the package name '*' is only valid when using state=latest")
        else:
            # Perform an upgrade of all installed packages.
            upgrade_packages(pkg_spec, module)
    else:
        # Parse package names and put results in the pkg_spec dictionary.
        parse_package_name(name, pkg_spec, module)

        # Not sure how the branch syntax is supposed to play together
        # with build mode. Disable it for now.
        for n in name:
            if pkg_spec[n]['branch'] and module.params['build'] is True:
                module.fail_json(msg="the combination of 'branch' syntax and build=%s is not supported: %s" % (module.params['build'], n))

        # Get state for all package names.
        get_package_state(name, pkg_spec, module)

        # Perform requested action.
        if state in ['installed', 'present']:
            package_present(name, pkg_spec, module)
        elif state in ['absent', 'removed']:
            package_absent(name, pkg_spec, module)
        elif state == 'latest':
            package_latest(name, pkg_spec, module)

    # The combined changed status for all requested packages. If anything
    # is changed this is set to True.
    combined_changed = False

    # The combined failed status for all requested packages. If anything
    # failed this is set to True.
    combined_failed = False

    # We combine all error messages in this comma separated string, for example:
    # "msg": "Can't find nmapp\n, Can't find nmappp\n"
    combined_error_message = ''

    # Loop over all requested package names and check if anything failed or
    # changed.
    for n in name:
        if pkg_spec[n]['rc'] != 0:
            combined_failed = True
            # Prefer stderr for the error text, fall back to stdout.
            if pkg_spec[n]['stderr']:
                if combined_error_message:
                    combined_error_message += ", %s" % pkg_spec[n]['stderr']
                else:
                    combined_error_message = pkg_spec[n]['stderr']
            else:
                if combined_error_message:
                    combined_error_message += ", %s" % pkg_spec[n]['stdout']
                else:
                    combined_error_message = pkg_spec[n]['stdout']

        if pkg_spec[n]['changed'] is True:
            combined_changed = True

    # If combined_error_message contains anything at least some part of the
    # list of requested package names failed.
    if combined_failed:
        module.fail_json(msg=combined_error_message, **result)

    result['changed'] = combined_changed

    module.exit_json(**result)


if __name__ == '__main__':
    main()
|
||||
193
plugins/modules/packaging/os/opkg.py
Normal file
193
plugins/modules/packaging/os/opkg.py
Normal file
@@ -0,0 +1,193 @@
|
||||
#!/usr/bin/python
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
# (c) 2013, Patrick Pelletier <pp.pelletier@gmail.com>
|
||||
# Based on pacman (Afterburn) and pkgin (Shaun Zinck) modules
|
||||
#
|
||||
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||
|
||||
from __future__ import absolute_import, division, print_function
|
||||
__metaclass__ = type
|
||||
|
||||
|
||||
ANSIBLE_METADATA = {'metadata_version': '1.1',
|
||||
'status': ['preview'],
|
||||
'supported_by': 'community'}
|
||||
|
||||
|
||||
DOCUMENTATION = '''
|
||||
---
|
||||
module: opkg
|
||||
author: "Patrick Pelletier (@skinp)"
|
||||
short_description: Package manager for OpenWrt
|
||||
description:
|
||||
- Manages OpenWrt packages
|
||||
options:
|
||||
name:
|
||||
description:
|
||||
- name of package to install/remove
|
||||
required: true
|
||||
state:
|
||||
description:
|
||||
- state of the package
|
||||
choices: [ 'present', 'absent' ]
|
||||
default: present
|
||||
force:
|
||||
description:
|
||||
- opkg --force parameter used
|
||||
choices:
|
||||
- ""
|
||||
- "depends"
|
||||
- "maintainer"
|
||||
- "reinstall"
|
||||
- "overwrite"
|
||||
- "downgrade"
|
||||
- "space"
|
||||
- "postinstall"
|
||||
- "remove"
|
||||
- "checksum"
|
||||
- "removal-of-dependent-packages"
|
||||
default: absent
|
||||
update_cache:
|
||||
description:
|
||||
- update the package db first
|
||||
default: "no"
|
||||
type: bool
|
||||
requirements:
|
||||
- opkg
|
||||
- python
|
||||
'''
|
||||
EXAMPLES = '''
|
||||
- opkg:
|
||||
name: foo
|
||||
state: present
|
||||
|
||||
- opkg:
|
||||
name: foo
|
||||
state: present
|
||||
update_cache: yes
|
||||
|
||||
- opkg:
|
||||
name: foo
|
||||
state: absent
|
||||
|
||||
- opkg:
|
||||
name: foo,bar
|
||||
state: absent
|
||||
|
||||
- opkg:
|
||||
name: foo
|
||||
state: present
|
||||
force: overwrite
|
||||
'''
|
||||
|
||||
from ansible.module_utils.basic import AnsibleModule
|
||||
from ansible.module_utils.six.moves import shlex_quote
|
||||
|
||||
|
||||
def update_package_db(module, opkg_path):
    """Refresh the opkg package list; fail the module if the refresh errors."""
    rc, dummy_out, dummy_err = module.run_command("%s update" % opkg_path)
    if rc:
        module.fail_json(msg="could not update package db")
||||
|
||||
def query_package(module, opkg_path, name, state="present"):
    """Return True when *name* is installed, False otherwise.

    Only state="present" is actually queried; any other state yields False.
    """
    if state != "present":
        return False

    # grep for the exact package name at the start of a list-installed line.
    cmd = "%s list-installed | grep -q \"^%s \"" % (shlex_quote(opkg_path), shlex_quote(name))
    rc, dummy_out, dummy_err = module.run_command(cmd, use_unsafe_shell=True)
    return rc == 0
|
||||
|
||||
def remove_packages(module, opkg_path, packages):
    """Uninstall each listed package that is currently installed.

    Exits the module with changed=True and a count when anything was removed,
    changed=False otherwise; fails naming the first package that would not go.
    """
    force = module.params["force"]
    if force:
        force = "--force-%s" % force

    removed = 0
    # One package per command so a failure can report the exact culprit.
    for package in packages:
        # Skip packages that are not installed in the first place.
        if not query_package(module, opkg_path, package):
            continue

        rc, out, err = module.run_command("%s remove %s %s" % (opkg_path, force, package))

        # Verify by re-querying rather than trusting the exit code.
        if query_package(module, opkg_path, package):
            module.fail_json(msg="failed to remove %s: %s" % (package, out))

        removed += 1

    if removed:
        module.exit_json(changed=True, msg="removed %s package(s)" % removed)

    module.exit_json(changed=False, msg="package(s) already absent")
||||
|
||||
def install_packages(module, opkg_path, packages):
    """Install each listed package that is not already installed.

    Exits the module with changed=True and a count when anything was installed,
    changed=False otherwise; fails naming the first package that did not appear.
    """
    force = module.params["force"]
    if force:
        force = "--force-%s" % force

    installed = 0
    for package in packages:
        # Already present: nothing to do for this one.
        if query_package(module, opkg_path, package):
            continue

        rc, out, err = module.run_command("%s install %s %s" % (opkg_path, force, package))

        # Verify by re-querying rather than trusting the exit code.
        if not query_package(module, opkg_path, package):
            module.fail_json(msg="failed to install %s: %s" % (package, out))

        installed += 1

    if installed:
        module.exit_json(changed=True, msg="installed %s package(s)" % (installed))

    module.exit_json(changed=False, msg="package(s) already present")
|
||||
|
||||
def main():
    """Module entry point: parse arguments, optionally refresh the package
    db, then dispatch to install or remove for the comma-separated names."""
    module = AnsibleModule(
        argument_spec=dict(
            name=dict(aliases=["pkg"], required=True),
            state=dict(default="present", choices=["present", "installed", "absent", "removed"]),
            force=dict(default="", choices=["", "depends", "maintainer", "reinstall", "overwrite", "downgrade", "space", "postinstall", "remove",
                                            "checksum", "removal-of-dependent-packages"]),
            update_cache=dict(default="no", aliases=["update-cache"], type='bool')
        )
    )

    opkg_path = module.get_bin_path('opkg', True, ['/bin'])

    params = module.params

    if params["update_cache"]:
        update_package_db(module, opkg_path)

    # `name` is a plain string; multiple packages come comma-separated.
    packages = params["name"].split(",")

    state = params["state"]
    if state in ["present", "installed"]:
        install_packages(module, opkg_path, packages)
    elif state in ["absent", "removed"]:
        remove_packages(module, opkg_path, packages)


if __name__ == '__main__':
    main()
|
||||
484
plugins/modules/packaging/os/pacman.py
Normal file
484
plugins/modules/packaging/os/pacman.py
Normal file
@@ -0,0 +1,484 @@
|
||||
#!/usr/bin/python
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
# Copyright: (c) 2012, Afterburn <https://github.com/afterburn>
|
||||
# Copyright: (c) 2013, Aaron Bull Schaefer <aaron@elasticdog.com>
|
||||
# Copyright: (c) 2015, Indrajit Raychaudhuri <irc+code@indrajit.com>
|
||||
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||
|
||||
from __future__ import absolute_import, division, print_function
|
||||
__metaclass__ = type
|
||||
|
||||
ANSIBLE_METADATA = {'metadata_version': '1.1',
|
||||
'status': ['preview'],
|
||||
'supported_by': 'community'}
|
||||
|
||||
DOCUMENTATION = '''
|
||||
---
|
||||
module: pacman
|
||||
short_description: Manage packages with I(pacman)
|
||||
description:
|
||||
- Manage packages with the I(pacman) package manager, which is used by Arch Linux and its variants.
|
||||
author:
|
||||
- Indrajit Raychaudhuri (@indrajitr)
|
||||
- Aaron Bull Schaefer (@elasticdog) <aaron@elasticdog.com>
|
||||
- Maxime de Roucy (@tchernomax)
|
||||
options:
|
||||
name:
|
||||
description:
|
||||
- Name or list of names of the package(s) or file(s) to install, upgrade, or remove.
|
||||
Can't be used in combination with C(upgrade).
|
||||
aliases: [ package, pkg ]
|
||||
type: list
|
||||
elements: str
|
||||
|
||||
state:
|
||||
description:
|
||||
- Desired state of the package.
|
||||
default: present
|
||||
choices: [ absent, latest, present ]
|
||||
|
||||
force:
|
||||
description:
|
||||
- When removing package, force remove package, without any checks.
|
||||
Same as `extra_args="--nodeps --nodeps"`.
|
||||
When update_cache, force redownload repo databases.
|
||||
Same as `update_cache_extra_args="--refresh --refresh"`.
|
||||
default: no
|
||||
type: bool
|
||||
|
||||
extra_args:
|
||||
description:
|
||||
- Additional option to pass to pacman when enforcing C(state).
|
||||
default:
|
||||
|
||||
update_cache:
|
||||
description:
|
||||
- Whether or not to refresh the master package lists.
|
||||
- This can be run as part of a package installation or as a separate step.
|
||||
default: no
|
||||
type: bool
|
||||
aliases: [ update-cache ]
|
||||
|
||||
update_cache_extra_args:
|
||||
description:
|
||||
- Additional option to pass to pacman when enforcing C(update_cache).
|
||||
default:
|
||||
|
||||
upgrade:
|
||||
description:
|
||||
- Whether or not to upgrade the whole system.
|
||||
Can't be used in combination with C(name).
|
||||
default: no
|
||||
type: bool
|
||||
|
||||
upgrade_extra_args:
|
||||
description:
|
||||
- Additional option to pass to pacman when enforcing C(upgrade).
|
||||
default:
|
||||
|
||||
notes:
|
||||
- When used with a `loop:` each package will be processed individually,
|
||||
it is much more efficient to pass the list directly to the `name` option.
|
||||
'''
|
||||
|
||||
RETURN = '''
|
||||
packages:
|
||||
description: a list of packages that have been changed
|
||||
returned: when upgrade is set to yes
|
||||
type: list
|
||||
sample: [ package, other-package ]
|
||||
'''
|
||||
|
||||
EXAMPLES = '''
|
||||
- name: Install package foo from repo
|
||||
pacman:
|
||||
name: foo
|
||||
state: present
|
||||
|
||||
- name: Install package bar from file
|
||||
pacman:
|
||||
name: ~/bar-1.0-1-any.pkg.tar.xz
|
||||
state: present
|
||||
|
||||
- name: Install package foo from repo and bar from file
|
||||
pacman:
|
||||
name:
|
||||
- foo
|
||||
- ~/bar-1.0-1-any.pkg.tar.xz
|
||||
state: present
|
||||
|
||||
- name: Upgrade package foo
|
||||
pacman:
|
||||
name: foo
|
||||
state: latest
|
||||
update_cache: yes
|
||||
|
||||
- name: Remove packages foo and bar
|
||||
pacman:
|
||||
name:
|
||||
- foo
|
||||
- bar
|
||||
state: absent
|
||||
|
||||
- name: Recursively remove package baz
|
||||
pacman:
|
||||
name: baz
|
||||
state: absent
|
||||
extra_args: --recursive
|
||||
|
||||
- name: Run the equivalent of "pacman -Sy" as a separate step
|
||||
pacman:
|
||||
update_cache: yes
|
||||
|
||||
- name: Run the equivalent of "pacman -Su" as a separate step
|
||||
pacman:
|
||||
upgrade: yes
|
||||
|
||||
- name: Run the equivalent of "pacman -Syu" as a separate step
|
||||
pacman:
|
||||
update_cache: yes
|
||||
upgrade: yes
|
||||
|
||||
- name: Run the equivalent of "pacman -Rdd", force remove package baz
|
||||
pacman:
|
||||
name: baz
|
||||
state: absent
|
||||
force: yes
|
||||
'''
|
||||
|
||||
import re
|
||||
|
||||
from ansible.module_utils.basic import AnsibleModule
|
||||
|
||||
|
||||
def get_version(pacman_output):
    """Take pacman -Qi or pacman -Si output and return the Version value.

    Returns None when no "Version" field is present in the output.
    """
    for line in pacman_output.split('\n'):
        if line.startswith('Version '):
            # Split on the FIRST ':' only: pacman versions may themselves
            # contain a colon (epoch form, e.g. "1:2.0-1"), which the old
            # unbounded split truncated to just the epoch digit.
            return line.split(':', 1)[1].strip()
    return None
|
||||
|
||||
def get_name(module, pacman_output):
    """Take pacman -Qi or pacman -Si output and return the package Name value.

    Fails the module if no "Name" field can be found in the output.
    """
    for info_line in pacman_output.split('\n'):
        if info_line.startswith('Name '):
            return info_line.split(':')[1].strip()
    module.fail_json(msg="get_name: fail to retrieve package name from pacman output")
|
||||
|
||||
def query_package(module, pacman_path, name, state="present"):
    """Query the package status in both the local system and the repository. Returns a boolean to indicate if the package is installed, a second
    boolean to indicate if the package is up-to-date and a third boolean to indicate whether online information were available
    """
    if state == "present":
        lcmd = "%s --query --info %s" % (pacman_path, name)
        lrc, lstdout, lstderr = module.run_command(lcmd, check_rc=False)
        if lrc != 0:
            # package is not installed locally
            return False, False, False
        else:
            # a zero exit code doesn't always mean the package is installed:
            # for example, if the package name queried is "provided" by another package
            installed_name = get_name(module, lstdout)
            if installed_name != name:
                return False, False, False

        # get the version installed locally (if any)
        lversion = get_version(lstdout)

        rcmd = "%s --sync --info %s" % (pacman_path, name)
        rrc, rstdout, rstderr = module.run_command(rcmd, check_rc=False)
        # get the version in the repository
        rversion = get_version(rstdout)

        if rrc == 0:
            # Return True to indicate that the package is installed locally, and the result of the version number comparison
            # to determine if the package is up-to-date.
            return True, (lversion == rversion), False

        # package is installed but cannot fetch remote Version. Last True stands for the error
        return True, True, True
|
||||
|
||||
def update_package_db(module, pacman_path):
    """Refresh the pacman sync databases (pacman -Sy).

    force=True appends a doubled --refresh, which makes pacman redownload the
    databases even if they appear up to date. Returns True on success, fails
    the module otherwise.
    """
    if module.params['force']:
        module.params["update_cache_extra_args"] += " --refresh --refresh"

    cmd = "%s --sync --refresh %s" % (pacman_path, module.params["update_cache_extra_args"])
    rc, stdout, stderr = module.run_command(cmd, check_rc=False)

    if rc != 0:
        module.fail_json(msg="could not update package db")
    return True
|
||||
|
||||
def upgrade(module, pacman_path):
    """Upgrade the whole system (pacman -Su) and report which packages changed.

    First runs `pacman -Qu` to list pending upgrades (also used for check mode
    and --diff output), then performs the actual upgrade. Every path exits via
    module.exit_json / module.fail_json.
    """
    cmdupgrade = "%s --sync --sysupgrade --quiet --noconfirm %s" % (pacman_path, module.params["upgrade_extra_args"])
    cmdneedrefresh = "%s --query --upgrades" % (pacman_path)
    rc, stdout, stderr = module.run_command(cmdneedrefresh, check_rc=False)
    # Drop empty lines. (The old `data.remove('')` raised ValueError whenever
    # stdout did not contain a trailing blank element.)
    data = [line for line in stdout.split('\n') if line]
    packages = []
    diff = {
        'before': '',
        'after': '',
    }

    if rc == 0:
        # Match lines of `pacman -Qu` output of the form:
        # (package name) (before version-release) -> (after version-release)
        # e.g., "ansible 2.7.1-1 -> 2.7.2-1"
        regex = re.compile(r'([\w+\-.@]+) (\S+-\S+) -> (\S+-\S+)')
        for p in data:
            m = regex.search(p)
            if not m:
                # Skip lines that do not look like an upgrade record; the old
                # code crashed with AttributeError on any non-matching line.
                continue
            packages.append(m.group(1))
            if module._diff:
                diff['before'] += "%s-%s\n" % (m.group(1), m.group(2))
                diff['after'] += "%s-%s\n" % (m.group(1), m.group(3))
        if module.check_mode:
            module.exit_json(changed=True, msg="%s package(s) would be upgraded" % (len(data)), packages=packages, diff=diff)
        rc, stdout, stderr = module.run_command(cmdupgrade, check_rc=False)
        if rc == 0:
            module.exit_json(changed=True, msg='System upgraded', packages=packages, diff=diff)
        else:
            module.fail_json(msg="Could not upgrade")
    else:
        module.exit_json(changed=False, msg='Nothing to upgrade', packages=packages)
|
||||
|
||||
def remove_packages(module, pacman_path, packages):
    """Remove each listed package that is installed (pacman -R).

    force=True appends a doubled --nodeps so pacman skips all dependency
    checks. Exits the module with changed=True and a count when anything was
    removed, changed=False otherwise.
    """
    data = []
    diff = {
        'before': '',
        'after': '',
    }

    if module.params["force"]:
        module.params["extra_args"] += " --nodeps --nodeps"

    remove_c = 0
    # Using a for loop in case of error, we can report the package that failed
    for package in packages:
        # Query the package first, to see if we even need to remove
        installed, updated, unknown = query_package(module, pacman_path, package)
        if not installed:
            continue

        cmd = "%s --remove --noconfirm --noprogressbar %s %s" % (pacman_path, module.params["extra_args"], package)
        rc, stdout, stderr = module.run_command(cmd, check_rc=False)

        if rc != 0:
            module.fail_json(msg="failed to remove %s" % (package))

        if module._diff:
            # Parse removed package names from the 3rd line of pacman's output.
            # NOTE(review): position-sensitive parse of pacman's human output;
            # assumes the package list sits on that exact line — verify against
            # current pacman versions if output format changes.
            d = stdout.split('\n')[2].split(' ')[2:]
            for i, pkg in enumerate(d):
                # Strip path and version suffix; diff records the pre-strip value.
                d[i] = re.sub('-[0-9].*$', '', d[i].split('/')[-1])
                diff['before'] += "%s\n" % pkg
            data.append('\n'.join(d))

        remove_c += 1

    if remove_c > 0:
        module.exit_json(changed=True, msg="removed %s package(s)" % remove_c, diff=diff)

    module.exit_json(changed=False, msg="package(s) already absent")
|
||||
|
||||
def install_packages(module, pacman_path, state, packages, package_files):
    """Install (pacman -S) or upgrade-from-file (pacman -U) the given packages.

    packages and package_files are parallel lists: package_files[i] is the
    local file path for packages[i], or None when the package comes from a
    repository. state is 'present' or 'latest'. Exits the module with the
    combined result.
    """
    install_c = 0
    package_err = []
    message = ""
    data = []
    diff = {
        'before': '',
        'after': '',
    }

    # Partition requested packages into repo installs and file installs,
    # skipping those already satisfied for the requested state.
    to_install_repos = []
    to_install_files = []
    for i, package in enumerate(packages):
        # if the package is installed and state == present or state == latest and is up-to-date then skip
        installed, updated, latestError = query_package(module, pacman_path, package)
        if latestError and state == 'latest':
            # Remote version unknown: remember it so the final message can warn.
            package_err.append(package)

        if installed and (state == 'present' or (state == 'latest' and updated)):
            continue

        if package_files[i]:
            to_install_files.append(package_files[i])
        else:
            to_install_repos.append(package)

    if to_install_repos:
        cmd = "%s --sync --noconfirm --noprogressbar --needed %s %s" % (pacman_path, module.params["extra_args"], " ".join(to_install_repos))
        rc, stdout, stderr = module.run_command(cmd, check_rc=False)

        if rc != 0:
            module.fail_json(msg="failed to install %s: %s" % (" ".join(to_install_repos), stderr))

        # As we pass `--needed` to pacman returns a single line of ` there is nothing to do` if no change is performed.
        # The check for > 3 is here because we pick the 4th line in normal operation.
        if len(stdout.split('\n')) > 3:
            data = stdout.split('\n')[3].split(' ')[2:]
            data = [i for i in data if i != '']
            for i, pkg in enumerate(data):
                # Strip path and version suffix; diff records the pre-strip value.
                data[i] = re.sub('-[0-9].*$', '', data[i].split('/')[-1])
                if module._diff:
                    diff['after'] += "%s\n" % pkg

        install_c += len(to_install_repos)

    if to_install_files:
        # Same flow as above but with `--upgrade` for local package files.
        cmd = "%s --upgrade --noconfirm --noprogressbar --needed %s %s" % (pacman_path, module.params["extra_args"], " ".join(to_install_files))
        rc, stdout, stderr = module.run_command(cmd, check_rc=False)

        if rc != 0:
            module.fail_json(msg="failed to install %s: %s" % (" ".join(to_install_files), stderr))

        # As we pass `--needed` to pacman returns a single line of ` there is nothing to do` if no change is performed.
        # The check for > 3 is here because we pick the 4th line in normal operation.
        if len(stdout.split('\n')) > 3:
            data = stdout.split('\n')[3].split(' ')[2:]
            data = [i for i in data if i != '']
            for i, pkg in enumerate(data):
                data[i] = re.sub('-[0-9].*$', '', data[i].split('/')[-1])
                if module._diff:
                    diff['after'] += "%s\n" % pkg

        install_c += len(to_install_files)

    if state == 'latest' and len(package_err) > 0:
        message = "But could not ensure 'latest' state for %s package(s) as remote version could not be fetched." % (package_err)

    if install_c > 0:
        module.exit_json(changed=True, msg="installed %s package(s). %s" % (install_c, message), diff=diff)

    module.exit_json(changed=False, msg="package(s) already installed. %s" % (message), diff=diff)
|
||||
|
||||
def check_packages(module, pacman_path, packages, state):
    """Check-mode handler: report which packages would change for *state*.

    Exits the module with changed=True and a count (plus --diff output) when
    anything would change, changed=False otherwise.
    """
    would_be_changed = []
    diff = {
        'before': '',
        'after': '',
        'before_header': '',
        'after_header': ''
    }

    for package in packages:
        installed, updated, unknown = query_package(module, pacman_path, package)
        needs_change = (
            (state in ["present", "latest"] and not installed)
            or (state == "absent" and installed)
            or (state == "latest" and not updated)
        )
        if needs_change:
            would_be_changed.append(package)

    if not would_be_changed:
        module.exit_json(changed=False, msg="package(s) already %s" % state, diff=diff)

    # Report "removed" rather than "absent" in the user-facing message.
    if state == "absent":
        state = "removed"

    if module._diff and (state == 'removed'):
        diff['before_header'] = 'removed'
        diff['before'] = '\n'.join(would_be_changed) + '\n'
    elif module._diff and ((state == 'present') or (state == 'latest')):
        diff['after_header'] = 'installed'
        diff['after'] = '\n'.join(would_be_changed) + '\n'

    module.exit_json(changed=True, msg="%s package(s) would be %s" % (
        len(would_be_changed), state), diff=diff)
|
||||
|
||||
def expand_package_groups(module, pacman_path, pkgs):
    """Return pkgs with any pacman group names replaced by their members.

    Names that are not groups pass through unchanged; empty strings are
    dropped.
    """
    expanded = []

    for pkg in pkgs:
        if not pkg:  # avoid empty strings
            continue
        cmd = "%s --sync --groups --quiet %s" % (pacman_path, pkg)
        rc, stdout, stderr = module.run_command(cmd, check_rc=False)

        if rc != 0:
            # Not a group: keep the name as-is.
            expanded.append(pkg)
            continue

        # A group was found matching the name, so expand it.
        expanded.extend(member.strip() for member in stdout.split('\n') if member.strip())

    return expanded
|
||||
|
||||
def main():
    """Module entry point: parse arguments, optionally refresh the sync
    databases, then dispatch to upgrade / install / remove / check-mode
    handlers. Every handler exits the module itself.
    """
    module = AnsibleModule(
        argument_spec=dict(
            name=dict(type='list', elements='str', aliases=['pkg', 'package']),
            state=dict(type='str', default='present', choices=['present', 'installed', 'latest', 'absent', 'removed']),
            force=dict(type='bool', default=False),
            extra_args=dict(type='str', default=''),
            upgrade=dict(type='bool', default=False),
            upgrade_extra_args=dict(type='str', default=''),
            update_cache=dict(type='bool', default=False, aliases=['update-cache']),
            update_cache_extra_args=dict(type='str', default=''),
        ),
        required_one_of=[['name', 'update_cache', 'upgrade']],
        mutually_exclusive=[['name', 'upgrade']],
        supports_check_mode=True,
    )

    pacman_path = module.get_bin_path('pacman', True)
    # Force untranslated pacman output so the text parsers below work.
    module.run_command_environ_update = dict(LC_ALL='C')

    p = module.params

    # normalize the state parameter
    if p['state'] in ['present', 'installed']:
        p['state'] = 'present'
    elif p['state'] in ['absent', 'removed']:
        p['state'] = 'absent'

    if p["update_cache"] and not module.check_mode:
        update_package_db(module, pacman_path)
        # update_cache alone is a complete operation.
        if not (p['name'] or p['upgrade']):
            module.exit_json(changed=True, msg='Updated the package master lists')

    if p['update_cache'] and module.check_mode and not (p['name'] or p['upgrade']):
        module.exit_json(changed=True, msg='Would have updated the package cache')

    if p['upgrade']:
        upgrade(module, pacman_path)

    if p['name']:
        pkgs = expand_package_groups(module, pacman_path, p['name'])

        # pkg_files is kept parallel to pkgs: the file path for local package
        # archives, None for repository packages.
        pkg_files = []
        for i, pkg in enumerate(pkgs):
            if not pkg:  # avoid empty strings
                # NOTE(review): expand_package_groups already drops empty
                # strings; if one ever got here, pkg_files would fall out of
                # step with pkgs — verify if this branch becomes reachable.
                continue
            elif re.match(r".*\.pkg\.tar(\.(gz|bz2|xz|lrz|lzo|Z))?$", pkg):
                # The package given is a filename, extract the raw pkg name from
                # it and store the filename
                pkg_files.append(pkg)
                pkgs[i] = re.sub(r'-[0-9].*$', '', pkgs[i].split('/')[-1])
            else:
                pkg_files.append(None)

        if module.check_mode:
            check_packages(module, pacman_path, pkgs, p['state'])

        if p['state'] in ['present', 'latest']:
            install_packages(module, pacman_path, p['state'], pkgs, pkg_files)
        elif p['state'] == 'absent':
            remove_packages(module, pacman_path, pkgs)
    else:
        module.exit_json(changed=False, msg="No package specified to work on.")


if __name__ == "__main__":
    main()
|
||||
179
plugins/modules/packaging/os/pkg5.py
Normal file
179
plugins/modules/packaging/os/pkg5.py
Normal file
@@ -0,0 +1,179 @@
|
||||
#!/usr/bin/python
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
# Copyright: (c) 2014, Peter Oliver <ansible@mavit.org.uk>
|
||||
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||
|
||||
from __future__ import absolute_import, division, print_function
|
||||
__metaclass__ = type
|
||||
|
||||
ANSIBLE_METADATA = {'metadata_version': '1.1',
|
||||
'status': ['preview'],
|
||||
'supported_by': 'community'}
|
||||
|
||||
DOCUMENTATION = '''
|
||||
---
|
||||
module: pkg5
|
||||
author:
|
||||
- Peter Oliver (@mavit)
|
||||
short_description: Manages packages with the Solaris 11 Image Packaging System
|
||||
description:
|
||||
- IPS packages are the native packages in Solaris 11 and higher.
|
||||
notes:
|
||||
- The naming of IPS packages is explained at U(http://www.oracle.com/technetwork/articles/servers-storage-admin/ips-package-versioning-2232906.html).
|
||||
options:
|
||||
name:
|
||||
description:
|
||||
- An FRMI of the package(s) to be installed/removed/updated.
|
||||
- Multiple packages may be specified, separated by C(,).
|
||||
required: true
|
||||
state:
|
||||
description:
|
||||
- Whether to install (I(present), I(latest)), or remove (I(absent)) a package.
|
||||
choices: [ absent, latest, present ]
|
||||
default: present
|
||||
accept_licenses:
|
||||
description:
|
||||
- Accept any licences.
|
||||
type: bool
|
||||
default: no
|
||||
aliases: [ accept, accept_licences ]
|
||||
be_name:
|
||||
description:
|
||||
- Creates a new boot environment with the given name.
|
||||
type: str
|
||||
refresh:
|
||||
description:
|
||||
- Refresh publishers before execution.
|
||||
type: bool
|
||||
default: yes
|
||||
'''
|
||||
EXAMPLES = '''
|
||||
- name: Install Vim
|
||||
pkg5:
|
||||
name: editor/vim
|
||||
|
||||
- name: Install Vim without refreshing publishers
|
||||
pkg5:
|
||||
name: editor/vim
|
||||
refresh: no
|
||||
|
||||
- name: Remove finger daemon
|
||||
pkg5:
|
||||
name: service/network/finger
|
||||
state: absent
|
||||
|
||||
- name: Install several packages at once
|
||||
pkg5:
|
||||
name:
|
||||
- /file/gnu-findutils
|
||||
- /text/gnu-grep
|
||||
'''
|
||||
|
||||
import re
|
||||
|
||||
from ansible.module_utils.basic import AnsibleModule
|
||||
|
||||
|
||||
def main():
    """Module entry point: reassemble FRMIs that were split on ',' by the
    list parser, then dispatch to ensure() for the requested state."""
    module = AnsibleModule(
        argument_spec=dict(
            name=dict(type='list', required=True),
            state=dict(type='str', default='present', choices=['absent', 'installed', 'latest', 'present', 'removed', 'uninstalled']),
            accept_licenses=dict(type='bool', default=False, aliases=['accept', 'accept_licences']),
            be_name=dict(type='str'),
            refresh=dict(type='bool', default=True),
        ),
        supports_check_mode=True,
    )

    params = module.params

    # pkg(5) FRMIs include a comma before the release number, but
    # AnsibleModule will have split this into multiple items for us.
    # Try to spot where this has happened and fix it.
    packages = []
    for fragment in params['name']:
        starts_like_release = re.search(r'^\d+(?:\.\d+)*', fragment)
        previous_has_version = bool(packages) and re.search(r'@[^,]*$', packages[-1])
        if starts_like_release and previous_has_version:
            packages[-1] += ',' + fragment
        else:
            packages.append(fragment)

    state = params['state']
    if state in ['present', 'installed']:
        ensure(module, 'present', packages, params)
    elif state in ['latest']:
        ensure(module, 'latest', packages, params)
    elif state in ['absent', 'uninstalled', 'removed']:
        ensure(module, 'absent', packages, params)
|
||||
|
||||
def ensure(module, state, packages, params):
    """Bring *packages* to the desired *state* via the pkg(5) client.

    *state* is one of 'present', 'latest' or 'absent'.  Exits the module
    with the command result; never returns to the caller.
    """
    response = {
        'results': [],
        'msg': '',
    }
    # For each target state: which packages need work, and which pkg
    # subcommand performs it.
    behaviour = {
        'present': {
            'filter': lambda p: not is_installed(module, p),
            'subcommand': 'install',
        },
        'latest': {
            'filter': lambda p: (
                not is_installed(module, p) or not is_latest(module, p)
            ),
            'subcommand': 'install',
        },
        'absent': {
            'filter': lambda p: is_installed(module, p),
            'subcommand': 'uninstall',
        },
    }

    if module.check_mode:
        dry_run = ['-n']
    else:
        dry_run = []

    if params['accept_licenses']:
        accept_licenses = ['--accept']
    else:
        accept_licenses = []

    if params['be_name']:
        beadm = ['--be-name=' + module.params['be_name']]
    else:
        beadm = []

    if params['refresh']:
        no_refresh = []
    else:
        no_refresh = ['--no-refresh']

    # BUG FIX: on Python 3, filter() returns a lazy iterator, which is always
    # truthy, so the original `if to_modify:` executed the pkg command even
    # when nothing needed changing.  Materialise the result first.
    to_modify = list(filter(behaviour[state]['filter'], packages))
    if to_modify:
        rc, out, err = module.run_command(['pkg', behaviour[state]['subcommand']] + dry_run + accept_licenses + beadm + no_refresh + ['-q', '--'] + to_modify)
        response['rc'] = rc
        response['results'].append(out)
        response['msg'] += err
        response['changed'] = True
        # rc 4 means "nothing to do" — report success without a change.
        if rc == 4:
            response['changed'] = False
            response['failed'] = False
        elif rc != 0:
            module.fail_json(**response)

    module.exit_json(**response)
|
||||
|
||||
|
||||
def is_installed(module, package):
    """Return True when `pkg list` reports *package* as installed."""
    exit_code, _stdout, _stderr = module.run_command(['pkg', 'list', '--', package])
    # pkg list exits 0 only when the package is present.
    return exit_code == 0
|
||||
|
||||
|
||||
def is_latest(module, package):
    """Return True when no newer version of *package* is available.

    `pkg list -u` exits 0 when it can list an available update, so a
    non-zero exit means the installed version is already the latest.
    """
    exit_code, _stdout, _stderr = module.run_command(['pkg', 'list', '-u', '--', package])
    return exit_code != 0
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
main()
|
||||
201
plugins/modules/packaging/os/pkg5_publisher.py
Normal file
201
plugins/modules/packaging/os/pkg5_publisher.py
Normal file
@@ -0,0 +1,201 @@
|
||||
#!/usr/bin/python
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
# Copyright 2014 Peter Oliver <ansible@mavit.org.uk>
|
||||
#
|
||||
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||
|
||||
from __future__ import absolute_import, division, print_function
|
||||
__metaclass__ = type
|
||||
|
||||
|
||||
ANSIBLE_METADATA = {'metadata_version': '1.1',
|
||||
'status': ['preview'],
|
||||
'supported_by': 'community'}
|
||||
|
||||
|
||||
DOCUMENTATION = '''
|
||||
---
|
||||
module: pkg5_publisher
|
||||
author: "Peter Oliver (@mavit)"
|
||||
short_description: Manages Solaris 11 Image Packaging System publishers
|
||||
description:
|
||||
- IPS packages are the native packages in Solaris 11 and higher.
|
||||
- This modules will configure which publishers a client will download IPS
|
||||
packages from.
|
||||
options:
|
||||
name:
|
||||
description:
|
||||
- The publisher's name.
|
||||
required: true
|
||||
aliases: [ publisher ]
|
||||
state:
|
||||
description:
|
||||
- Whether to ensure that a publisher is present or absent.
|
||||
default: present
|
||||
choices: [ present, absent ]
|
||||
sticky:
|
||||
description:
|
||||
- Packages installed from a sticky repository can only receive updates
|
||||
from that repository.
|
||||
type: bool
|
||||
enabled:
|
||||
description:
|
||||
- Is the repository enabled or disabled?
|
||||
type: bool
|
||||
origin:
|
||||
description:
|
||||
- A path or URL to the repository.
|
||||
- Multiple values may be provided.
|
||||
mirror:
|
||||
description:
|
||||
- A path or URL to the repository mirror.
|
||||
- Multiple values may be provided.
|
||||
'''
|
||||
EXAMPLES = '''
|
||||
# Fetch packages for the solaris publisher direct from Oracle:
|
||||
- pkg5_publisher:
|
||||
name: solaris
|
||||
sticky: true
|
||||
origin: https://pkg.oracle.com/solaris/support/
|
||||
|
||||
# Configure a publisher for locally-produced packages:
|
||||
- pkg5_publisher:
|
||||
name: site
|
||||
origin: 'https://pkg.example.com/site/'
|
||||
'''
|
||||
|
||||
from ansible.module_utils.basic import AnsibleModule
|
||||
|
||||
|
||||
def main():
    """Entry point for pkg5_publisher: normalise list options and dispatch
    on the requested state."""
    module = AnsibleModule(
        argument_spec=dict(
            name=dict(required=True, aliases=['publisher']),
            state=dict(default='present', choices=['present', 'absent']),
            sticky=dict(type='bool'),
            enabled=dict(type='bool'),
            # search_after=dict(),
            # search_before=dict(),
            origin=dict(type='list'),
            mirror=dict(type='list'),
        )
    )

    # An explicitly empty origin/mirror arrives as [''] after the list
    # split; normalise it to an empty list.
    for option in ['origin', 'mirror']:
        if module.params[option] == ['']:
            module.params[option] = []

    if module.params['state'] == 'present':
        modify_publisher(module, module.params)
    else:
        unset_publisher(module, module.params['name'])
|
||||
|
||||
|
||||
def modify_publisher(module, params):
    """Create or reconfigure the publisher, exiting unchanged if it already
    matches every requested option.

    Delegates changes to set_publisher(); exits the module either way, so
    this function never returns to its caller.
    """
    name = params['name']
    existing = get_publishers(module)

    if name in existing:
        for option in ['origin', 'mirror', 'sticky', 'enabled']:
            # Only compare options the user actually supplied; any
            # difference triggers a full (re)configuration.
            if params[option] is not None:
                if params[option] != existing[name][option]:
                    return set_publisher(module, params)
    else:
        return set_publisher(module, params)

    # Publisher exists and matches all requested options: no change.
    module.exit_json()
|
||||
|
||||
|
||||
def set_publisher(module, params):
    """Apply the requested configuration with `pkg set-publisher`.

    Exits the module with the command result (changed=True); never returns.
    """
    name = params['name']
    args = []

    # Replace (not merge) origins/mirrors so the final set matches exactly
    # what the task specified.
    if params['origin'] is not None:
        args.append('--remove-origin=*')
        args.extend(['--add-origin=' + u for u in params['origin']])
    if params['mirror'] is not None:
        args.append('--remove-mirror=*')
        args.extend(['--add-mirror=' + u for u in params['mirror']])

    if params['sticky'] is not None and params['sticky']:
        args.append('--sticky')
    elif params['sticky'] is not None:
        args.append('--non-sticky')

    if params['enabled'] is not None and params['enabled']:
        args.append('--enable')
    elif params['enabled'] is not None:
        args.append('--disable')

    rc, out, err = module.run_command(
        ["pkg", "set-publisher"] + args + [name],
        check_rc=True
    )
    response = {
        'rc': rc,
        'results': [out],
        'msg': err,
        'changed': True,
    }
    # NOTE(review): with check_rc=True run_command already aborts on a
    # non-zero exit, so this branch is effectively defensive.
    if rc != 0:
        module.fail_json(**response)
    module.exit_json(**response)
|
||||
|
||||
|
||||
def unset_publisher(module, publisher):
    """Remove *publisher* with `pkg unset-publisher` (idempotent).

    Exits unchanged when the publisher is not configured; never returns.
    """
    if publisher not in get_publishers(module):
        module.exit_json()

    rc, out, err = module.run_command(
        ["pkg", "unset-publisher", publisher],
        check_rc=True
    )
    response = {
        'rc': rc,
        'results': [out],
        'msg': err,
        'changed': True,
    }
    # Defensive: check_rc=True already aborts on a non-zero exit code.
    if rc != 0:
        module.fail_json(**response)
    module.exit_json(**response)
|
||||
|
||||
|
||||
def get_publishers(module):
    """Parse `pkg publisher -Ftsv` output into a dict keyed by publisher name.

    Each entry holds 'sticky' and 'enabled' booleans (decoded by
    unstringify()) plus 'origin' and 'mirror' lists of URIs.
    """
    rc, out, err = module.run_command(["pkg", "publisher", "-Ftsv"], True)

    lines = out.splitlines()
    # The first line is the TSV header; its column names become dict keys.
    keys = lines.pop(0).lower().split("\t")

    publishers = {}
    for line in lines:
        values = dict(zip(keys, map(unstringify, line.split("\t"))))
        name = values['publisher']

        if name not in publishers:
            publishers[name] = dict(
                (k, values[k]) for k in ['sticky', 'enabled']
            )
            publishers[name]['origin'] = []
            publishers[name]['mirror'] = []

        # Each row describes one URI; 'type' is 'origin' or 'mirror', or
        # None (via unstringify) for a publisher with no URIs configured.
        if values['type'] is not None:
            publishers[name][values['type']].append(values['uri'])

    return publishers
|
||||
|
||||
|
||||
def unstringify(val):
    """Decode a pkg(5) TSV field: '-' or '' -> None, 'true'/'false' -> bool.

    Any other string is returned unchanged.
    """
    special = {"-": None, "": None, "true": True, "false": False}
    if val in special:
        return special[val]
    return val
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
main()
|
||||
375
plugins/modules/packaging/os/pkgin.py
Normal file
375
plugins/modules/packaging/os/pkgin.py
Normal file
@@ -0,0 +1,375 @@
|
||||
#!/usr/bin/python
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
# Copyright (c) 2013 Shaun Zinck <shaun.zinck at gmail.com>
|
||||
# Copyright (c) 2015 Lawrence Leonard Gilbert <larry@L2G.to>
|
||||
# Copyright (c) 2016 Jasper Lievisse Adriaanse <j at jasper.la>
|
||||
#
|
||||
# Written by Shaun Zinck
|
||||
# Based on pacman module written by Afterburn <http://github.com/afterburn>
|
||||
# that was based on apt module written by Matthew Williams <matthew@flowroute.com>
|
||||
#
|
||||
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||
|
||||
from __future__ import absolute_import, division, print_function
|
||||
__metaclass__ = type
|
||||
|
||||
|
||||
ANSIBLE_METADATA = {'metadata_version': '1.1',
|
||||
'status': ['preview'],
|
||||
'supported_by': 'community'}
|
||||
|
||||
|
||||
DOCUMENTATION = '''
|
||||
---
|
||||
module: pkgin
|
||||
short_description: Package manager for SmartOS, NetBSD, et al.
|
||||
description:
|
||||
- "The standard package manager for SmartOS, but also usable on NetBSD
|
||||
or any OS that uses C(pkgsrc). (Home: U(http://pkgin.net/))"
|
||||
author:
|
||||
- "Larry Gilbert (@L2G)"
|
||||
- "Shaun Zinck (@szinck)"
|
||||
- "Jasper Lievisse Adriaanse (@jasperla)"
|
||||
notes:
|
||||
- "Known bug with pkgin < 0.8.0: if a package is removed and another
|
||||
package depends on it, the other package will be silently removed as
|
||||
well. New to Ansible 1.9: check-mode support."
|
||||
options:
|
||||
name:
|
||||
description:
|
||||
- Name of package to install/remove;
|
||||
- multiple names may be given, separated by commas
|
||||
state:
|
||||
description:
|
||||
- Intended state of the package
|
||||
choices: [ 'present', 'absent' ]
|
||||
default: present
|
||||
update_cache:
|
||||
description:
|
||||
- Update repository database. Can be run with other steps or on its own.
|
||||
type: bool
|
||||
default: 'no'
|
||||
upgrade:
|
||||
description:
|
||||
- Upgrade main packages to their newer versions
|
||||
type: bool
|
||||
default: 'no'
|
||||
full_upgrade:
|
||||
description:
|
||||
- Upgrade all packages to their newer versions
|
||||
type: bool
|
||||
default: 'no'
|
||||
clean:
|
||||
description:
|
||||
- Clean packages cache
|
||||
type: bool
|
||||
default: 'no'
|
||||
force:
|
||||
description:
|
||||
- Force package reinstall
|
||||
type: bool
|
||||
default: 'no'
|
||||
'''
|
||||
|
||||
EXAMPLES = '''
|
||||
# install package foo
|
||||
- pkgin:
|
||||
name: foo
|
||||
state: present
|
||||
|
||||
# Update database and install "foo" package
|
||||
- pkgin:
|
||||
name: foo
|
||||
update_cache: yes
|
||||
|
||||
# remove package foo
|
||||
- pkgin:
|
||||
name: foo
|
||||
state: absent
|
||||
|
||||
# remove packages foo and bar
|
||||
- pkgin:
|
||||
name: foo,bar
|
||||
state: absent
|
||||
|
||||
# Update repositories as a separate step
|
||||
- pkgin:
|
||||
update_cache: yes
|
||||
|
||||
# Upgrade main packages (equivalent to C(pkgin upgrade))
|
||||
- pkgin:
|
||||
upgrade: yes
|
||||
|
||||
# Upgrade all packages (equivalent to C(pkgin full-upgrade))
|
||||
- pkgin:
|
||||
full_upgrade: yes
|
||||
|
||||
# Force-upgrade all packages (equivalent to C(pkgin -F full-upgrade))
|
||||
- pkgin:
|
||||
full_upgrade: yes
|
||||
force: yes
|
||||
|
||||
# clean packages cache (equivalent to C(pkgin clean))
|
||||
- pkgin:
|
||||
clean: yes
|
||||
'''
|
||||
|
||||
|
||||
import re
|
||||
|
||||
from ansible.module_utils.basic import AnsibleModule
|
||||
|
||||
|
||||
def query_package(module, name):
    """Search for the package by name.

    Possible return values:
    * "present"  - installed, no upgrade needed
    * "outdated" - installed, but can be upgraded
    * False      - not installed or not found
    """

    # test whether '-p' (parsable) flag is supported.
    rc, out, err = module.run_command("%s -p -v" % PKGIN_PATH)

    if rc == 0:
        pflag = '-p'
        splitchar = ';'
    else:
        pflag = ''
        splitchar = ' '

    # Use "pkgin search" to find the package. The regular expression will
    # only match on the complete name.
    rc, out, err = module.run_command("%s %s search \"^%s$\"" % (PKGIN_PATH, pflag, name))

    # rc will not be 0 unless the search was a success
    if rc == 0:

        # Search results may contain more than one line (e.g., 'emacs'), so iterate
        # through each line to see if we have a match.
        packages = out.split('\n')

        for package in packages:

            # Break up line at spaces. The first part will be the package with its
            # version (e.g. 'gcc47-libs-4.7.2nb4'), and the second will be the state
            # of the package:
            #   ''  - not installed
            #   '<' - installed but out of date
            #   '=' - installed and up to date
            #   '>' - installed but newer than the repository version
            # NOTE(review): a result line without the separator would raise
            # ValueError here — presumably pkgin always emits both fields.
            pkgname_with_version, raw_state = package.split(splitchar)[0:2]

            # Search for package, stripping version
            # (results in sth like 'gcc47-libs' or 'emacs24-nox11')
            pkg_search_obj = re.search(r'^(.*?)\-[0-9][0-9.]*(nb[0-9]+)*', pkgname_with_version, re.M)

            # Do not proceed unless we have a match
            if not pkg_search_obj:
                continue

            # Grab matched string
            pkgname_without_version = pkg_search_obj.group(1)

            if name != pkgname_without_version:
                continue

            # The package was found; now return its state
            if raw_state == '<':
                return 'outdated'
            elif raw_state == '=' or raw_state == '>':
                return 'present'
            else:
                return False
            # no fall-through

    # No packages were matched, so return False
    return False
|
||||
|
||||
|
||||
def format_action_message(module, action, count):
    """Build a result message like 'removed 3 packages'.

    In check mode the message is prefixed with 'would have '; the noun is
    pluralised for any count other than one.
    """
    prefix = "would have " if module.check_mode else ""
    plural = "" if count == 1 else "s"
    return "%s%s %d package%s" % (prefix, action, count, plural)
|
||||
|
||||
|
||||
def format_pkgin_command(module, command, package=None):
    """Build a complete pkgin command line for *command*.

    Commands that take no package still receive an empty placeholder so a
    single format covers every subcommand.  Check mode uses -n (dry run);
    otherwise -y (assume yes) plus -F when force is requested.
    """
    # Not all commands take a package argument, so cover this up by passing
    # an empty string. Some commands (e.g. 'update') will ignore extra
    # arguments, however this behaviour cannot be relied on for others.
    pkg_arg = "" if package is None else package
    force_flag = "-F" if module.params["force"] else ""

    if module.check_mode:
        return "%s -n %s %s" % (PKGIN_PATH, command, pkg_arg)
    return "%s -y %s %s %s" % (PKGIN_PATH, force_flag, command, pkg_arg)
|
||||
|
||||
|
||||
def remove_packages(module, packages):
    """Remove each installed package in *packages*; exits the module."""

    remove_c = 0

    # Using a for loop in case of error, we can report the package that failed
    for package in packages:
        # Query the package first, to see if we even need to remove
        if not query_package(module, package):
            continue

        rc, out, err = module.run_command(
            format_pkgin_command(module, "remove", package))

        # Verify the removal took effect (skipped in check mode, where the
        # dry-run command changes nothing).
        if not module.check_mode and query_package(module, package):
            module.fail_json(msg="failed to remove %s: %s" % (package, out))

        remove_c += 1

    if remove_c > 0:
        module.exit_json(changed=True, msg=format_action_message(module, "removed", remove_c))

    module.exit_json(changed=False, msg="package(s) already absent")
|
||||
|
||||
|
||||
def install_packages(module, packages):
    """Install each missing package in *packages*; exits the module."""

    install_c = 0

    for package in packages:
        # Skip packages that are already present.
        if query_package(module, package):
            continue

        rc, out, err = module.run_command(
            format_pkgin_command(module, "install", package))

        # Verify the install took effect (skipped in check mode, where the
        # dry-run command changes nothing).
        if not module.check_mode and not query_package(module, package):
            module.fail_json(msg="failed to install %s: %s" % (package, out))

        install_c += 1

    if install_c > 0:
        module.exit_json(changed=True, msg=format_action_message(module, "installed", install_c))

    module.exit_json(changed=False, msg="package(s) already present")
|
||||
|
||||
|
||||
def update_package_db(module):
    """Run `pkgin update`; return (changed, message).

    Fails the module when the update command exits non-zero.
    """
    rc, out, err = module.run_command(
        format_pkgin_command(module, "update"))

    if rc == 0:
        # pkgin reports an up-to-date database explicitly; anything else
        # means the repository data was refreshed.
        if re.search('database for.*is up-to-date\n$', out):
            return False, "database is up-to-date"
        else:
            return True, "updated repository database"
    else:
        module.fail_json(msg="could not update package db")
|
||||
|
||||
|
||||
def do_upgrade_packages(module, full=False):
    """Run `pkgin upgrade` (or `full-upgrade` when *full* is true).

    Exits with changed=False when there was nothing to do, fails the module
    when the command exits non-zero, and otherwise returns so the caller
    can report the upgrade.
    """
    if full:
        cmd = "full-upgrade"
    else:
        cmd = "upgrade"

    rc, out, err = module.run_command(
        format_pkgin_command(module, cmd))

    if rc == 0:
        if re.search('^nothing to do.\n$', out):
            module.exit_json(changed=False, msg="nothing left to upgrade")
    else:
        module.fail_json(msg="could not %s packages" % cmd)
|
||||
|
||||
|
||||
def upgrade_packages(module):
    """Upgrade main packages (equivalent to `pkgin upgrade`)."""
    do_upgrade_packages(module)
|
||||
|
||||
|
||||
def full_upgrade_packages(module):
    """Upgrade all packages (equivalent to `pkgin full-upgrade`)."""
    do_upgrade_packages(module, True)
|
||||
|
||||
|
||||
def clean_cache(module):
    """Run `pkgin clean`; exits the module on success or failure.

    NOTE(review): exit_json here ends the run unconditionally on success,
    so a task combining clean with name/upgrade never reaches the later
    steps in main() — confirm this is intended.
    """
    rc, out, err = module.run_command(
        format_pkgin_command(module, "clean"))

    if rc == 0:
        # There's no indication if 'clean' actually removed anything,
        # so assume it did.
        module.exit_json(changed=True, msg="cleaned caches")
    else:
        module.fail_json(msg="could not clean package cache")
|
||||
|
||||
|
||||
def main():
    """Entry point for the pkgin module."""
    module = AnsibleModule(
        argument_spec=dict(
            state=dict(default="present", choices=["present", "absent"]),
            name=dict(aliases=["pkg"], type='list'),
            update_cache=dict(default='no', type='bool'),
            upgrade=dict(default='no', type='bool'),
            full_upgrade=dict(default='no', type='bool'),
            clean=dict(default='no', type='bool'),
            force=dict(default='no', type='bool')),
        required_one_of=[['name', 'update_cache', 'upgrade', 'full_upgrade', 'clean']],
        supports_check_mode=True)

    global PKGIN_PATH
    PKGIN_PATH = module.get_bin_path('pkgin', True, ['/opt/local/bin'])

    # Force a stable C locale so pkgin output parsing is predictable.
    module.run_command_environ_update = dict(LANG='C', LC_ALL='C', LC_MESSAGES='C', LC_CTYPE='C')

    p = module.params

    # Maintenance actions run first; each exits early when no package
    # names were supplied alongside it.
    if p["update_cache"]:
        c, msg = update_package_db(module)
        if not (p['name'] or p["upgrade"] or p["full_upgrade"]):
            module.exit_json(changed=c, msg=msg)

    if p["upgrade"]:
        upgrade_packages(module)
        if not p['name']:
            module.exit_json(changed=True, msg='upgraded packages')

    if p["full_upgrade"]:
        full_upgrade_packages(module)
        if not p['name']:
            module.exit_json(changed=True, msg='upgraded all packages')

    if p["clean"]:
        clean_cache(module)
        if not p['name']:
            module.exit_json(changed=True, msg='cleaned caches')

    pkgs = p["name"]

    if p["state"] == "present":
        install_packages(module, pkgs)

    elif p["state"] == "absent":
        remove_packages(module, pkgs)
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
main()
|
||||
401
plugins/modules/packaging/os/pkgng.py
Normal file
401
plugins/modules/packaging/os/pkgng.py
Normal file
@@ -0,0 +1,401 @@
|
||||
#!/usr/bin/python
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
# (c) 2013, bleader
|
||||
# Written by bleader <bleader@ratonland.org>
|
||||
# Based on pkgin module written by Shaun Zinck <shaun.zinck at gmail.com>
|
||||
# that was based on pacman module written by Afterburn <https://github.com/afterburn>
|
||||
# that was based on apt module written by Matthew Williams <matthew@flowroute.com>
|
||||
#
|
||||
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||
|
||||
from __future__ import absolute_import, division, print_function
|
||||
__metaclass__ = type
|
||||
|
||||
|
||||
ANSIBLE_METADATA = {'metadata_version': '1.1',
|
||||
'status': ['preview'],
|
||||
'supported_by': 'community'}
|
||||
|
||||
|
||||
DOCUMENTATION = '''
|
||||
---
|
||||
module: pkgng
|
||||
short_description: Package manager for FreeBSD >= 9.0
|
||||
description:
|
||||
- Manage binary packages for FreeBSD using 'pkgng' which is available in versions after 9.0.
|
||||
options:
|
||||
name:
|
||||
description:
|
||||
- Name or list of names of packages to install/remove.
|
||||
required: true
|
||||
state:
|
||||
description:
|
||||
- State of the package.
|
||||
- 'Note: "latest" added in 2.7'
|
||||
choices: [ 'present', 'latest', 'absent' ]
|
||||
required: false
|
||||
default: present
|
||||
cached:
|
||||
description:
|
||||
- Use local package base instead of fetching an updated one.
|
||||
type: bool
|
||||
required: false
|
||||
default: no
|
||||
annotation:
|
||||
description:
|
||||
- A comma-separated list of keyvalue-pairs of the form
|
||||
C(<+/-/:><key>[=<value>]). A C(+) denotes adding an annotation, a
|
||||
C(-) denotes removing an annotation, and C(:) denotes modifying an
|
||||
annotation.
|
||||
If setting or modifying annotations, a value must be provided.
|
||||
required: false
|
||||
pkgsite:
|
||||
description:
|
||||
- For pkgng versions before 1.1.4, specify packagesite to use
|
||||
for downloading packages. If not specified, use settings from
|
||||
C(/usr/local/etc/pkg.conf).
|
||||
- For newer pkgng versions, specify a the name of a repository
|
||||
configured in C(/usr/local/etc/pkg/repos).
|
||||
required: false
|
||||
rootdir:
|
||||
description:
|
||||
- For pkgng versions 1.5 and later, pkg will install all packages
|
||||
within the specified root directory.
|
||||
- Can not be used together with I(chroot) or I(jail) options.
|
||||
required: false
|
||||
chroot:
|
||||
description:
|
||||
- Pkg will chroot in the specified environment.
|
||||
- Can not be used together with I(rootdir) or I(jail) options.
|
||||
required: false
|
||||
jail:
|
||||
description:
|
||||
- Pkg will execute in the given jail name or id.
|
||||
- Can not be used together with I(chroot) or I(rootdir) options.
|
||||
autoremove:
|
||||
description:
|
||||
- Remove automatically installed packages which are no longer needed.
|
||||
required: false
|
||||
type: bool
|
||||
default: no
|
||||
author: "bleader (@bleader)"
|
||||
notes:
|
||||
- When using pkgsite, be careful that already in cache packages won't be downloaded again.
|
||||
- When used with a `loop:` each package will be processed individually,
|
||||
it is much more efficient to pass the list directly to the `name` option.
|
||||
'''
|
||||
|
||||
EXAMPLES = '''
|
||||
- name: Install package foo
|
||||
pkgng:
|
||||
name: foo
|
||||
state: present
|
||||
|
||||
- name: Annotate package foo and bar
|
||||
pkgng:
|
||||
name: foo,bar
|
||||
annotation: '+test1=baz,-test2,:test3=foobar'
|
||||
|
||||
- name: Remove packages foo and bar
|
||||
pkgng:
|
||||
name: foo,bar
|
||||
state: absent
|
||||
|
||||
# "latest" support added in 2.7
|
||||
- name: Upgrade package baz
|
||||
pkgng:
|
||||
name: baz
|
||||
state: latest
|
||||
'''
|
||||
|
||||
|
||||
import re
|
||||
from ansible.module_utils.basic import AnsibleModule
|
||||
|
||||
|
||||
def query_package(module, pkgng_path, name, dir_arg):
    """Return True when `pkg info` reports *name* as installed."""
    exit_code, _stdout, _stderr = module.run_command("%s %s info -g -e %s" % (pkgng_path, dir_arg, name))
    # pkg info -e exits 0 only when the package exists.
    return exit_code == 0
|
||||
|
||||
|
||||
def query_update(module, pkgng_path, name, dir_arg, old_pkgng, pkgsite):
    """Return True when a dry-run upgrade shows an update for *name*.

    `pkg upgrade -n` exits 0 when no updates are available (or the package
    is not installed) and 1 when an upgrade would be performed.
    """
    if old_pkgng:
        # pre-1.1.4 pkg selects the repository via the PACKAGESITE
        # environment prefix rather than a -r flag.
        cmd = "%s %s upgrade -g -n %s" % (pkgsite, pkgng_path, name)
    else:
        cmd = "%s %s upgrade %s -g -n %s" % (pkgng_path, dir_arg, pkgsite, name)

    exit_code, _stdout, _stderr = module.run_command(cmd)
    return exit_code == 1
|
||||
|
||||
|
||||
def pkgng_older_than(module, pkgng_path, compare_version):
    """Return True when the installed pkg tool is older than *compare_version*.

    Parses `pkg -v` output (dot/underscore separated integers) and compares
    it component-wise against the given list of ints.
    """
    _rc, out, _err = module.run_command("%s -v" % pkgng_path)
    installed = [int(part) for part in re.split(r'[\._]', out)]

    # Walk the shared prefix; the first differing component decides.
    for wanted, actual in zip(compare_version, installed):
        if wanted != actual:
            return wanted > actual
    # All compared components are equal: not older.
    return False
|
||||
|
||||
|
||||
def remove_packages(module, pkgng_path, packages, dir_arg):
    """Delete each installed package in *packages*; return (changed, message)."""

    remove_c = 0
    # Using a for loop in case of error, we can report the package that failed
    for package in packages:
        # Query the package first, to see if we even need to remove
        if not query_package(module, pkgng_path, package, dir_arg):
            continue

        if not module.check_mode:
            rc, out, err = module.run_command("%s %s delete -y %s" % (pkgng_path, dir_arg, package))

        # Verify removal (skipped in check mode, where nothing was run).
        if not module.check_mode and query_package(module, pkgng_path, package, dir_arg):
            module.fail_json(msg="failed to remove %s: %s" % (package, out))

        remove_c += 1

    if remove_c > 0:

        return (True, "removed %s package(s)" % remove_c)

    return (False, "package(s) already absent")
|
||||
|
||||
|
||||
def install_packages(module, pkgng_path, packages, cached, pkgsite, dir_arg, state):
    """Install or upgrade *packages*; return (changed, message).

    *state* is 'present' (skip installed packages) or 'latest' (also
    upgrade outdated ones).  *cached* skips the catalogue refresh.
    """

    install_c = 0

    # as of pkg-1.1.4, PACKAGESITE is deprecated in favor of repository definitions
    # in /usr/local/etc/pkg/repos
    old_pkgng = pkgng_older_than(module, pkgng_path, [1, 1, 4])
    if pkgsite != "":
        if old_pkgng:
            pkgsite = "PACKAGESITE=%s" % (pkgsite)
        else:
            pkgsite = "-r %s" % (pkgsite)

    # This environment variable skips mid-install prompts,
    # setting them to their default values.
    batch_var = 'env BATCH=yes'

    if not module.check_mode and not cached:
        if old_pkgng:
            rc, out, err = module.run_command("%s %s update" % (pkgsite, pkgng_path))
        else:
            rc, out, err = module.run_command("%s %s update" % (pkgng_path, dir_arg))
        if rc != 0:
            module.fail_json(msg="Could not update catalogue [%d]: %s %s" % (rc, out, err))

    for package in packages:
        already_installed = query_package(module, pkgng_path, package, dir_arg)
        if already_installed and state == "present":
            continue

        # For state=latest, skip packages that are installed and current.
        update_available = query_update(module, pkgng_path, package, dir_arg, old_pkgng, pkgsite)
        if not update_available and already_installed and state == "latest":
            continue

        if not module.check_mode:
            if already_installed:
                action = "upgrade"
            else:
                action = "install"
            if old_pkgng:
                rc, out, err = module.run_command("%s %s %s %s -g -U -y %s" % (batch_var, pkgsite, pkgng_path, action, package))
            else:
                rc, out, err = module.run_command("%s %s %s %s %s -g -U -y %s" % (batch_var, pkgng_path, dir_arg, action, pkgsite, package))

        # Confirm the package is now present (skipped in check mode).
        if not module.check_mode and not query_package(module, pkgng_path, package, dir_arg):
            module.fail_json(msg="failed to %s %s: %s" % (action, package, out), stderr=err)

        install_c += 1

    if install_c > 0:
        return (True, "added %s package(s)" % (install_c))

    return (False, "package(s) already %s" % (state))
|
||||
|
||||
|
||||
def annotation_query(module, pkgng_path, package, tag, dir_arg):
    """Return the value of annotation *tag* on *package*, or False if unset."""
    _rc, out, _err = module.run_command("%s %s info -g -A %s" % (pkgng_path, dir_arg, package))
    # Annotations are printed one per line as "<tag> : <value>".
    found = re.search(r'^\s*(?P<tag>%s)\s*:\s*(?P<value>\w+)' % tag, out, flags=re.MULTILINE)
    return found.group('value') if found else False
|
||||
|
||||
|
||||
def annotation_add(module, pkgng_path, package, tag, value, dir_arg):
    """Ensure annotation *tag* is set to *value* on *package*.

    Returns True when the annotation was added, False when it already
    matched; fails the module when the tag exists with a different value
    (this function never overwrites — that is annotation_modify's job).
    """
    _value = annotation_query(module, pkgng_path, package, tag, dir_arg)
    if not _value:
        # Annotation does not exist, add it.
        rc, out, err = module.run_command('%s %s annotate -y -A %s %s "%s"'
                                          % (pkgng_path, dir_arg, package, tag, value))
        if rc != 0:
            module.fail_json(msg="could not annotate %s: %s"
                             % (package, out), stderr=err)
        return True
    elif _value != value:
        # Annotation exists, but value differs.
        # BUG FIX: the original passed the message as `mgs=`, so fail_json
        # received no `msg` and the failure carried no useful error text.
        module.fail_json(
            msg="failed to annotate %s, because %s is already set to %s, but should be set to %s"
            % (package, tag, _value, value))
        return False
    else:
        # Annotation exists with the requested value: nothing to do.
        return False
|
||||
|
||||
|
||||
def annotation_delete(module, pkgng_path, package, tag, value, dir_arg):
    """Remove annotation *tag* from *package* if set; return True on change.

    *value* is accepted for interface symmetry with the other annotation
    handlers but is not used when deleting.
    """
    _value = annotation_query(module, pkgng_path, package, tag, dir_arg)
    if _value:
        rc, out, err = module.run_command('%s %s annotate -y -D %s %s'
                                          % (pkgng_path, dir_arg, package, tag))
        if rc != 0:
            module.fail_json(msg="could not delete annotation to %s: %s"
                             % (package, out), stderr=err)
        return True
    return False
|
||||
|
||||
|
||||
def annotation_modify(module, pkgng_path, package, tag, value, dir_arg):
    """Change annotation *tag* on *package* to *value*.

    Returns True when the annotation was modified, False when it already
    had the requested value; fails the module when the tag does not exist.
    """
    _value = annotation_query(module, pkgng_path, package, tag, dir_arg)
    # BUG FIX: the original tested `value` (the requested new value) here,
    # but "tag does not exist" is signalled by the *query* result, so a
    # missing tag with a truthy new value slipped straight through to the
    # modify command.
    if not _value:
        # No such tag
        module.fail_json(msg="could not change annotation to %s: tag %s does not exist"
                         % (package, tag))
    elif _value == value:
        # No change in value
        return False
    else:
        rc, out, err = module.run_command('%s %s annotate -y -M %s %s "%s"'
                                          % (pkgng_path, dir_arg, package, tag, value))
        # Also fixes the duplicated word in the original error message
        # ("could not change annotation annotation to ...").
        if rc != 0:
            module.fail_json(msg="could not change annotation to %s: %s"
                             % (package, out), stderr=err)
        return True
|
||||
|
||||
|
||||
def annotate_packages(module, pkgng_path, packages, annotation, dir_arg):
    """Apply a comma-separated annotation spec to every package.

    Each spec item is <op><tag>[=<value>] where <op> is '+' (add),
    '-' (delete) or ':' (modify).  Returns (changed, message).
    """
    annotate_c = 0
    # BUG FIX: map() returns a one-shot iterator on Python 3, so the parsed
    # annotations were exhausted after the first package.  Build a list so
    # they can be replayed for every package.
    annotations = [
        re.match(r'(?P<operation>[\+-:])(?P<tag>\w+)(=(?P<value>\w+))?',
                 _annotation).groupdict()
        for _annotation in re.split(r',', annotation)
    ]

    operation = {
        '+': annotation_add,
        '-': annotation_delete,
        ':': annotation_modify
    }

    for package in packages:
        for _annotation in annotations:
            # BUG FIX: the handlers take dir_arg as their sixth argument;
            # the original call omitted it and raised TypeError.
            if operation[_annotation['operation']](module, pkgng_path, package,
                                                   _annotation['tag'], _annotation['value'], dir_arg):
                annotate_c += 1

    if annotate_c > 0:
        return (True, "added %s annotations." % annotate_c)
    return (False, "changed no annotations")
|
||||
|
||||
|
||||
def autoremove_packages(module, pkgng_path, dir_arg):
    """Remove orphaned automatic dependencies; return (changed, message)."""
    # Dry-run first to learn how many packages would be removed.
    rc, out, err = module.run_command("%s %s autoremove -n" % (pkgng_path, dir_arg))

    found = re.search('^Deinstallation has been requested for the following ([0-9]+) packages', out, re.MULTILINE)
    count = int(found.group(1)) if found else 0

    if count == 0:
        return False, "no package(s) to autoremove"

    # Only perform the real removal outside of check mode.
    if not module.check_mode:
        rc, out, err = module.run_command("%s %s autoremove -y" % (pkgng_path, dir_arg))

    return True, "autoremoved %d package(s)" % (count)
|
||||
|
||||
|
||||
def main():
    """Entry point for the pkgng module.

    Parses arguments, resolves the pkg binary and dispatches to the
    install/remove/autoremove/annotate helpers, accumulating their
    changed flags and messages into a single module result.
    """
    module = AnsibleModule(
        argument_spec=dict(
            state=dict(default="present", choices=["present", "latest", "absent"], required=False),
            name=dict(aliases=["pkg"], required=True, type='list'),
            cached=dict(default=False, type='bool'),
            annotation=dict(default="", required=False),
            pkgsite=dict(default="", required=False),
            rootdir=dict(default="", required=False, type='path'),
            chroot=dict(default="", required=False, type='path'),
            jail=dict(default="", required=False, type='str'),
            autoremove=dict(default=False, type='bool')),
        supports_check_mode=True,
        # Only one install destination may be selected at a time.
        mutually_exclusive=[["rootdir", "chroot", "jail"]])

    pkgng_path = module.get_bin_path('pkg', True)

    p = module.params

    pkgs = p["name"]

    changed = False
    msgs = []
    # Extra command-line argument selecting the install destination
    # (rootdir / chroot / jail); empty means the live system.
    dir_arg = ""

    if p["rootdir"] != "":
        # --rootdir is only understood by pkg >= 1.5.
        old_pkgng = pkgng_older_than(module, pkgng_path, [1, 5, 0])
        if old_pkgng:
            module.fail_json(msg="To use option 'rootdir' pkg version must be 1.5 or greater")
        else:
            dir_arg = "--rootdir %s" % (p["rootdir"])

    if p["chroot"] != "":
        dir_arg = '--chroot %s' % (p["chroot"])

    if p["jail"] != "":
        dir_arg = '--jail %s' % (p["jail"])

    if p["state"] in ("present", "latest"):
        _changed, _msg = install_packages(module, pkgng_path, pkgs, p["cached"], p["pkgsite"], dir_arg, p["state"])
        changed = changed or _changed
        msgs.append(_msg)

    elif p["state"] == "absent":
        _changed, _msg = remove_packages(module, pkgng_path, pkgs, dir_arg)
        changed = changed or _changed
        msgs.append(_msg)

    # autoremove and annotation run in addition to the state handling above,
    # so a single task can combine them.
    if p["autoremove"]:
        _changed, _msg = autoremove_packages(module, pkgng_path, dir_arg)
        changed = changed or _changed
        msgs.append(_msg)

    if p["annotation"]:
        _changed, _msg = annotate_packages(module, pkgng_path, pkgs, p["annotation"], dir_arg)
        changed = changed or _changed
        msgs.append(_msg)

    module.exit_json(changed=changed, msg=", ".join(msgs))


if __name__ == '__main__':
    main()
|
||||
228
plugins/modules/packaging/os/pkgutil.py
Normal file
228
plugins/modules/packaging/os/pkgutil.py
Normal file
@@ -0,0 +1,228 @@
|
||||
#!/usr/bin/python
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
# (c) 2013, Alexander Winkler <mail () winkler-alexander.de>
|
||||
# based on svr4pkg by
|
||||
# Boyd Adamson <boyd () boydadamson.com> (2012)
|
||||
#
|
||||
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||
|
||||
from __future__ import absolute_import, division, print_function
|
||||
__metaclass__ = type
|
||||
|
||||
|
||||
ANSIBLE_METADATA = {'metadata_version': '1.1',
|
||||
'status': ['stableinterface'],
|
||||
'supported_by': 'community'}
|
||||
|
||||
|
||||
DOCUMENTATION = '''
|
||||
---
|
||||
module: pkgutil
|
||||
short_description: Manage CSW-Packages on Solaris
|
||||
description:
|
||||
- Manages CSW packages (SVR4 format) on Solaris 10 and 11.
|
||||
- These were the native packages on Solaris <= 10 and are available
|
||||
as a legacy feature in Solaris 11.
|
||||
- Pkgutil is an advanced packaging system, which resolves dependency on installation.
|
||||
It is designed for CSW packages.
|
||||
author: "Alexander Winkler (@dermute)"
|
||||
options:
|
||||
name:
|
||||
description:
|
||||
- Package name, e.g. (C(CSWnrpe))
|
||||
required: true
|
||||
site:
|
||||
description:
|
||||
- Specifies the repository path to install the package from.
|
||||
- Its global definition is done in C(/etc/opt/csw/pkgutil.conf).
|
||||
required: false
|
||||
state:
|
||||
description:
|
||||
- Whether to install (C(present)), or remove (C(absent)) a package.
|
||||
- The upgrade (C(latest)) operation will update/install the package to the latest version available.
|
||||
- "Note: The module has a limitation that (C(latest)) only works for one package, not lists of them."
|
||||
required: true
|
||||
choices: ["present", "absent", "latest"]
|
||||
update_catalog:
|
||||
description:
|
||||
- If you want to refresh your catalog from the mirror, set this to (C(yes)).
|
||||
required: false
|
||||
default: False
|
||||
type: bool
|
||||
'''
|
||||
|
||||
EXAMPLES = '''
|
||||
# Install a package
|
||||
- pkgutil:
|
||||
name: CSWcommon
|
||||
state: present
|
||||
|
||||
# Install a package from a specific repository
|
||||
- pkgutil:
|
||||
name: CSWnrpe
|
||||
site: 'ftp://myinternal.repo/opencsw/kiel'
|
||||
state: latest
|
||||
'''
|
||||
|
||||
from ansible.module_utils.basic import AnsibleModule
|
||||
|
||||
|
||||
def package_installed(module, name):
    """Return True when *name* is already installed (pkginfo exits 0)."""
    rc, dummy_out, dummy_err = run_command(module, ['pkginfo', '-q', name])
    return rc == 0
|
||||
|
||||
|
||||
def package_latest(module, name, site):
    """Return True when the installed *name* already matches the catalog
    version (pkgutil -c reports SAME).  Only supports a single package."""
    cmd = ['pkgutil', '-U', '--single', '-c']
    if site is not None:
        cmd.extend(['-t', site])
    cmd.append(name)
    dummy_rc, out, dummy_err = run_command(module, cmd)
    # pkgutil's final output line carries the comparison result; splitting
    # on '\n' leaves a trailing empty entry, hence index -2 (the shell
    # equivalent was "| tail -1 | grep -v SAME").
    return 'SAME' in out.split('\n')[-2]
|
||||
|
||||
|
||||
def run_command(module, cmd, **kwargs):
    """Resolve the program name in ``cmd[0]`` to an absolute path (also
    searching /opt/csw/bin) before delegating to module.run_command()."""
    cmd[0] = module.get_bin_path(cmd[0], True, ['/opt/csw/bin'])
    return module.run_command(cmd, **kwargs)
|
||||
|
||||
|
||||
def package_install(module, state, name, site, update_catalog):
    """Install *name* via ``pkgutil -iy`` and return (rc, stdout, stderr)."""
    cmd = ['pkgutil', '-iy']
    if update_catalog:
        cmd.append('-U')          # refresh the catalog first
    if site is not None:
        cmd.extend(['-t', site])  # install from an explicit repository
    if state == 'latest':
        cmd.append('-f')          # force to the newest available version
    cmd.append(name)
    return run_command(module, cmd)
|
||||
|
||||
|
||||
def package_upgrade(module, name, site, update_catalog):
    """Upgrade *name* via ``pkgutil -ufy`` and return (rc, stdout, stderr)."""
    cmd = ['pkgutil', '-ufy']
    if update_catalog:
        cmd.append('-U')          # refresh the catalog first
    if site is not None:
        cmd.extend(['-t', site])  # upgrade from an explicit repository
    cmd.append(name)
    return run_command(module, cmd)
|
||||
|
||||
|
||||
def package_uninstall(module, name):
    """Remove *name* via ``pkgutil -ry`` and return (rc, stdout, stderr)."""
    return run_command(module, ['pkgutil', '-ry', name])
|
||||
|
||||
|
||||
def _report_failure(module, rc, out, err):
    """Truncate noisy stdout and fail the module on a non-zero exit code.

    Returns the (possibly truncated) stdout so the caller can include it in
    the final result.  This logic was previously copy-pasted four times in
    main().
    """
    # Stdout is normally empty but for some packages can be very long and
    # is not often useful.
    if len(out) > 75:
        out = out[:75] + '...'
    if rc != 0:
        module.fail_json(msg=err if err else out)
    return out


def main():
    """Entry point for the pkgutil module.

    Parses arguments, runs the requested install/upgrade/remove action and
    reports a changed flag derived from whether pkgutil actually ran.
    """
    module = AnsibleModule(
        argument_spec=dict(
            name=dict(required=True),
            state=dict(required=True, choices=['present', 'absent', 'latest']),
            site=dict(default=None),
            update_catalog=dict(required=False, default=False, type='bool'),
        ),
        supports_check_mode=True
    )
    name = module.params['name']
    state = module.params['state']
    site = module.params['site']
    update_catalog = module.params['update_catalog']
    # rc stays None when no pkgutil command is executed at all.
    rc = None
    out = ''
    err = ''
    result = {}
    result['name'] = name
    result['state'] = state

    if state == 'present':
        if not package_installed(module, name):
            if module.check_mode:
                module.exit_json(changed=True)
            (rc, out, err) = package_install(module, state, name, site, update_catalog)
            out = _report_failure(module, rc, out, err)

    elif state == 'latest':
        if not package_installed(module, name):
            # Not installed yet: a plain (forced) install brings it to latest.
            if module.check_mode:
                module.exit_json(changed=True)
            (rc, out, err) = package_install(module, state, name, site, update_catalog)
            out = _report_failure(module, rc, out, err)
        else:
            # Installed: upgrade only when the catalog has a newer version.
            if not package_latest(module, name, site):
                if module.check_mode:
                    module.exit_json(changed=True)
                (rc, out, err) = package_upgrade(module, name, site, update_catalog)
                out = _report_failure(module, rc, out, err)

    elif state == 'absent':
        if package_installed(module, name):
            if module.check_mode:
                module.exit_json(changed=True)
            (rc, out, err) = package_uninstall(module, name)
            out = _report_failure(module, rc, out, err)

    if rc is None:
        # pkgutil was not executed because the package was already present/absent
        result['changed'] = False
    elif rc == 0:
        result['changed'] = True
    else:
        result['changed'] = False
        result['failed'] = True

    if out:
        result['stdout'] = out
    if err:
        result['stderr'] = err

    module.exit_json(**result)


if __name__ == '__main__':
    main()
|
||||
527
plugins/modules/packaging/os/portage.py
Normal file
527
plugins/modules/packaging/os/portage.py
Normal file
@@ -0,0 +1,527 @@
|
||||
#!/usr/bin/python
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
# (c) 2016, William L Thomson Jr
|
||||
# (c) 2013, Yap Sok Ann
|
||||
# Written by Yap Sok Ann <sokann@gmail.com>
|
||||
# Modified by William L. Thomson Jr. <wlt@o-sinc.com>
|
||||
# Based on apt module written by Matthew Williams <matthew@flowroute.com>
|
||||
#
|
||||
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||
|
||||
from __future__ import absolute_import, division, print_function
|
||||
__metaclass__ = type
|
||||
|
||||
|
||||
ANSIBLE_METADATA = {'metadata_version': '1.1',
|
||||
'status': ['preview'],
|
||||
'supported_by': 'community'}
|
||||
|
||||
|
||||
DOCUMENTATION = '''
|
||||
---
|
||||
module: portage
|
||||
short_description: Package manager for Gentoo
|
||||
description:
|
||||
- Manages Gentoo packages
|
||||
|
||||
options:
|
||||
package:
|
||||
description:
|
||||
- Package atom or set, e.g. C(sys-apps/foo) or C(>foo-2.13) or C(@world)
|
||||
type: list
|
||||
elements: str
|
||||
|
||||
state:
|
||||
description:
|
||||
- State of the package atom
|
||||
default: "present"
|
||||
choices: [ "present", "installed", "emerged", "absent", "removed", "unmerged", "latest" ]
|
||||
|
||||
update:
|
||||
description:
|
||||
- Update packages to the best version available (--update)
|
||||
type: bool
|
||||
default: 'no'
|
||||
|
||||
deep:
|
||||
description:
|
||||
- Consider the entire dependency tree of packages (--deep)
|
||||
type: bool
|
||||
default: 'no'
|
||||
|
||||
newuse:
|
||||
description:
|
||||
- Include installed packages where USE flags have changed (--newuse)
|
||||
type: bool
|
||||
default: 'no'
|
||||
|
||||
changed_use:
|
||||
description:
|
||||
- Include installed packages where USE flags have changed, except when
|
||||
- flags that the user has not enabled are added or removed
|
||||
- (--changed-use)
|
||||
type: bool
|
||||
default: 'no'
|
||||
|
||||
oneshot:
|
||||
description:
|
||||
- Do not add the packages to the world file (--oneshot)
|
||||
type: bool
|
||||
default: 'no'
|
||||
|
||||
noreplace:
|
||||
description:
|
||||
- Do not re-emerge installed packages (--noreplace)
|
||||
type: bool
|
||||
default: 'yes'
|
||||
|
||||
nodeps:
|
||||
description:
|
||||
- Only merge packages but not their dependencies (--nodeps)
|
||||
type: bool
|
||||
default: 'no'
|
||||
|
||||
onlydeps:
|
||||
description:
|
||||
- Only merge packages' dependencies but not the packages (--onlydeps)
|
||||
type: bool
|
||||
default: 'no'
|
||||
|
||||
depclean:
|
||||
description:
|
||||
- Remove packages not needed by explicitly merged packages (--depclean)
|
||||
- If no package is specified, clean up the world's dependencies
|
||||
- Otherwise, --depclean serves as a dependency aware version of --unmerge
|
||||
type: bool
|
||||
default: 'no'
|
||||
|
||||
quiet:
|
||||
description:
|
||||
- Run emerge in quiet mode (--quiet)
|
||||
type: bool
|
||||
default: 'no'
|
||||
|
||||
verbose:
|
||||
description:
|
||||
- Run emerge in verbose mode (--verbose)
|
||||
type: bool
|
||||
default: 'no'
|
||||
|
||||
sync:
|
||||
description:
|
||||
- Sync package repositories first
|
||||
- If yes, perform "emerge --sync"
|
||||
- If web, perform "emerge-webrsync"
|
||||
choices: [ "web", "yes", "no" ]
|
||||
|
||||
getbinpkg:
|
||||
description:
|
||||
- Prefer packages specified at PORTAGE_BINHOST in make.conf
|
||||
type: bool
|
||||
default: 'no'
|
||||
|
||||
usepkgonly:
|
||||
description:
|
||||
- Merge only binaries (no compiling). This sets getbinpkg=yes.
|
||||
type: bool
|
||||
default: 'no'
|
||||
|
||||
keepgoing:
|
||||
description:
|
||||
- Continue as much as possible after an error.
|
||||
type: bool
|
||||
default: 'no'
|
||||
|
||||
jobs:
|
||||
description:
|
||||
- Specifies the number of packages to build simultaneously.
|
||||
- "Since version 2.6: Value of 0 or False resets any previously added"
|
||||
- --jobs setting values
|
||||
|
||||
loadavg:
|
||||
description:
|
||||
- Specifies that no new builds should be started if there are
|
||||
- other builds running and the load average is at least LOAD
|
||||
- "Since version 2.6: Value of 0 or False resets any previously added"
|
||||
- --load-average setting values
|
||||
|
||||
quietbuild:
|
||||
description:
|
||||
- Redirect all build output to logs alone, and do not display it
|
||||
- on stdout (--quiet-build)
|
||||
type: bool
|
||||
default: 'no'
|
||||
|
||||
quietfail:
|
||||
description:
|
||||
- Suppresses display of the build log on stdout (--quiet-fail)
|
||||
- Only the die message and the path of the build log will be
|
||||
- displayed on stdout.
|
||||
type: bool
|
||||
default: 'no'
|
||||
|
||||
requirements: [ gentoolkit ]
|
||||
author:
|
||||
- "William L Thomson Jr (@wltjr)"
|
||||
- "Yap Sok Ann (@sayap)"
|
||||
- "Andrew Udvare (@Tatsh)"
|
||||
'''
|
||||
|
||||
EXAMPLES = '''
|
||||
# Make sure package foo is installed
|
||||
- portage:
|
||||
package: foo
|
||||
state: present
|
||||
|
||||
# Make sure package foo is not installed
|
||||
- portage:
|
||||
package: foo
|
||||
state: absent
|
||||
|
||||
# Update package foo to the "latest" version ( os specific alternative to latest )
|
||||
- portage:
|
||||
package: foo
|
||||
update: yes
|
||||
|
||||
# Install package foo using PORTAGE_BINHOST setup
|
||||
- portage:
|
||||
package: foo
|
||||
getbinpkg: yes
|
||||
|
||||
# Re-install world from binary packages only and do not allow any compiling
|
||||
- portage:
|
||||
package: '@world'
|
||||
usepkgonly: yes
|
||||
|
||||
# Sync repositories and update world
|
||||
- portage:
|
||||
package: '@world'
|
||||
update: yes
|
||||
deep: yes
|
||||
sync: yes
|
||||
|
||||
# Remove unneeded packages
|
||||
- portage:
|
||||
depclean: yes
|
||||
|
||||
# Remove package foo if it is not explicitly needed
|
||||
- portage:
|
||||
package: foo
|
||||
state: absent
|
||||
depclean: yes
|
||||
'''
|
||||
|
||||
import os
|
||||
import re
|
||||
|
||||
from ansible.module_utils.basic import AnsibleModule
|
||||
from ansible.module_utils._text import to_native
|
||||
|
||||
|
||||
def query_package(module, package, action):
    """Dispatch to the set or atom query depending on the package spec."""
    query = query_set if package.startswith('@') else query_atom
    return query(module, package, action)
|
||||
|
||||
|
||||
def query_atom(module, atom, action):
    """Return True when *atom* is installed according to equery."""
    rc, dummy_out, dummy_err = module.run_command(
        '%s list %s' % (module.equery_path, atom))
    return rc == 0
|
||||
|
||||
|
||||
def query_set(module, set, action):
    """Return True when the package set is recorded in the world sets file.

    Built-in system sets are never reported as installed; attempting to
    unmerge one is a hard error.
    """
    system_sets = (
        '@live-rebuild',
        '@module-rebuild',
        '@preserved-rebuild',
        '@security',
        '@selected',
        '@system',
        '@world',
        '@x11-module-rebuild',
    )

    if set in system_sets:
        if action == 'unmerge':
            module.fail_json(msg='set %s cannot be removed' % set)
        return False

    world_sets_path = '/var/lib/portage/world_sets'
    if not os.path.exists(world_sets_path):
        return False

    # Substring search of the world sets file, mirroring a manual grep.
    rc, dummy_out, dummy_err = module.run_command(
        'grep %s %s' % (set, world_sets_path))
    return rc == 0
|
||||
|
||||
|
||||
def sync_repositories(module, webrsync=False):
    """Run ``emerge --sync`` (or ``emerge-webrsync``) and fail on error.

    Check mode is not supported: a repository sync cannot be simulated.
    """
    if module.check_mode:
        module.exit_json(msg='check mode not supported by sync')

    if webrsync:
        cmd = '%s --quiet' % module.get_bin_path('emerge-webrsync', required=True)
    else:
        cmd = '%s --sync --quiet --ask=n' % module.emerge_path

    rc, dummy_out, dummy_err = module.run_command(cmd)
    if rc != 0:
        module.fail_json(msg='could not sync package repositories')
|
||||
|
||||
|
||||
# Note: In the 3 functions below, equery is done one-by-one, but emerge is done
|
||||
# in one go. If that is not desirable, split the packages into multiple tasks
|
||||
# instead of joining them together with comma.
|
||||
|
||||
|
||||
def emerge_packages(module, packages):
    """Run emerge command against given list of atoms."""
    p = module.params

    # Short-circuit: with --noreplace semantics and no update/latest request,
    # exit early when every atom is already installed.  The for/else only
    # falls through to exit_json when no missing package was found, and in
    # check mode we can already report the outcome here.
    if p['noreplace'] and not (p['update'] or p['state'] == 'latest'):
        for package in packages:
            if p['noreplace'] and not query_package(module, package, 'emerge'):
                break
        else:
            module.exit_json(changed=False, msg='Packages already present.')
        if module.check_mode:
            module.exit_json(changed=True, msg='Packages would be installed.')

    args = []
    # Boolean module options that map 1:1 onto emerge switches.
    emerge_flags = {
        'update': '--update',
        'deep': '--deep',
        'newuse': '--newuse',
        'changed_use': '--changed-use',
        'oneshot': '--oneshot',
        'noreplace': '--noreplace',
        'nodeps': '--nodeps',
        'onlydeps': '--onlydeps',
        'quiet': '--quiet',
        'verbose': '--verbose',
        'getbinpkg': '--getbinpkg',
        'usepkgonly': '--usepkgonly',
        'usepkg': '--usepkg',
        'keepgoing': '--keep-going',
        'quietbuild': '--quiet-build',
        'quietfail': '--quiet-fail',
    }
    for flag, arg in emerge_flags.items():
        if p[flag]:
            args.append(arg)

    # state=latest implies --update even when the update option is off.
    if p['state'] and p['state'] == 'latest':
        args.append("--update")

    if p['usepkg'] and p['usepkgonly']:
        module.fail_json(msg='Use only one of usepkg, usepkgonly')

    # Valued options: None means "omit entirely", a falsy value (0 / 0.0)
    # means "pass the bare flag", anything else becomes a flag/value pair.
    emerge_flags = {
        'jobs': '--jobs',
        'loadavg': '--load-average',
    }

    for flag, arg in emerge_flags.items():
        flag_val = p[flag]

        if flag_val is None:
            """Fallback to default: don't use this argument at all."""
            continue

        if not flag_val:
            """If the value is 0 or 0.0: add the flag, but not the value."""
            args.append(arg)
            continue

        """Add the --flag=value pair."""
        args.extend((arg, to_native(flag_val)))

    cmd, (rc, out, err) = run_emerge(module, packages, *args)
    if rc != 0:
        module.fail_json(
            cmd=cmd, rc=rc, stdout=out, stderr=err,
            msg='Packages not installed.',
        )

    # Check for SSH error with PORTAGE_BINHOST, since rc is still 0 despite
    # this error
    if (p['usepkgonly'] or p['getbinpkg']) \
            and 'Permission denied (publickey).' in err:
        module.fail_json(
            cmd=cmd, rc=rc, stdout=out, stderr=err,
            msg='Please check your PORTAGE_BINHOST configuration in make.conf '
                'and your SSH authorized_keys file',
        )

    # Decide "changed" by scanning emerge's output: a real run prints
    # ">>> Emerging ... (1 of" once work starts; a check-mode (--pretend)
    # run instead lists "[binary ...]" / "[ebuild ...]" lines.  The for/else
    # hits the else branch only when neither pattern appears.
    changed = True
    for line in out.splitlines():
        if re.match(r'(?:>+) Emerging (?:binary )?\(1 of', line):
            msg = 'Packages installed.'
            break
        elif module.check_mode and re.match(r'\[(binary|ebuild)', line):
            msg = 'Packages would be installed.'
            break
    else:
        changed = False
        msg = 'No packages installed.'

    module.exit_json(
        changed=changed, cmd=cmd, rc=rc, stdout=out, stderr=err,
        msg=msg,
    )
|
||||
|
||||
|
||||
def unmerge_packages(module, packages):
    """Unmerge the given atoms; exits the module with the result."""
    p = module.params

    # Nothing to do when none of the atoms is currently installed.
    if not any(query_package(module, package, 'unmerge') for package in packages):
        module.exit_json(changed=False, msg='Packages already absent.')

    args = ['--unmerge']
    for flag in ('quiet', 'verbose'):
        if p[flag]:
            args.append('--%s' % flag)

    cmd, (rc, out, err) = run_emerge(module, packages, *args)

    if rc != 0:
        module.fail_json(
            cmd=cmd, rc=rc, stdout=out, stderr=err,
            msg='Packages not removed.',
        )

    module.exit_json(
        changed=True, cmd=cmd, rc=rc, stdout=out, stderr=err,
        msg='Packages removed.',
    )
|
||||
|
||||
|
||||
def cleanup_packages(module, packages):
    """Run ``emerge --depclean`` (optionally limited to *packages*) and exit
    the module, reporting whether anything was actually removed."""
    p = module.params

    # With an explicit package list, skip the run entirely when none of the
    # packages is installed.  An empty list means "clean the whole world".
    if packages and not any(query_package(module, package, 'unmerge') for package in packages):
        module.exit_json(changed=False, msg='Packages already absent.')

    args = ['--depclean']
    args.extend('--%s' % flag for flag in ('quiet', 'verbose') if p[flag])

    cmd, (rc, out, err) = run_emerge(module, packages, *args)
    if rc != 0:
        module.fail_json(cmd=cmd, rc=rc, stdout=out, stderr=err)

    # emerge prints e.g. "Number removed:       3"; the last such line wins.
    removed = 0
    for line in out.splitlines():
        if line.startswith('Number removed:'):
            removed = int(line.split(':')[1].strip())

    module.exit_json(
        changed=removed > 0, cmd=cmd, rc=rc, stdout=out, stderr=err,
        msg='Depclean completed.',
    )
|
||||
|
||||
|
||||
def run_emerge(module, packages, *args):
    """Execute emerge with *args* plus the package list.

    Never prompts (--ask=n) and honours check mode via --pretend.
    Returns ``(cmd, (rc, stdout, stderr))``.
    """
    emerge_args = list(args) + ['--ask=n']
    if module.check_mode:
        emerge_args.append('--pretend')

    cmd = [module.emerge_path] + emerge_args + packages
    return cmd, module.run_command(cmd)
|
||||
|
||||
|
||||
# Module 'state' values grouped by the emerge action they imply: the first
# list maps to install/emerge, the second to unmerge/depclean.
portage_present_states = ['present', 'emerged', 'installed', 'latest']
portage_absent_states = ['absent', 'unmerged', 'removed']
|
||||
|
||||
|
||||
def main():
    """Entry point for the portage module: parse arguments, optionally sync
    the repositories, then emerge/unmerge/depclean as requested."""
    module = AnsibleModule(
        argument_spec=dict(
            package=dict(type='list', elements='str', default=None, aliases=['name']),
            state=dict(
                default=portage_present_states[0],
                choices=portage_present_states + portage_absent_states,
            ),
            update=dict(default=False, type='bool'),
            deep=dict(default=False, type='bool'),
            newuse=dict(default=False, type='bool'),
            changed_use=dict(default=False, type='bool'),
            oneshot=dict(default=False, type='bool'),
            noreplace=dict(default=True, type='bool'),
            nodeps=dict(default=False, type='bool'),
            onlydeps=dict(default=False, type='bool'),
            depclean=dict(default=False, type='bool'),
            quiet=dict(default=False, type='bool'),
            verbose=dict(default=False, type='bool'),
            sync=dict(default=None, choices=['yes', 'web', 'no']),
            getbinpkg=dict(default=False, type='bool'),
            usepkgonly=dict(default=False, type='bool'),
            usepkg=dict(default=False, type='bool'),
            keepgoing=dict(default=False, type='bool'),
            jobs=dict(default=None, type='int'),
            loadavg=dict(default=None, type='float'),
            quietbuild=dict(default=False, type='bool'),
            quietfail=dict(default=False, type='bool'),
        ),
        required_one_of=[['package', 'sync', 'depclean']],
        mutually_exclusive=[
            ['nodeps', 'onlydeps'],
            ['quiet', 'verbose'],
            ['quietbuild', 'verbose'],
            ['quietfail', 'verbose'],
        ],
        supports_check_mode=True,
    )

    # Helper functions read these paths from the module object.
    module.emerge_path = module.get_bin_path('emerge', required=True)
    module.equery_path = module.get_bin_path('equery', required=True)

    p = module.params

    # Sync first when requested; with no packages given, syncing is the
    # whole job and the module exits here.
    if p['sync'] and p['sync'].strip() != 'no':
        sync_repositories(module, webrsync=(p['sync'] == 'web'))
        if not p['package']:
            module.exit_json(msg='Sync successfully finished.')

    packages = []
    if p['package']:
        packages.extend(p['package'])

    if p['depclean']:
        # depclean with an explicit package list only makes sense when the
        # requested state is a removal state.
        if packages and p['state'] not in portage_absent_states:
            module.fail_json(
                msg='Depclean can only be used with package when the state is '
                    'one of: %s' % portage_absent_states,
            )

        cleanup_packages(module, packages)

    elif p['state'] in portage_present_states:
        emerge_packages(module, packages)

    elif p['state'] in portage_absent_states:
        unmerge_packages(module, packages)


if __name__ == '__main__':
    main()
|
||||
212
plugins/modules/packaging/os/portinstall.py
Normal file
212
plugins/modules/packaging/os/portinstall.py
Normal file
@@ -0,0 +1,212 @@
|
||||
#!/usr/bin/python
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
# (c) 2013, berenddeboer
|
||||
# Written by berenddeboer <berend@pobox.com>
|
||||
# Based on pkgng module written by bleader <bleader at ratonland.org>
|
||||
#
|
||||
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||
|
||||
from __future__ import absolute_import, division, print_function
|
||||
__metaclass__ = type
|
||||
|
||||
|
||||
ANSIBLE_METADATA = {'metadata_version': '1.1',
|
||||
'status': ['preview'],
|
||||
'supported_by': 'community'}
|
||||
|
||||
|
||||
DOCUMENTATION = '''
|
||||
---
|
||||
module: portinstall
|
||||
short_description: Installing packages from FreeBSD's ports system
|
||||
description:
|
||||
- Manage packages for FreeBSD using 'portinstall'.
|
||||
options:
|
||||
name:
|
||||
description:
|
||||
- name of package to install/remove
|
||||
required: true
|
||||
state:
|
||||
description:
|
||||
- state of the package
|
||||
choices: [ 'present', 'absent' ]
|
||||
required: false
|
||||
default: present
|
||||
use_packages:
|
||||
description:
|
||||
- use packages instead of ports whenever available
|
||||
type: bool
|
||||
required: false
|
||||
default: yes
|
||||
author: "berenddeboer (@berenddeboer)"
|
||||
'''
|
||||
|
||||
EXAMPLES = '''
|
||||
# Install package foo
|
||||
- portinstall:
|
||||
name: foo
|
||||
state: present
|
||||
|
||||
# Install package security/cyrus-sasl2-saslauthd
|
||||
- portinstall:
|
||||
name: security/cyrus-sasl2-saslauthd
|
||||
state: present
|
||||
|
||||
# Remove packages foo and bar
|
||||
- portinstall:
|
||||
name: foo,bar
|
||||
state: absent
|
||||
'''
|
||||
|
||||
import os
|
||||
import re
|
||||
import sys
|
||||
|
||||
from ansible.module_utils.basic import AnsibleModule
|
||||
from ansible.module_utils.six.moves import shlex_quote
|
||||
|
||||
|
||||
def query_package(module, name):
    """Return True when *name* appears to be installed.

    Works with both the legacy pkg_* tools and pkgng.  Because e.g.
    databases/mysql55-client installs as mysql-client, a second lookup is
    attempted with all digits stripped from the name when the first one
    fails (FreeBSD has no fool-proof way of checking this).
    """
    pkg_info_path = module.get_bin_path('pkg_info', False)

    # Assume that if we have pkg_info, we haven't upgraded to pkgng
    if pkg_info_path:
        pkg_glob_path = module.get_bin_path('pkg_glob', True)
        rc, out, err = module.run_command("%s -e `pkg_glob %s`" % (pkg_info_path, shlex_quote(name)), use_unsafe_shell=True)
    else:
        pkg_info_path = module.get_bin_path('pkg', True) + " info"
        rc, out, err = module.run_command("%s %s" % (pkg_info_path, name))

    found = rc == 0

    if not found:
        # Retry with digits stripped from the name.  The original code
        # branched on pkgng here but ran the identical command in both
        # branches, so the dead branch has been collapsed.
        name_without_digits = re.sub('[0-9]', '', name)
        if name != name_without_digits:
            rc, out, err = module.run_command("%s %s" % (pkg_info_path, name_without_digits))
            found = rc == 0

    return found
|
||||
|
||||
|
||||
def matching_packages(module, name):
    """Count ports matching *name* via ports_glob; when nothing matches,
    retry once with all digits stripped from the name."""
    ports_glob_path = module.get_bin_path('ports_glob', True)
    dummy_rc, out, dummy_err = module.run_command("%s %s" % (ports_glob_path, name))
    # Every match is printed on its own line, so newlines == match count.
    occurrences = out.count('\n')
    if occurrences == 0:
        stripped = re.sub('[0-9]', '', name)
        if stripped != name:
            dummy_rc, out, dummy_err = module.run_command("%s %s" % (ports_glob_path, stripped))
            occurrences = out.count('\n')
    return occurrences
|
||||
|
||||
|
||||
def remove_packages(module, packages):
    """Delete each installed package in *packages*; exits the module with a
    summary of how many were removed."""
    pkg_glob_path = module.get_bin_path('pkg_glob', True)

    # If pkg_delete is not found, assume the system uses pkgng.
    delete_cmd = module.get_bin_path('pkg_delete', False)
    if not delete_cmd:
        delete_cmd = module.get_bin_path('pkg', True) + " delete -y"

    def _delete(glob_name):
        # Expand the name with pkg_glob and feed the result to the deleter.
        return module.run_command(
            "%s `%s %s`" % (delete_cmd, pkg_glob_path, shlex_quote(glob_name)),
            use_unsafe_shell=True)

    removed = 0
    # One package at a time so a failure can name the culprit.
    for package in packages:
        if not query_package(module, package):
            continue

        dummy_rc, out, dummy_err = _delete(package)

        if query_package(module, package):
            # Retry with digits stripped (e.g. mysql55-client -> mysql-client).
            dummy_rc, out, dummy_err = _delete(re.sub('[0-9]', '', package))
            if query_package(module, package):
                module.fail_json(msg="failed to remove %s: %s" % (package, out))

        removed += 1

    if removed > 0:
        module.exit_json(changed=True, msg="removed %s package(s)" % removed)

    module.exit_json(changed=False, msg="package(s) already absent")
|
||||
|
||||
|
||||
def install_packages(module, packages, use_packages):
    """Install each missing package via portinstall; exits the module with
    a summary.  Bootstraps portupgrade through pkg when necessary."""
    # If portinstall is absent, automagically install it first.
    portinstall_path = module.get_bin_path('portinstall', False)
    if not portinstall_path:
        pkg_path = module.get_bin_path('pkg', False)
        if pkg_path:
            module.run_command("pkg install -y portupgrade")
        portinstall_path = module.get_bin_path('portinstall', True)

    portinstall_params = "--use-packages" if use_packages else ""

    installed = 0
    for package in packages:
        if query_package(module, package):
            continue

        # TODO: check how many match
        matches = matching_packages(module, package)
        if matches == 1:
            dummy_rc, out, dummy_err = module.run_command(
                "%s --batch %s %s" % (portinstall_path, portinstall_params, package))
            if not query_package(module, package):
                module.fail_json(msg="failed to install %s: %s" % (package, out))
        elif matches == 0:
            module.fail_json(msg="no matches for package %s" % (package))
        else:
            module.fail_json(msg="%s matches found for package name %s" % (matches, package))

        installed += 1

    if installed > 0:
        module.exit_json(changed=True, msg="present %s package(s)" % (installed))

    module.exit_json(changed=False, msg="package(s) already present")
|
||||
|
||||
|
||||
def main():
    """Entry point: parse parameters and dispatch to install/remove.

    Parameters:
      state        -- "present" to install, "absent" to remove.
      name (pkg)   -- comma-separated list of port names.
      use_packages -- prefer prebuilt packages over building from ports.

    Exits via the exit_json/fail_json calls inside the dispatched helpers.
    """
    module = AnsibleModule(
        argument_spec=dict(
            state=dict(default="present", choices=["present", "absent"]),
            name=dict(aliases=["pkg"], required=True),
            use_packages=dict(type='bool', default='yes')))

    p = module.params

    # Split the comma-separated name list and strip surrounding whitespace,
    # so "foo, bar" resolves the port "bar" rather than " bar" (which would
    # never match and previously caused spurious failures).
    pkgs = [pkg.strip() for pkg in p["name"].split(",")]

    if p["state"] == "present":
        install_packages(module, pkgs, p["use_packages"])

    elif p["state"] == "absent":
        remove_packages(module, pkgs)
|
||||
# Run the module entry point when executed directly by Ansible.
if __name__ == '__main__':
    main()
|
||||
743
plugins/modules/packaging/os/pulp_repo.py
Normal file
743
plugins/modules/packaging/os/pulp_repo.py
Normal file
@@ -0,0 +1,743 @@
|
||||
#!/usr/bin/python
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
# (c) 2016, Joe Adams <@sysadmind>
|
||||
#
|
||||
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||
|
||||
from __future__ import absolute_import, division, print_function
|
||||
__metaclass__ = type
|
||||
|
||||
|
||||
ANSIBLE_METADATA = {'metadata_version': '1.1',
|
||||
'status': ['preview'],
|
||||
'supported_by': 'community'}
|
||||
|
||||
|
||||
DOCUMENTATION = '''
|
||||
---
|
||||
module: pulp_repo
|
||||
author: "Joe Adams (@sysadmind)"
|
||||
short_description: Add or remove Pulp repos from a remote host.
|
||||
description:
|
||||
- Add or remove Pulp repos from a remote host.
|
||||
options:
|
||||
add_export_distributor:
|
||||
description:
|
||||
- Whether or not to add the export distributor to new C(rpm) repositories.
|
||||
type: bool
|
||||
default: 'no'
|
||||
feed:
|
||||
description:
|
||||
- Upstream feed URL to receive updates from.
|
||||
force_basic_auth:
|
||||
description:
|
||||
- httplib2, the library used by the M(uri) module only sends
|
||||
authentication information when a webservice responds to an initial
|
||||
request with a 401 status. Since some basic auth services do not
|
||||
properly send a 401, logins will fail. This option forces the sending of
|
||||
the Basic authentication header upon initial request.
|
||||
type: bool
|
||||
default: 'no'
|
||||
generate_sqlite:
|
||||
description:
|
||||
- Boolean flag to indicate whether sqlite files should be generated during
|
||||
a repository publish.
|
||||
required: false
|
||||
type: bool
|
||||
default: 'no'
|
||||
feed_ca_cert:
|
||||
description:
|
||||
- CA certificate string used to validate the feed source SSL certificate.
|
||||
This can be the file content or the path to the file.
|
||||
The ca_cert alias will be removed in Ansible 2.14.
|
||||
type: str
|
||||
aliases: [ importer_ssl_ca_cert, ca_cert ]
|
||||
feed_client_cert:
|
||||
description:
|
||||
- Certificate used as the client certificate when synchronizing the
|
||||
repository. This is used to communicate authentication information to
|
||||
the feed source. The value to this option must be the full path to the
|
||||
certificate. The specified file may be the certificate itself or a
|
||||
single file containing both the certificate and private key. This can be
|
||||
the file content or the path to the file.
|
||||
- If not specified the default value will come from client_cert. Which will
|
||||
change in Ansible 2.14.
|
||||
type: str
|
||||
aliases: [ importer_ssl_client_cert ]
|
||||
feed_client_key:
|
||||
description:
|
||||
- Private key to the certificate specified in I(importer_ssl_client_cert),
|
||||
assuming it is not included in the certificate file itself. This can be
|
||||
the file content or the path to the file.
|
||||
- If not specified the default value will come from client_key. Which will
|
||||
change in Ansible 2.14.
|
||||
type: str
|
||||
aliases: [ importer_ssl_client_key ]
|
||||
name:
|
||||
description:
|
||||
- Name of the repo to add or remove. This correlates to repo-id in Pulp.
|
||||
required: true
|
||||
proxy_host:
|
||||
description:
|
||||
- Proxy url setting for the pulp repository importer. This is in the
|
||||
format scheme://host.
|
||||
required: false
|
||||
default: null
|
||||
proxy_port:
|
||||
description:
|
||||
- Proxy port setting for the pulp repository importer.
|
||||
required: false
|
||||
default: null
|
||||
proxy_username:
|
||||
description:
|
||||
- Proxy username for the pulp repository importer.
|
||||
required: false
|
||||
default: null
|
||||
proxy_password:
|
||||
description:
|
||||
- Proxy password for the pulp repository importer.
|
||||
required: false
|
||||
default: null
|
||||
publish_distributor:
|
||||
description:
|
||||
- Distributor to use when state is C(publish). The default is to
|
||||
publish all distributors.
|
||||
pulp_host:
|
||||
description:
|
||||
- URL of the pulp server to connect to.
|
||||
default: https://127.0.0.1
|
||||
relative_url:
|
||||
description:
|
||||
- Relative URL for the local repository.
|
||||
required: true
|
||||
repo_type:
|
||||
description:
|
||||
- Repo plugin type to use (i.e. C(rpm), C(docker)).
|
||||
default: rpm
|
||||
repoview:
|
||||
description:
|
||||
- Whether to generate repoview files for a published repository. Setting
|
||||
this to "yes" automatically activates `generate_sqlite`.
|
||||
required: false
|
||||
type: bool
|
||||
default: 'no'
|
||||
serve_http:
|
||||
description:
|
||||
- Make the repo available over HTTP.
|
||||
type: bool
|
||||
default: 'no'
|
||||
serve_https:
|
||||
description:
|
||||
- Make the repo available over HTTPS.
|
||||
type: bool
|
||||
default: 'yes'
|
||||
state:
|
||||
description:
|
||||
- The repo state. A state of C(sync) will queue a sync of the repo.
|
||||
This is asynchronous but not delayed like a scheduled sync. A state of
|
||||
C(publish) will use the repository's distributor to publish the content.
|
||||
default: present
|
||||
choices: [ "present", "absent", "sync", "publish" ]
|
||||
url_password:
|
||||
description:
|
||||
- The password for use in HTTP basic authentication to the pulp API.
|
||||
If the I(url_username) parameter is not specified, the I(url_password)
|
||||
parameter will not be used.
|
||||
url_username:
|
||||
description:
|
||||
- The username for use in HTTP basic authentication to the pulp API.
|
||||
validate_certs:
|
||||
description:
|
||||
- If C(no), SSL certificates will not be validated. This should only be
|
||||
used on personally controlled sites using self-signed certificates.
|
||||
type: bool
|
||||
default: 'yes'
|
||||
wait_for_completion:
|
||||
description:
|
||||
- Wait for asynchronous tasks to complete before returning.
|
||||
type: bool
|
||||
default: 'no'
|
||||
notes:
|
||||
- This module can currently only create distributors and importers on rpm
|
||||
repositories. Contributions to support other repo types are welcome.
|
||||
extends_documentation_fragment:
|
||||
- url
|
||||
'''
|
||||
|
||||
EXAMPLES = '''
|
||||
- name: Create a new repo with name 'my_repo'
|
||||
pulp_repo:
|
||||
name: my_repo
|
||||
relative_url: my/repo
|
||||
state: present
|
||||
|
||||
- name: Create a repo with a feed and a relative URL
|
||||
pulp_repo:
|
||||
name: my_centos_updates
|
||||
repo_type: rpm
|
||||
feed: http://mirror.centos.org/centos/6/updates/x86_64/
|
||||
relative_url: centos/6/updates
|
||||
url_username: admin
|
||||
url_password: admin
|
||||
force_basic_auth: yes
|
||||
state: present
|
||||
|
||||
- name: Remove a repo from the pulp server
|
||||
pulp_repo:
|
||||
name: my_old_repo
|
||||
repo_type: rpm
|
||||
state: absent
|
||||
'''
|
||||
|
||||
RETURN = '''
|
||||
repo:
|
||||
description: Name of the repo that the action was performed on.
|
||||
returned: success
|
||||
type: str
|
||||
sample: my_repo
|
||||
'''
|
||||
|
||||
import json
|
||||
import os
|
||||
from time import sleep
|
||||
|
||||
# import module snippets
|
||||
from ansible.module_utils.basic import AnsibleModule
|
||||
from ansible.module_utils.urls import fetch_url
|
||||
from ansible.module_utils.urls import url_argument_spec
|
||||
|
||||
|
||||
class pulp_server(object):
    """
    Thin client for the Pulp v2 REST API on a single server.

    All HTTP traffic goes through Ansible's fetch_url(), so the module's
    url_username/url_password/validate_certs options apply. Any unexpected
    HTTP status aborts the play via module.fail_json(). Repository details
    are fetched once with set_repo_list() and memoized per-repo in
    self.repo_cache by get_repo_config_by_id().
    """

    def __init__(self, module, pulp_host, repo_type, wait_for_completion=False):
        # module: AnsibleModule instance, used for fetch_url() and fail_json()
        # pulp_host: base URL of the Pulp server, e.g. "https://127.0.0.1"
        # repo_type: repo plugin type being managed (e.g. "rpm")
        # wait_for_completion: if true, poll spawned async tasks until done
        self.module = module
        self.host = pulp_host
        self.repo_type = repo_type
        self.repo_cache = dict()
        self.wait_for_completion = wait_for_completion

    def check_repo_exists(self, repo_id):
        """Return True if repo_id exists in the cached repository list.

        Relies on get_repo_config_by_id() raising IndexError (empty match
        list indexed at [0]) when the repo is unknown.
        """
        try:
            self.get_repo_config_by_id(repo_id)
        except IndexError:
            return False
        else:
            return True

    def compare_repo_distributor_config(self, repo_id, **kwargs):
        """Return True if every distributor of the repo matches all kwargs.

        Each keyword must be present in each distributor's config with an
        equal value; any missing key or differing value returns False.
        """
        repo_config = self.get_repo_config_by_id(repo_id)

        for distributor in repo_config['distributors']:
            for key, value in kwargs.items():
                if key not in distributor['config'].keys():
                    return False

                if not distributor['config'][key] == value:
                    return False

        return True

    def compare_repo_importer_config(self, repo_id, **kwargs):
        """Return True if every importer of the repo matches all kwargs.

        Unlike the distributor comparison, keywords whose value is None are
        skipped (treated as "don't care").
        """
        repo_config = self.get_repo_config_by_id(repo_id)

        for importer in repo_config['importers']:
            for key, value in kwargs.items():
                if value is not None:
                    if key not in importer['config'].keys():
                        return False

                    if not importer['config'][key] == value:
                        return False

        return True

    def create_repo(
            self,
            repo_id,
            relative_url,
            feed=None,
            generate_sqlite=False,
            serve_http=False,
            serve_https=True,
            proxy_host=None,
            proxy_port=None,
            proxy_username=None,
            proxy_password=None,
            repoview=False,
            ssl_ca_cert=None,
            ssl_client_cert=None,
            ssl_client_key=None,
            add_export_distributor=False
    ):
        """Create a repository via POST /pulp/api/v2/repositories/.

        For rpm repos, a yum_distributor (auto-publish) is always attached,
        plus an export_distributor when add_export_distributor is set; the
        yum_importer config is populated only from the non-empty keyword
        arguments. Fails the module unless the server answers 201.
        Returns True on success.
        """
        url = "%s/pulp/api/v2/repositories/" % self.host
        data = dict()
        data['id'] = repo_id
        data['distributors'] = []

        # NOTE: distributors/importers are only created for rpm repos; other
        # repo types are created bare (see module docs "notes").
        if self.repo_type == 'rpm':
            yum_distributor = dict()
            yum_distributor['distributor_id'] = "yum_distributor"
            yum_distributor['distributor_type_id'] = "yum_distributor"
            yum_distributor['auto_publish'] = True
            yum_distributor['distributor_config'] = dict()
            yum_distributor['distributor_config']['http'] = serve_http
            yum_distributor['distributor_config']['https'] = serve_https
            yum_distributor['distributor_config']['relative_url'] = relative_url
            yum_distributor['distributor_config']['repoview'] = repoview
            # repoview implies sqlite metadata generation.
            yum_distributor['distributor_config']['generate_sqlite'] = generate_sqlite or repoview
            data['distributors'].append(yum_distributor)

            if add_export_distributor:
                export_distributor = dict()
                export_distributor['distributor_id'] = "export_distributor"
                export_distributor['distributor_type_id'] = "export_distributor"
                export_distributor['auto_publish'] = False
                export_distributor['distributor_config'] = dict()
                export_distributor['distributor_config']['http'] = serve_http
                export_distributor['distributor_config']['https'] = serve_https
                export_distributor['distributor_config']['relative_url'] = relative_url
                export_distributor['distributor_config']['repoview'] = repoview
                export_distributor['distributor_config']['generate_sqlite'] = generate_sqlite or repoview
                data['distributors'].append(export_distributor)

            data['importer_type_id'] = "yum_importer"
            data['importer_config'] = dict()

            if feed:
                data['importer_config']['feed'] = feed

            if proxy_host:
                data['importer_config']['proxy_host'] = proxy_host

            if proxy_port:
                data['importer_config']['proxy_port'] = proxy_port

            if proxy_username:
                data['importer_config']['proxy_username'] = proxy_username

            if proxy_password:
                data['importer_config']['proxy_password'] = proxy_password

            if ssl_ca_cert:
                data['importer_config']['ssl_ca_cert'] = ssl_ca_cert

            if ssl_client_cert:
                data['importer_config']['ssl_client_cert'] = ssl_client_cert

            if ssl_client_key:
                data['importer_config']['ssl_client_key'] = ssl_client_key

            data['notes'] = {
                "_repo-type": "rpm-repo"
            }

        response, info = fetch_url(
            self.module,
            url,
            data=json.dumps(data),
            method='POST')

        # Pulp answers 201 Created on success.
        if info['status'] != 201:
            self.module.fail_json(
                msg="Failed to create repo.",
                status_code=info['status'],
                response=info['msg'],
                url=url)
        else:
            return True

    def delete_repo(self, repo_id):
        """Delete a repository; optionally wait for the spawned async task.

        Pulp answers 202 Accepted and spawns a task. Returns True on success.
        """
        url = "%s/pulp/api/v2/repositories/%s/" % (self.host, repo_id)
        response, info = fetch_url(self.module, url, data='', method='DELETE')

        if info['status'] != 202:
            self.module.fail_json(
                msg="Failed to delete repo.",
                status_code=info['status'],
                response=info['msg'],
                url=url)

        if self.wait_for_completion:
            self.verify_tasks_completed(json.load(response))

        return True

    def get_repo_config_by_id(self, repo_id):
        """Return the cached config dict for repo_id.

        Populates self.repo_cache lazily from self.repo_list (which must
        have been filled by set_repo_list() first). Raises IndexError when
        the repo is not in the list — check_repo_exists() depends on this.
        """
        if repo_id not in self.repo_cache.keys():
            repo_array = [x for x in self.repo_list if x['id'] == repo_id]
            self.repo_cache[repo_id] = repo_array[0]

        return self.repo_cache[repo_id]

    def publish_repo(self, repo_id, publish_distributor):
        """Publish the repo through one distributor, or all when None.

        Each publish POST must return 202 Accepted; the last response's
        spawned tasks are awaited when wait_for_completion is set.
        Returns True on success.
        """
        url = "%s/pulp/api/v2/repositories/%s/actions/publish/" % (self.host, repo_id)

        # If there's no distributor specified, we will publish them all
        if publish_distributor is None:
            repo_config = self.get_repo_config_by_id(repo_id)

            for distributor in repo_config['distributors']:
                data = dict()
                data['id'] = distributor['id']
                response, info = fetch_url(
                    self.module,
                    url,
                    data=json.dumps(data),
                    method='POST')

                if info['status'] != 202:
                    self.module.fail_json(
                        msg="Failed to publish the repo.",
                        status_code=info['status'],
                        response=info['msg'],
                        url=url,
                        distributor=distributor['id'])
        else:
            data = dict()
            data['id'] = publish_distributor
            response, info = fetch_url(
                self.module,
                url,
                data=json.dumps(data),
                method='POST')

            if info['status'] != 202:
                self.module.fail_json(
                    msg="Failed to publish the repo",
                    status_code=info['status'],
                    response=info['msg'],
                    url=url,
                    distributor=publish_distributor)

        if self.wait_for_completion:
            # NOTE(review): when publishing all distributors, only the last
            # POST's spawned tasks are awaited here.
            self.verify_tasks_completed(json.load(response))

        return True

    def sync_repo(self, repo_id):
        """Queue a sync of the repo (202 Accepted expected); return True."""
        url = "%s/pulp/api/v2/repositories/%s/actions/sync/" % (self.host, repo_id)
        response, info = fetch_url(self.module, url, data='', method='POST')

        if info['status'] != 202:
            self.module.fail_json(
                msg="Failed to schedule a sync of the repo.",
                status_code=info['status'],
                response=info['msg'],
                url=url)

        if self.wait_for_completion:
            self.verify_tasks_completed(json.load(response))

        return True

    def update_repo_distributor_config(self, repo_id, **kwargs):
        """PUT the given config keys to every distributor of the repo."""
        url = "%s/pulp/api/v2/repositories/%s/distributors/" % (self.host, repo_id)
        repo_config = self.get_repo_config_by_id(repo_id)

        for distributor in repo_config['distributors']:
            distributor_url = "%s%s/" % (url, distributor['id'])
            data = dict()
            data['distributor_config'] = dict()

            for key, value in kwargs.items():
                data['distributor_config'][key] = value

            response, info = fetch_url(
                self.module,
                distributor_url,
                data=json.dumps(data),
                method='PUT')

            if info['status'] != 202:
                self.module.fail_json(
                    msg="Failed to set the relative url for the repository.",
                    status_code=info['status'],
                    response=info['msg'],
                    url=url)

    def update_repo_importer_config(self, repo_id, **kwargs):
        """POST a full replacement importer config built from non-None kwargs.

        Pulp replaces (not merges) the importer config on this call, which
        is why callers pass the complete desired configuration each time.
        """
        url = "%s/pulp/api/v2/repositories/%s/importers/" % (self.host, repo_id)
        data = dict()
        importer_config = dict()

        for key, value in kwargs.items():
            if value is not None:
                importer_config[key] = value

        data['importer_config'] = importer_config

        if self.repo_type == 'rpm':
            data['importer_type_id'] = "yum_importer"

        response, info = fetch_url(
            self.module,
            url,
            data=json.dumps(data),
            method='POST')

        if info['status'] != 202:
            self.module.fail_json(
                msg="Failed to set the repo importer configuration",
                status_code=info['status'],
                response=info['msg'],
                importer_config=importer_config,
                url=url)

    def set_repo_list(self):
        """Fetch all repositories (with details) into self.repo_list."""
        url = "%s/pulp/api/v2/repositories/?details=true" % self.host
        response, info = fetch_url(self.module, url, method='GET')

        if info['status'] != 200:
            self.module.fail_json(
                msg="Request failed",
                status_code=info['status'],
                response=info['msg'],
                url=url)

        self.repo_list = json.load(response)

    def verify_tasks_completed(self, response_dict):
        """Poll each spawned task every 2s until finished; fail on error.

        Returns True after the first task reaches 'finished'.
        NOTE(review): tasks after the first finished one are not checked —
        presumably only one task is spawned per action; verify against Pulp.
        """
        for task in response_dict['spawned_tasks']:
            task_url = "%s%s" % (self.host, task['_href'])

            while True:
                response, info = fetch_url(
                    self.module,
                    task_url,
                    data='',
                    method='GET')

                if info['status'] != 200:
                    self.module.fail_json(
                        msg="Failed to check async task status.",
                        status_code=info['status'],
                        response=info['msg'],
                        url=task_url)

                task_dict = json.load(response)

                if task_dict['state'] == 'finished':
                    return True

                if task_dict['state'] == 'error':
                    self.module.fail_json(msg="Asynchronous task failed to complete.", error=task_dict['error'])

                sleep(2)
|
||||
|
||||
|
||||
def main():
    """Entry point for the pulp_repo module.

    Flow: build the argument spec on top of url_argument_spec(), apply the
    feed_client_cert/key deprecation shims, inline any certificate file
    paths into their contents, then reconcile the requested state
    (present/absent/sync/publish) against the server, honoring check mode.
    """
    argument_spec = url_argument_spec()
    argument_spec.update(
        add_export_distributor=dict(default=False, type='bool'),
        feed=dict(),
        generate_sqlite=dict(default=False, type='bool'),
        feed_ca_cert=dict(aliases=['importer_ssl_ca_cert', 'ca_cert'], deprecated_aliases=[dict(name='ca_cert', version='2.14')]),
        feed_client_cert=dict(aliases=['importer_ssl_client_cert']),
        feed_client_key=dict(aliases=['importer_ssl_client_key']),
        name=dict(required=True, aliases=['repo']),
        proxy_host=dict(),
        proxy_port=dict(),
        proxy_username=dict(),
        proxy_password=dict(no_log=True),
        publish_distributor=dict(),
        pulp_host=dict(default="https://127.0.0.1"),
        relative_url=dict(),
        repo_type=dict(default="rpm"),
        repoview=dict(default=False, type='bool'),
        serve_http=dict(default=False, type='bool'),
        serve_https=dict(default=True, type='bool'),
        state=dict(
            default="present",
            choices=['absent', 'present', 'sync', 'publish']),
        wait_for_completion=dict(default=False, type="bool"))
    module = AnsibleModule(
        argument_spec=argument_spec,
        supports_check_mode=True)

    add_export_distributor = module.params['add_export_distributor']
    feed = module.params['feed']
    generate_sqlite = module.params['generate_sqlite']
    importer_ssl_ca_cert = module.params['feed_ca_cert']
    importer_ssl_client_cert = module.params['feed_client_cert']
    # Deprecation shim: until Ansible 2.14, fall back to the generic
    # client_cert/client_key url options when the feed_* ones are unset.
    if importer_ssl_client_cert is None and module.params['client_cert'] is not None:
        importer_ssl_client_cert = module.params['client_cert']
        module.deprecate("To specify client certificates to be used with the repo to sync, and not for communication with the "
                         "Pulp instance, use the new options `feed_client_cert` and `feed_client_key` (available since "
                         "Ansible 2.9.2). Until Ansible 2.14, the default value for `feed_client_cert` will be taken from "
                         "`client_cert` if only the latter is specified", version="2.14")
    importer_ssl_client_key = module.params['feed_client_key']
    if importer_ssl_client_key is None and module.params['client_key'] is not None:
        importer_ssl_client_key = module.params['client_key']
        module.deprecate("In Ansible 2.9.2 `feed_client_key` option was added. Until 2.14 the default value will come from client_key option", version="2.14")
    proxy_host = module.params['proxy_host']
    proxy_port = module.params['proxy_port']
    proxy_username = module.params['proxy_username']
    proxy_password = module.params['proxy_password']
    publish_distributor = module.params['publish_distributor']
    pulp_host = module.params['pulp_host']
    relative_url = module.params['relative_url']
    repo = module.params['name']
    repo_type = module.params['repo_type']
    repoview = module.params['repoview']
    serve_http = module.params['serve_http']
    serve_https = module.params['serve_https']
    state = module.params['state']
    wait_for_completion = module.params['wait_for_completion']

    if (state == 'present') and (not relative_url):
        module.fail_json(msg="When state is present, relative_url is required.")

    # Ensure that the importer_ssl_* is the content and not a file path
    if importer_ssl_ca_cert is not None:
        importer_ssl_ca_cert_file_path = os.path.abspath(importer_ssl_ca_cert)
        if os.path.isfile(importer_ssl_ca_cert_file_path):
            importer_ssl_ca_cert_file_object = open(importer_ssl_ca_cert_file_path, 'r')
            try:
                importer_ssl_ca_cert = importer_ssl_ca_cert_file_object.read()
            finally:
                importer_ssl_ca_cert_file_object.close()

    if importer_ssl_client_cert is not None:
        importer_ssl_client_cert_file_path = os.path.abspath(importer_ssl_client_cert)
        if os.path.isfile(importer_ssl_client_cert_file_path):
            importer_ssl_client_cert_file_object = open(importer_ssl_client_cert_file_path, 'r')
            try:
                importer_ssl_client_cert = importer_ssl_client_cert_file_object.read()
            finally:
                importer_ssl_client_cert_file_object.close()

    if importer_ssl_client_key is not None:
        importer_ssl_client_key_file_path = os.path.abspath(importer_ssl_client_key)
        if os.path.isfile(importer_ssl_client_key_file_path):
            importer_ssl_client_key_file_object = open(importer_ssl_client_key_file_path, 'r')
            try:
                importer_ssl_client_key = importer_ssl_client_key_file_object.read()
            finally:
                importer_ssl_client_key_file_object.close()

    # One repo-list fetch up front; all later lookups hit the cache.
    server = pulp_server(module, pulp_host, repo_type, wait_for_completion=wait_for_completion)
    server.set_repo_list()
    repo_exists = server.check_repo_exists(repo)

    changed = False

    if state == 'absent' and repo_exists:
        if not module.check_mode:
            server.delete_repo(repo)

        changed = True

    # sync/publish always report changed=True when the repo exists — the
    # action is queued unconditionally (not an idempotent comparison).
    if state == 'sync':
        if not repo_exists:
            module.fail_json(msg="Repository was not found. The repository can not be synced.")

        if not module.check_mode:
            server.sync_repo(repo)

        changed = True

    if state == 'publish':
        if not repo_exists:
            module.fail_json(msg="Repository was not found. The repository can not be published.")

        if not module.check_mode:
            server.publish_repo(repo, publish_distributor)

        changed = True

    if state == 'present':
        if not repo_exists:
            if not module.check_mode:
                server.create_repo(
                    repo_id=repo,
                    relative_url=relative_url,
                    feed=feed,
                    generate_sqlite=generate_sqlite,
                    serve_http=serve_http,
                    serve_https=serve_https,
                    proxy_host=proxy_host,
                    proxy_port=proxy_port,
                    proxy_username=proxy_username,
                    proxy_password=proxy_password,
                    repoview=repoview,
                    ssl_ca_cert=importer_ssl_ca_cert,
                    ssl_client_cert=importer_ssl_client_cert,
                    ssl_client_key=importer_ssl_client_key,
                    add_export_distributor=add_export_distributor)

            changed = True

        else:
            # Check to make sure all the settings are correct
            # The importer config gets overwritten on set and not updated, so
            # we set the whole config at the same time.
            if not server.compare_repo_importer_config(
                    repo,
                    feed=feed,
                    proxy_host=proxy_host,
                    proxy_port=proxy_port,
                    proxy_username=proxy_username,
                    proxy_password=proxy_password,
                    ssl_ca_cert=importer_ssl_ca_cert,
                    ssl_client_cert=importer_ssl_client_cert,
                    ssl_client_key=importer_ssl_client_key
            ):
                if not module.check_mode:
                    server.update_repo_importer_config(
                        repo,
                        feed=feed,
                        proxy_host=proxy_host,
                        proxy_port=proxy_port,
                        proxy_username=proxy_username,
                        proxy_password=proxy_password,
                        ssl_ca_cert=importer_ssl_ca_cert,
                        ssl_client_cert=importer_ssl_client_cert,
                        ssl_client_key=importer_ssl_client_key)

                changed = True

            # Distributor settings are compared and pushed one key at a time.
            if relative_url is not None:
                if not server.compare_repo_distributor_config(
                        repo,
                        relative_url=relative_url
                ):
                    if not module.check_mode:
                        server.update_repo_distributor_config(
                            repo,
                            relative_url=relative_url)

                    changed = True

            if not server.compare_repo_distributor_config(repo, generate_sqlite=generate_sqlite):
                if not module.check_mode:
                    server.update_repo_distributor_config(repo, generate_sqlite=generate_sqlite)

                changed = True

            if not server.compare_repo_distributor_config(repo, repoview=repoview):
                if not module.check_mode:
                    server.update_repo_distributor_config(repo, repoview=repoview)

                changed = True

            if not server.compare_repo_distributor_config(repo, http=serve_http):
                if not module.check_mode:
                    server.update_repo_distributor_config(repo, http=serve_http)

                changed = True

            if not server.compare_repo_distributor_config(repo, https=serve_https):
                if not module.check_mode:
                    server.update_repo_distributor_config(repo, https=serve_https)

                changed = True

    module.exit_json(changed=changed, repo=repo)
|
||||
|
||||
|
||||
# Run the module entry point when executed directly by Ansible.
if __name__ == '__main__':
    main()
|
||||
914
plugins/modules/packaging/os/redhat_subscription.py
Normal file
914
plugins/modules/packaging/os/redhat_subscription.py
Normal file
@@ -0,0 +1,914 @@
|
||||
#!/usr/bin/python
|
||||
|
||||
# James Laska (jlaska@redhat.com)
|
||||
#
|
||||
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||
|
||||
from __future__ import absolute_import, division, print_function
|
||||
__metaclass__ = type
|
||||
|
||||
|
||||
ANSIBLE_METADATA = {'metadata_version': '1.1',
|
||||
'status': ['preview'],
|
||||
'supported_by': 'community'}
|
||||
|
||||
|
||||
DOCUMENTATION = '''
|
||||
---
|
||||
module: redhat_subscription
|
||||
short_description: Manage registration and subscriptions to RHSM using the C(subscription-manager) command
|
||||
description:
|
||||
- Manage registration and subscription to the Red Hat Subscription Management entitlement platform using the C(subscription-manager) command
|
||||
author: "Barnaby Court (@barnabycourt)"
|
||||
notes:
|
||||
- In order to register a system, subscription-manager requires either a username and password, or an activationkey and an Organization ID.
|
||||
- Since 2.5 values for I(server_hostname), I(server_insecure), I(rhsm_baseurl),
|
||||
I(server_proxy_hostname), I(server_proxy_port), I(server_proxy_user) and
|
||||
I(server_proxy_password) are no longer taken from the C(/etc/rhsm/rhsm.conf)
|
||||
config file and default to None.
|
||||
requirements:
|
||||
- subscription-manager
|
||||
options:
|
||||
state:
|
||||
description:
|
||||
- whether to register and subscribe (C(present)), or unregister (C(absent)) a system
|
||||
choices: [ "present", "absent" ]
|
||||
default: "present"
|
||||
username:
|
||||
description:
|
||||
- access.redhat.com or Sat6 username
|
||||
password:
|
||||
description:
|
||||
- access.redhat.com or Sat6 password
|
||||
server_hostname:
|
||||
description:
|
||||
- Specify an alternative Red Hat Subscription Management or Sat6 server
|
||||
server_insecure:
|
||||
description:
|
||||
- Enable or disable https server certificate verification when connecting to C(server_hostname)
|
||||
rhsm_baseurl:
|
||||
description:
|
||||
- Specify CDN baseurl
|
||||
rhsm_repo_ca_cert:
|
||||
description:
|
||||
- Specify an alternative location for a CA certificate for CDN
|
||||
server_proxy_hostname:
|
||||
description:
|
||||
- Specify a HTTP proxy hostname
|
||||
server_proxy_port:
|
||||
description:
|
||||
- Specify a HTTP proxy port
|
||||
server_proxy_user:
|
||||
description:
|
||||
- Specify a user for HTTP proxy with basic authentication
|
||||
server_proxy_password:
|
||||
description:
|
||||
- Specify a password for HTTP proxy with basic authentication
|
||||
auto_attach:
|
||||
description:
|
||||
- Upon successful registration, auto-consume available subscriptions
|
||||
- Added in favor of deprecated autosubscribe in 2.5.
|
||||
type: bool
|
||||
default: 'no'
|
||||
aliases: [autosubscribe]
|
||||
activationkey:
|
||||
description:
|
||||
- supply an activation key for use with registration
|
||||
org_id:
|
||||
description:
|
||||
- Organization ID to use in conjunction with activationkey
|
||||
environment:
|
||||
description:
|
||||
- Register with a specific environment in the destination org. Used with Red Hat Satellite 6.x or Katello
|
||||
pool:
|
||||
description:
|
||||
- |
|
||||
Specify a subscription pool name to consume. Regular expressions accepted. Use I(pool_ids) instead if
|
||||
possible, as it is much faster. Mutually exclusive with I(pool_ids).
|
||||
default: '^$'
|
||||
pool_ids:
|
||||
description:
|
||||
- |
|
||||
Specify subscription pool IDs to consume. Prefer over I(pool) when possible as it is much faster.
|
||||
A pool ID may be specified as a C(string) - just the pool ID (ex. C(0123456789abcdef0123456789abcdef)),
|
||||
or as a C(dict) with the pool ID as the key, and a quantity as the value (ex.
|
||||
C(0123456789abcdef0123456789abcdef: 2). If the quantity is provided, it is used to consume multiple
|
||||
entitlements from a pool (the pool must support this). Mutually exclusive with I(pool).
|
||||
default: []
|
||||
consumer_type:
|
||||
description:
|
||||
- The type of unit to register, defaults to system
|
||||
consumer_name:
|
||||
description:
|
||||
- Name of the system to register, defaults to the hostname
|
||||
consumer_id:
|
||||
description:
|
||||
- |
|
||||
References an existing consumer ID to resume using a previous registration
|
||||
for this system. If the system's identity certificate is lost or corrupted,
|
||||
this option allows it to resume using its previous identity and subscriptions.
|
||||
The default is to not specify a consumer ID so a new ID is created.
|
||||
force_register:
|
||||
description:
|
||||
- Register the system even if it is already registered
|
||||
type: bool
|
||||
default: 'no'
|
||||
release:
|
||||
description:
|
||||
- Set a release version
|
||||
syspurpose:
|
||||
description:
|
||||
- Set syspurpose attributes in file C(/etc/rhsm/syspurpose/syspurpose.json)
|
||||
and synchronize these attributes with RHSM server. Syspurpose attributes help attach
|
||||
the most appropriate subscriptions to the system automatically. When C(syspurpose.json) file
|
||||
already contains some attributes, then new attributes overwrite existing attributes.
|
||||
When some attribute is not listed in the new list of attributes, the existing
|
||||
attribute will be removed from C(syspurpose.json) file. Unknown attributes are ignored.
|
||||
type: dict
|
||||
default: {}
|
||||
suboptions:
|
||||
usage:
|
||||
description: Syspurpose attribute usage
|
||||
role:
|
||||
description: Syspurpose attribute role
|
||||
service_level_agreement:
|
||||
description: Syspurpose attribute service_level_agreement
|
||||
addons:
|
||||
description: Syspurpose attribute addons
|
||||
type: list
|
||||
sync:
|
||||
description:
|
||||
- When this option is true, then syspurpose attributes are synchronized with
|
||||
RHSM server immediately. When this option is false, then syspurpose attributes
|
||||
will be synchronized with RHSM server by rhsmcertd daemon.
|
||||
type: bool
|
||||
default: False
|
||||
'''
|
||||
|
||||
EXAMPLES = '''
|
||||
- name: Register as user (joe_user) with password (somepass) and auto-subscribe to available content.
|
||||
redhat_subscription:
|
||||
state: present
|
||||
username: joe_user
|
||||
password: somepass
|
||||
auto_attach: true
|
||||
|
||||
- name: Same as above but subscribe to a specific pool by ID.
|
||||
redhat_subscription:
|
||||
state: present
|
||||
username: joe_user
|
||||
password: somepass
|
||||
pool_ids: 0123456789abcdef0123456789abcdef
|
||||
|
||||
- name: Register and subscribe to multiple pools.
|
||||
redhat_subscription:
|
||||
state: present
|
||||
username: joe_user
|
||||
password: somepass
|
||||
pool_ids:
|
||||
- 0123456789abcdef0123456789abcdef
|
||||
- 1123456789abcdef0123456789abcdef
|
||||
|
||||
- name: Same as above but consume multiple entitlements.
|
||||
redhat_subscription:
|
||||
state: present
|
||||
username: joe_user
|
||||
password: somepass
|
||||
pool_ids:
|
||||
- 0123456789abcdef0123456789abcdef: 2
|
||||
- 1123456789abcdef0123456789abcdef: 4
|
||||
|
||||
- name: Register and pull existing system data.
|
||||
redhat_subscription:
|
||||
state: present
|
||||
username: joe_user
|
||||
password: somepass
|
||||
consumer_id: xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx
|
||||
|
||||
- name: Register with activationkey and consume subscriptions matching Red Hat Enterprise Server or Red Hat Virtualization
|
||||
redhat_subscription:
|
||||
state: present
|
||||
activationkey: 1-222333444
|
||||
org_id: 222333444
|
||||
pool: '^(Red Hat Enterprise Server|Red Hat Virtualization)$'
|
||||
|
||||
- name: Update the consumed subscriptions from the previous example (remove Red Hat Virtualization subscription)
|
||||
redhat_subscription:
|
||||
state: present
|
||||
activationkey: 1-222333444
|
||||
org_id: 222333444
|
||||
pool: '^Red Hat Enterprise Server$'
|
||||
|
||||
- name: Register as user credentials into given environment (against Red Hat Satellite 6.x), and auto-subscribe.
|
||||
redhat_subscription:
|
||||
state: present
|
||||
username: joe_user
|
||||
password: somepass
|
||||
environment: Library
|
||||
auto_attach: true
|
||||
|
||||
- name: Register as user (joe_user) with password (somepass) and a specific release
|
||||
redhat_subscription:
|
||||
state: present
|
||||
username: joe_user
|
||||
password: somepass
|
||||
release: 7.4
|
||||
|
||||
- name: Register as user (joe_user) with password (somepass), set syspurpose attributes and synchronize them with server
|
||||
redhat_subscription:
|
||||
state: present
|
||||
username: joe_user
|
||||
password: somepass
|
||||
auto_attach: true
|
||||
syspurpose:
|
||||
usage: "Production"
|
||||
role: "Red Hat Enterprise Server"
|
||||
service_level_agreement: "Premium"
|
||||
addons:
|
||||
- addon1
|
||||
- addon2
|
||||
sync: true
|
||||
'''
|
||||
|
||||
RETURN = '''
|
||||
subscribed_pool_ids:
|
||||
description: List of pool IDs to which system is now subscribed
|
||||
returned: success
|
||||
type: complex
|
||||
sample: {
|
||||
"8a85f9815ab905d3015ab928c7005de4": "1"
|
||||
}
|
||||
'''
|
||||
|
||||
from os.path import isfile
|
||||
from os import unlink
|
||||
import re
|
||||
import shutil
|
||||
import tempfile
|
||||
import json
|
||||
|
||||
from ansible.module_utils.basic import AnsibleModule
|
||||
from ansible.module_utils._text import to_native
|
||||
from ansible.module_utils.six.moves import configparser
|
||||
|
||||
|
||||
SUBMAN_CMD = None
|
||||
|
||||
|
||||
class RegistrationBase(object):
    """
    Abstract base for registration backends (RHSM).

    Subclasses must implement configure(), register(), unregister(),
    unsubscribe() and subscribe(); enable() and update_plugin_conf()
    provide shared behaviour.
    """

    # Repo file generated by subscription-manager; removed on enable() so a
    # stale copy cannot shadow the one produced after (re-)registration.
    REDHAT_REPO = "/etc/yum.repos.d/redhat.repo"

    def __init__(self, module, username=None, password=None):
        self.module = module        # AnsibleModule (run_command / atomic_move)
        self.username = username
        self.password = password

    def configure(self):
        raise NotImplementedError("Must be implemented by a sub-class")

    def enable(self):
        # Remove any existing redhat.repo
        if isfile(self.REDHAT_REPO):
            unlink(self.REDHAT_REPO)

    def register(self):
        raise NotImplementedError("Must be implemented by a sub-class")

    def unregister(self):
        raise NotImplementedError("Must be implemented by a sub-class")

    def unsubscribe(self):
        raise NotImplementedError("Must be implemented by a sub-class")

    def update_plugin_conf(self, plugin, enabled=True):
        """Enable or disable a yum plugin by atomically rewriting its config file.

        The file is copied to a temp file, edited there, then moved back into
        place with module.atomic_move() so a crash cannot leave a half-written
        config.
        """
        plugin_conf = '/etc/yum/pluginconf.d/%s.conf' % plugin

        if isfile(plugin_conf):
            tmpfd, tmpfile = tempfile.mkstemp()
            shutil.copy2(plugin_conf, tmpfile)
            cfg = configparser.ConfigParser()
            cfg.read([tmpfile])

            cfg.set('main', 'enabled', '1' if enabled else '0')

            # Fix: write via a context manager so the handle is closed even if
            # cfg.write() raises (the original leaked the descriptor on error).
            with open(tmpfile, 'w+') as fd:
                cfg.write(fd)
            self.module.atomic_move(tmpfile, plugin_conf)

    def subscribe(self, **kwargs):
        raise NotImplementedError("Must be implemented by a sub-class")
|
||||
|
||||
|
||||
class Rhsm(RegistrationBase):
    """RHSM backend: drives the ``subscription-manager`` CLI (SUBMAN_CMD)."""

    def __init__(self, module, username=None, password=None):
        RegistrationBase.__init__(self, module, username, password)
        self.module = module

    def enable(self):
        '''
        Enable the system to receive updates from subscription-manager.
        This involves updating affected yum plugins and removing any
        conflicting yum repositories.
        '''
        RegistrationBase.enable(self)
        # Disable the legacy RHN plugin, enable the subscription-manager one.
        self.update_plugin_conf('rhnplugin', False)
        self.update_plugin_conf('subscription-manager', True)

    def configure(self, **kwargs):
        '''
        Configure the system as directed for registration with RHSM

        Only kwargs whose name starts with ``server_`` or ``rhsm_`` are used;
        everything else (state, pool, ...) is ignored.

        Raises:
            * Exception - if error occurs while running command
        '''

        args = [SUBMAN_CMD, 'config']

        # Pass supplied **kwargs as parameters to subscription-manager. Ignore
        # non-configuration parameters and replace '_' with '.'. For example,
        # 'server_hostname' becomes '--server.hostname'.
        options = []
        for k, v in sorted(kwargs.items()):
            if re.search(r'^(server|rhsm)_', k) and v is not None:
                options.append('--%s=%s' % (k.replace('_', '.', 1), v))

        # When there is nothing to configure, then it is not necessary
        # to run config command, because it only returns current
        # content of current configuration file
        if len(options) == 0:
            return

        args.extend(options)

        self.module.run_command(args, check_rc=True)

    @property
    def is_registered(self):
        '''
        Determine whether the current system is registered.

        Returns:
            * Boolean - whether the current system is currently registered to
                        RHSM.
        '''
        # `subscription-manager identity` exits 0 only on a registered system.
        args = [SUBMAN_CMD, 'identity']
        rc, stdout, stderr = self.module.run_command(args, check_rc=False)
        if rc == 0:
            return True
        else:
            return False

    def register(self, username, password, auto_attach, activationkey, org_id,
                 consumer_type, consumer_name, consumer_id, force_register, environment,
                 rhsm_baseurl, server_insecure, server_hostname, server_proxy_hostname,
                 server_proxy_port, server_proxy_user, server_proxy_password, release):
        '''
        Register the current system to the provided RHSM or Sat6 server

        Raises:
            * Exception - if error occurs while running command
        '''
        args = [SUBMAN_CMD, 'register']

        # Generate command arguments
        if force_register:
            args.extend(['--force'])

        if rhsm_baseurl:
            args.extend(['--baseurl', rhsm_baseurl])

        if server_insecure:
            args.extend(['--insecure'])

        if server_hostname:
            args.extend(['--serverurl', server_hostname])

        if org_id:
            args.extend(['--org', org_id])

        if server_proxy_hostname and server_proxy_port:
            # NOTE(review): assumes server_proxy_port is a string — confirm
            # (AnsibleModule delivers it as str since no type was declared).
            args.extend(['--proxy', server_proxy_hostname + ':' + server_proxy_port])

        if server_proxy_user:
            args.extend(['--proxyuser', server_proxy_user])

        if server_proxy_password:
            args.extend(['--proxypassword', server_proxy_password])

        if activationkey:
            args.extend(['--activationkey', activationkey])
        else:
            # Credentials-based registration; these flags are mutually
            # exclusive with an activation key.
            if auto_attach:
                args.append('--auto-attach')
            if username:
                args.extend(['--username', username])
            if password:
                args.extend(['--password', password])
            if consumer_type:
                args.extend(['--type', consumer_type])
            if consumer_name:
                args.extend(['--name', consumer_name])
            if consumer_id:
                args.extend(['--consumerid', consumer_id])
            if environment:
                args.extend(['--environment', environment])

        if release:
            args.extend(['--release', release])

        # NOTE(review): run_command returns (rc, stdout, stderr); the last two
        # names are swapped here, but none of the values are used.
        rc, stderr, stdout = self.module.run_command(args, check_rc=True, expand_user_and_vars=False)

    def unsubscribe(self, serials=None):
        '''
        Unsubscribe a system from subscribed channels

        Args:
            serials(list or None): list of serials to unsubscribe. If
                                   serials is none or an empty list, then
                                   all subscribed channels will be removed.

        Returns:
            the ``serials`` argument unchanged (list, empty list, or None).

        Raises:
            * Exception - if error occurs while running command
        '''
        items = []
        if serials is not None and serials:
            items = ["--serial=%s" % s for s in serials]
        if serials is None:
            items = ["--all"]

        # An explicitly-empty serial list means "remove nothing": skip the call.
        if items:
            args = [SUBMAN_CMD, 'unsubscribe'] + items
            rc, stderr, stdout = self.module.run_command(args, check_rc=True)
        return serials

    def unregister(self):
        '''
        Unregister a currently registered system

        Raises:
            * Exception - if error occurs while running command
        '''
        args = [SUBMAN_CMD, 'unregister']
        rc, stderr, stdout = self.module.run_command(args, check_rc=True)
        # Leave both yum plugins disabled after unregistering.
        self.update_plugin_conf('rhnplugin', False)
        self.update_plugin_conf('subscription-manager', False)

    def subscribe(self, regexp):
        '''
        Subscribe current system to available pools matching the specified
        regular expression. It matches regexp against available pool ids first.
        If any pool ids match, subscribe to those pools and return.

        If no pool ids match, then match regexp against available pool product
        names. Note this can still easily match many many pools. Then subscribe
        to those pools.

        Since a pool id is a more specific match, we only fallback to matching
        against names if we didn't match pool ids.

        Raises:
            * Exception - if error occurs while running command
        '''
        # See https://github.com/ansible/ansible/issues/19466

        # subscribe to pools whose pool id matches regexp (and only the pool id)
        subscribed_pool_ids = self.subscribe_pool(regexp)

        # If we found any matches, we are done
        # Don't attempt to match pools by product name
        if subscribed_pool_ids:
            return subscribed_pool_ids

        # We didn't match any pool ids.
        # Now try subscribing to pools based on product name match
        # Note: This can match lots of product names.
        subscribed_by_product_pool_ids = self.subscribe_product(regexp)
        if subscribed_by_product_pool_ids:
            return subscribed_by_product_pool_ids

        # no matches
        return []

    def subscribe_by_pool_ids(self, pool_ids):
        """
        Try to subscribe to the list of pool IDs

        Args:
            pool_ids(dict): pool id -> quantity (or None for the default).

        Fails the module if any requested pool id is not currently available.
        """
        available_pools = RhsmPools(self.module)

        available_pool_ids = [p.get_pool_id() for p in available_pools]

        for pool_id, quantity in sorted(pool_ids.items()):
            if pool_id in available_pool_ids:
                args = [SUBMAN_CMD, 'attach', '--pool', pool_id]
                if quantity is not None:
                    args.extend(['--quantity', to_native(quantity)])
                rc, stderr, stdout = self.module.run_command(args, check_rc=True)
            else:
                self.module.fail_json(msg='Pool ID: %s not in list of available pools' % pool_id)
        return pool_ids

    def subscribe_pool(self, regexp):
        '''
        Subscribe current system to available pools whose *pool id* matches
        the specified regular expression.

        Raises:
            * Exception - if error occurs while running command
        '''

        # Available pools ready for subscription
        available_pools = RhsmPools(self.module)

        subscribed_pool_ids = []
        for pool in available_pools.filter_pools(regexp):
            pool.subscribe()
            subscribed_pool_ids.append(pool.get_pool_id())
        return subscribed_pool_ids

    def subscribe_product(self, regexp):
        '''
        Subscribe current system to available pools whose *product name*
        matches the specified regular expression.

        Raises:
            * Exception - if error occurs while running command
        '''

        # Available pools ready for subscription
        available_pools = RhsmPools(self.module)

        subscribed_pool_ids = []
        for pool in available_pools.filter_products(regexp):
            pool.subscribe()
            subscribed_pool_ids.append(pool.get_pool_id())
        return subscribed_pool_ids

    def update_subscriptions(self, regexp):
        """Reconcile consumed subscriptions against *regexp*.

        Drops consumed pools that no longer match (by id or product name),
        then subscribes to matching available pools.  Returns a result dict
        suitable for module.exit_json().
        """
        changed = False
        consumed_pools = RhsmPools(self.module, consumed=True)
        pool_ids_to_keep = [p.get_pool_id() for p in consumed_pools.filter_pools(regexp)]
        pool_ids_to_keep.extend([p.get_pool_id() for p in consumed_pools.filter_products(regexp)])

        serials_to_remove = [p.Serial for p in consumed_pools if p.get_pool_id() not in pool_ids_to_keep]
        serials = self.unsubscribe(serials=serials_to_remove)

        subscribed_pool_ids = self.subscribe(regexp)

        if subscribed_pool_ids or serials:
            changed = True
        return {'changed': changed, 'subscribed_pool_ids': subscribed_pool_ids,
                'unsubscribed_serials': serials}

    def update_subscriptions_by_pool_ids(self, pool_ids):
        """Reconcile consumed subscriptions against an explicit id->quantity map."""
        changed = False
        consumed_pools = RhsmPools(self.module, consumed=True)

        existing_pools = {}
        for p in consumed_pools:
            existing_pools[p.get_pool_id()] = p.QuantityUsed

        # NOTE(review): QuantityUsed is parsed from CLI output and is
        # presumably a string, while pool_ids values may be ints — the
        # inequality below then always holds; confirm intended behaviour.
        serials_to_remove = [p.Serial for p in consumed_pools if pool_ids.get(p.get_pool_id(), 0) != p.QuantityUsed]
        serials = self.unsubscribe(serials=serials_to_remove)

        missing_pools = {}
        for pool_id, quantity in sorted(pool_ids.items()):
            if existing_pools.get(pool_id, 0) != quantity:
                missing_pools[pool_id] = quantity

        self.subscribe_by_pool_ids(missing_pools)

        if missing_pools or serials:
            changed = True
        return {'changed': changed, 'subscribed_pool_ids': missing_pools.keys(),
                'unsubscribed_serials': serials}

    def sync_syspurpose(self):
        """
        Try to synchronize syspurpose attributes with server

        Running ``subscription-manager status`` triggers the sync as a side
        effect; the command's output and exit code are deliberately ignored.
        """
        args = [SUBMAN_CMD, 'status']
        rc, stdout, stderr = self.module.run_command(args, check_rc=False)
|
||||
|
||||
|
||||
class RhsmPool(object):
    '''
    Convenience class for housing subscription information.

    Arbitrary key/value pairs parsed from `subscription-manager list`
    output become instance attributes (e.g. PoolID, Serial, QuantityUsed).
    '''

    def __init__(self, module, **kwargs):
        self.module = module
        for k, v in kwargs.items():
            setattr(self, k, v)

    def __str__(self):
        # _name holds the ProductName/SubscriptionName recorded at parse time.
        return str(self.__getattribute__('_name'))

    def get_pool_id(self):
        """Return the pool id under whichever attribute name was parsed.

        Bug fix: the original ``getattr(self, 'PoolId', getattr(self, 'PoolID'))``
        evaluated the fallback eagerly, raising AttributeError whenever
        'PoolID' was absent even though 'PoolId' existed.
        """
        try:
            return self.PoolId
        except AttributeError:
            return self.PoolID

    def subscribe(self):
        """Attach this pool via subscription-manager; True on success (rc == 0)."""
        args = "subscription-manager attach --pool %s" % self.get_pool_id()
        rc, stdout, stderr = self.module.run_command(args, check_rc=True)
        return rc == 0
|
||||
|
||||
|
||||
class RhsmPools(object):
    """
    This class is used for manipulating pools subscriptions with RHSM.

    On construction it runs `subscription-manager list` and parses the
    'Key: Value' output into a list of RhsmPool objects.
    """

    def __init__(self, module, consumed=False):
        self.module = module
        # Snapshot of pools taken once at construction time.
        self.products = self._load_product_list(consumed)

    def __iter__(self):
        return self.products.__iter__()

    def _load_product_list(self, consumed=False):
        """
        Loads list of all available or consumed pools for system in data structure

        Args:
            consumed(bool): if True list consumed pools, else list available pools (default False)
        """
        args = "subscription-manager list"
        if consumed:
            args += " --consumed"
        else:
            args += " --available"
        # Force the C locale so the 'Key: Value' labels parsed below are not
        # translated.
        lang_env = dict(LANG='C', LC_ALL='C', LC_MESSAGES='C')
        rc, stdout, stderr = self.module.run_command(args, check_rc=True, environ_update=lang_env)

        products = []
        for line in stdout.split('\n'):
            # Remove leading+trailing whitespace
            line = line.strip()
            # An empty line implies the end of a output group
            if len(line) == 0:
                continue
            # If a colon ':' is found, parse
            elif ':' in line:
                (key, value) = line.split(':', 1)
                # Collapse spaces so e.g. 'Pool ID' becomes the attribute
                # name 'PoolID'.
                key = key.strip().replace(" ", "")  # To unify
                value = value.strip()
                if key in ['ProductName', 'SubscriptionName']:
                    # Remember the name for later processing; a name line
                    # starts a new pool record.
                    products.append(RhsmPool(self.module, _name=value, key=value))
                elif products:
                    # Associate value with most recently recorded product
                    products[-1].__setattr__(key, value)
                # FIXME - log some warning?
                # else:
                #     warnings.warn("Unhandled subscription key/value: %s/%s" % (key,value))
        return products

    def filter_pools(self, regexp='^$'):
        '''
        Return a generator of RhsmPools whose pool id matches the provided
        regular expression (re.search, not a full match).
        '''
        r = re.compile(regexp)
        for product in self.products:
            if r.search(product.get_pool_id()):
                yield product

    def filter_products(self, regexp='^$'):
        '''
        Return a generator of RhsmPools whose product name matches the
        provided regular expression (re.search, not a full match).
        '''
        r = re.compile(regexp)
        for product in self.products:
            if r.search(product._name):
                yield product
|
||||
|
||||
|
||||
class SysPurpose(object):
    """
    Reader/writer for the syspurpose.json file consumed by subscription-manager.
    """

    SYSPURPOSE_FILE_PATH = "/etc/rhsm/syspurpose/syspurpose.json"

    ALLOWED_ATTRIBUTES = ['role', 'usage', 'service_level_agreement', 'addons']

    def __init__(self, path=None):
        """
        Initialize class used for reading syspurpose json file
        """
        self.path = path or self.SYSPURPOSE_FILE_PATH

    def update_syspurpose(self, new_syspurpose):
        """
        Try to update current syspurpose with new attributes from new_syspurpose

        Allowed attributes present in *new_syspurpose* overwrite stored
        ones; allowed attributes missing from it are removed from the file;
        custom attributes already on disk are preserved.  The control key
        'sync' is tolerated and ignored; any other unknown key raises
        KeyError.  Returns True when the file content changed.
        """
        requested = {}
        for attr, val in new_syspurpose.items():
            if attr == 'sync':
                continue  # control flag for the caller, never stored on disk
            if attr not in self.ALLOWED_ATTRIBUTES:
                raise KeyError("Attribute: %s not in list of allowed attributes: %s" %
                               (attr, self.ALLOWED_ATTRIBUTES))
            if val is not None:
                requested[attr] = val

        on_disk = self._read_syspurpose()
        if on_disk == requested:
            return False

        # Merge new values, then drop allowed attributes the caller no longer
        # lists while keeping user-created custom keys (e.g. "foo": "bar").
        on_disk.update(requested)
        stale = [key for key in on_disk
                 if key in self.ALLOWED_ATTRIBUTES and key not in requested]
        for key in stale:
            del on_disk[key]
        self._write_syspurpose(on_disk)
        return True

    def _write_syspurpose(self, new_syspurpose):
        """
        Serialize *new_syspurpose* to the json file (sorted, pretty-printed).
        """
        with open(self.path, "w") as fp:
            fp.write(json.dumps(new_syspurpose, indent=2, ensure_ascii=False, sort_keys=True))

    def _read_syspurpose(self):
        """
        Read current syspurpose from the json file; a missing or unreadable
        file yields an empty dict.
        """
        try:
            with open(self.path, "r") as fp:
                raw = fp.read()
        except IOError:
            return {}
        return json.loads(raw)
|
||||
|
||||
|
||||
def main():
    """Module entry point: parse options, then register/unregister and
    reconcile subscriptions via the Rhsm helper."""

    # Load RHSM configuration from file
    # (the real AnsibleModule is attached below, after argument parsing)
    rhsm = Rhsm(None)

    # Note: the default values for parameters are:
    # 'type': 'str', 'default': None, 'required': False
    # So there is no need to repeat these values for each parameter.
    module = AnsibleModule(
        argument_spec={
            'state': {'default': 'present', 'choices': ['present', 'absent']},
            'username': {},
            'password': {'no_log': True},
            'server_hostname': {},
            'server_insecure': {},
            'rhsm_baseurl': {},
            'rhsm_repo_ca_cert': {},
            'auto_attach': {'aliases': ['autosubscribe'], 'type': 'bool'},
            'activationkey': {'no_log': True},
            'org_id': {},
            'environment': {},
            'pool': {'default': '^$'},
            'pool_ids': {'default': [], 'type': 'list'},
            'consumer_type': {},
            'consumer_name': {},
            'consumer_id': {},
            'force_register': {'default': False, 'type': 'bool'},
            'server_proxy_hostname': {},
            'server_proxy_port': {},
            'server_proxy_user': {},
            'server_proxy_password': {'no_log': True},
            'release': {},
            'syspurpose': {
                'type': 'dict',
                'options': {
                    'role': {},
                    'usage': {},
                    'service_level_agreement': {},
                    'addons': {'type': 'list'},
                    'sync': {'type': 'bool', 'default': False}
                }
            }
        },
        required_together=[['username', 'password'],
                           ['server_proxy_hostname', 'server_proxy_port'],
                           ['server_proxy_user', 'server_proxy_password']],
        # NOTE(review): 'autosubscribe' is an alias (canonical: auto_attach)
        # and 'force' is not a declared option (canonical: force_register);
        # confirm these mutually_exclusive entries take effect as intended.
        mutually_exclusive=[['activationkey', 'username'],
                            ['activationkey', 'consumer_id'],
                            ['activationkey', 'environment'],
                            ['activationkey', 'autosubscribe'],
                            ['force', 'consumer_id'],
                            ['pool', 'pool_ids']],
        required_if=[['state', 'present', ['username', 'activationkey'], True]],
    )

    rhsm.module = module
    state = module.params['state']
    username = module.params['username']
    password = module.params['password']
    server_hostname = module.params['server_hostname']
    server_insecure = module.params['server_insecure']
    rhsm_baseurl = module.params['rhsm_baseurl']
    rhsm_repo_ca_cert = module.params['rhsm_repo_ca_cert']
    auto_attach = module.params['auto_attach']
    activationkey = module.params['activationkey']
    org_id = module.params['org_id']
    if activationkey and not org_id:
        module.fail_json(msg='org_id is required when using activationkey')
    environment = module.params['environment']
    pool = module.params['pool']

    # Normalize pool_ids into a {pool_id: quantity-or-None} mapping; each list
    # entry is either a bare id string or a one-entry {id: quantity} dict.
    pool_ids = {}
    for value in module.params['pool_ids']:
        if isinstance(value, dict):
            if len(value) != 1:
                module.fail_json(msg='Unable to parse pool_ids option.')
            pool_id, quantity = list(value.items())[0]
        else:
            pool_id, quantity = value, None
        pool_ids[pool_id] = quantity
    consumer_type = module.params["consumer_type"]
    consumer_name = module.params["consumer_name"]
    consumer_id = module.params["consumer_id"]
    force_register = module.params["force_register"]
    server_proxy_hostname = module.params['server_proxy_hostname']
    server_proxy_port = module.params['server_proxy_port']
    server_proxy_user = module.params['server_proxy_user']
    server_proxy_password = module.params['server_proxy_password']
    release = module.params['release']
    syspurpose = module.params['syspurpose']

    # Resolve the subscription-manager binary once for the whole run.
    global SUBMAN_CMD
    SUBMAN_CMD = module.get_bin_path('subscription-manager', True)

    # Update the local syspurpose.json first; the result feeds the 'changed'
    # status when the system is already registered.
    syspurpose_changed = False
    if syspurpose is not None:
        try:
            syspurpose_changed = SysPurpose().update_syspurpose(syspurpose)
        except Exception as err:
            module.fail_json(msg="Failed to update syspurpose attributes: %s" % to_native(err))

    # Ensure system is registered
    if state == 'present':

        # Register system
        if rhsm.is_registered and not force_register:
            # Already registered: optionally push syspurpose to the server,
            # then reconcile subscriptions if pool/pool_ids were given.
            if syspurpose and 'sync' in syspurpose and syspurpose['sync'] is True:
                try:
                    rhsm.sync_syspurpose()
                except Exception as e:
                    module.fail_json(msg="Failed to synchronize syspurpose attributes: %s" % to_native(e))
            if pool != '^$' or pool_ids:
                try:
                    if pool_ids:
                        result = rhsm.update_subscriptions_by_pool_ids(pool_ids)
                    else:
                        result = rhsm.update_subscriptions(pool)
                except Exception as e:
                    module.fail_json(msg="Failed to update subscriptions for '%s': %s" % (server_hostname, to_native(e)))
                else:
                    module.exit_json(**result)
            else:
                if syspurpose_changed is True:
                    module.exit_json(changed=True, msg="Syspurpose attributes changed.")
                else:
                    module.exit_json(changed=False, msg="System already registered.")
        else:
            # Fresh (or forced) registration: enable plugins, write config,
            # register, then attach the requested subscriptions.
            try:
                rhsm.enable()
                rhsm.configure(**module.params)
                rhsm.register(username, password, auto_attach, activationkey, org_id,
                              consumer_type, consumer_name, consumer_id, force_register,
                              environment, rhsm_baseurl, server_insecure, server_hostname,
                              server_proxy_hostname, server_proxy_port, server_proxy_user, server_proxy_password, release)
                if syspurpose and 'sync' in syspurpose and syspurpose['sync'] is True:
                    rhsm.sync_syspurpose()
                if pool_ids:
                    subscribed_pool_ids = rhsm.subscribe_by_pool_ids(pool_ids)
                elif pool != '^$':
                    subscribed_pool_ids = rhsm.subscribe(pool)
                else:
                    subscribed_pool_ids = []
            except Exception as e:
                module.fail_json(msg="Failed to register with '%s': %s" % (server_hostname, to_native(e)))
            else:
                module.exit_json(changed=True,
                                 msg="System successfully registered to '%s'." % server_hostname,
                                 subscribed_pool_ids=subscribed_pool_ids)

    # Ensure system is *not* registered
    if state == 'absent':
        if not rhsm.is_registered:
            module.exit_json(changed=False, msg="System already unregistered.")
        else:
            try:
                rhsm.unsubscribe()
                rhsm.unregister()
            except Exception as e:
                module.fail_json(msg="Failed to unregister: %s" % to_native(e))
            else:
                module.exit_json(changed=True, msg="System successfully unregistered from %s." % server_hostname)


if __name__ == '__main__':
    main()
|
||||
162
plugins/modules/packaging/os/rhn_channel.py
Normal file
162
plugins/modules/packaging/os/rhn_channel.py
Normal file
@@ -0,0 +1,162 @@
|
||||
#!/usr/bin/python
|
||||
|
||||
# Copyright: (c) Vincent Van de Kussen
|
||||
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||
|
||||
from __future__ import absolute_import, division, print_function
|
||||
__metaclass__ = type
|
||||
|
||||
ANSIBLE_METADATA = {'metadata_version': '1.1',
|
||||
'status': ['preview'],
|
||||
'supported_by': 'community'}
|
||||
|
||||
DOCUMENTATION = '''
|
||||
---
|
||||
module: rhn_channel
|
||||
short_description: Adds or removes Red Hat software channels
|
||||
description:
|
||||
- Adds or removes Red Hat software channels.
|
||||
author:
|
||||
- Vincent Van der Kussen (@vincentvdk)
|
||||
notes:
|
||||
- This module fetches the system id from RHN.
|
||||
- This module doesn't support I(check_mode).
|
||||
options:
|
||||
name:
|
||||
description:
|
||||
- Name of the software channel.
|
||||
required: true
|
||||
sysname:
|
||||
description:
|
||||
- Name of the system as it is known in RHN/Satellite.
|
||||
required: true
|
||||
state:
|
||||
description:
|
||||
- Whether the channel should be present or not, taking action if the state is different from what is stated.
|
||||
default: present
|
||||
url:
|
||||
description:
|
||||
- The full URL to the RHN/Satellite API.
|
||||
required: true
|
||||
user:
|
||||
description:
|
||||
- RHN/Satellite login.
|
||||
required: true
|
||||
password:
|
||||
description:
|
||||
- RHN/Satellite password.
|
||||
required: true
|
||||
'''
|
||||
|
||||
EXAMPLES = '''
|
||||
- rhn_channel:
|
||||
name: rhel-x86_64-server-v2vwin-6
|
||||
sysname: server01
|
||||
url: https://rhn.redhat.com/rpc/api
|
||||
user: rhnuser
|
||||
password: guessme
|
||||
delegate_to: localhost
|
||||
'''
|
||||
|
||||
from ansible.module_utils._text import to_text
|
||||
from ansible.module_utils.basic import AnsibleModule
|
||||
from ansible.module_utils.six.moves import xmlrpc_client
|
||||
|
||||
|
||||
def get_systemid(client, session, sysname):
    """Return the integer RHN system id of the system named *sysname*.

    Scans the systems visible to the logged-in user and returns the id of
    the first entry whose name matches; returns None when nothing matches.
    """
    for entry in client.system.listUserSystems(session):
        if entry.get('name') == sysname:
            return int(entry.get('id'))
|
||||
|
||||
|
||||
def subscribe_channels(channelname, client, session, sysname, sys_id):
    """Add *channelname* to the system's child channels and push the new set.

    The current channel set is re-read from the server, so repeated calls
    are safe as long as the channel is not already present.
    """
    desired = base_channels(client, session, sys_id) + [channelname]
    return client.system.setChildChannels(session, sys_id, desired)
|
||||
|
||||
|
||||
def unsubscribe_channels(channelname, client, session, sysname, sys_id):
    """Remove *channelname* from the system's child channels and push the set.

    Raises ValueError (via list.remove) if the system is not subscribed to
    the channel; main() checks membership before calling this.
    """
    remaining = base_channels(client, session, sys_id)
    remaining.remove(channelname)
    return client.system.setChildChannels(session, sys_id, remaining)
|
||||
|
||||
|
||||
def base_channels(client, session, sys_id):
    """Return the labels of the channels the system is subscribed to.

    Satellite/Spacewalk expose the label under the 'label' key; older
    hosted RHN uses 'channel_label', so fall back to that on KeyError.
    """
    listing = client.channel.software.listSystemChannels(session, sys_id)
    try:
        return [entry['label'] for entry in listing]
    except KeyError:
        return [entry['channel_label'] for entry in listing]
|
||||
|
||||
|
||||
def main():
    """Module entry point: ensure the channel is (un)subscribed for the system.

    Logs in to the RHN/Satellite XML-RPC API, resolves the system id from
    its name, compares the requested channel against the current channel
    list and only calls the API when a change is actually needed.
    """
    module = AnsibleModule(
        argument_spec=dict(
            state=dict(type='str', default='present', choices=['present', 'absent']),
            name=dict(type='str', required=True),
            sysname=dict(type='str', required=True),
            url=dict(type='str', required=True),
            user=dict(type='str', required=True),
            password=dict(type='str', required=True, aliases=['pwd'], no_log=True),
        )
    )

    state = module.params['state']
    channelname = module.params['name']
    systname = module.params['sysname']
    saturl = module.params['url']
    user = module.params['user']
    password = module.params['password']

    # initialize connection
    client = xmlrpc_client.ServerProxy(saturl)
    try:
        session = client.auth.login(user, password)
    except Exception as e:
        module.fail_json(msg="Unable to establish session with Satellite server: %s " % to_text(e))

    # auth.login returns a session token; an empty/falsy one means auth failed
    # without raising.
    if not session:
        module.fail_json(msg="Failed to establish session with Satellite server.")

    # get systemid
    try:
        sys_id = get_systemid(client, session, systname)
    except Exception as e:
        module.fail_json(msg="Unable to get system id: %s " % to_text(e))

    # get_systemid returns None when no system matches the given name.
    if not sys_id:
        module.fail_json(msg="Failed to get system id.")

    # get channels for system
    try:
        chans = base_channels(client, session, sys_id)
    except Exception as e:
        module.fail_json(msg="Unable to get channel information: %s " % to_text(e))

    try:
        if state == 'present':
            if channelname in chans:
                module.exit_json(changed=False, msg="Channel %s already exists" % channelname)
            else:
                subscribe_channels(channelname, client, session, systname, sys_id)
                module.exit_json(changed=True, msg="Channel %s added" % channelname)

        if state == 'absent':
            if channelname not in chans:
                module.exit_json(changed=False, msg="Not subscribed to channel %s." % channelname)
            else:
                unsubscribe_channels(channelname, client, session, systname, sys_id)
                module.exit_json(changed=True, msg="Channel %s removed" % channelname)
    except Exception as e:
        module.fail_json(msg='Unable to %s channel (%s): %s' % ('add' if state == 'present' else 'remove', channelname, to_text(e)))
    finally:
        # Always close the XML-RPC session, even on the exit_json paths
        # (exit_json raises SystemExit, which still runs this finally block).
        client.auth.logout(session)
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
main()
|
||||
434
plugins/modules/packaging/os/rhn_register.py
Normal file
434
plugins/modules/packaging/os/rhn_register.py
Normal file
@@ -0,0 +1,434 @@
|
||||
#!/usr/bin/python
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
# Copyright: (c) James Laska
|
||||
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||
|
||||
from __future__ import absolute_import, division, print_function
|
||||
__metaclass__ = type
|
||||
|
||||
ANSIBLE_METADATA = {'metadata_version': '1.1',
|
||||
'status': ['preview'],
|
||||
'supported_by': 'community'}
|
||||
|
||||
DOCUMENTATION = r'''
|
||||
---
|
||||
module: rhn_register
|
||||
short_description: Manage Red Hat Network registration using the C(rhnreg_ks) command
|
||||
description:
|
||||
- Manage registration to the Red Hat Network.
|
||||
author:
|
||||
- James Laska (@jlaska)
|
||||
notes:
|
||||
- This is for older Red Hat products. You probably want the M(redhat_subscription) module instead.
|
||||
- In order to register a system, C(rhnreg_ks) requires either a username and password, or an activationkey.
|
||||
requirements:
|
||||
- rhnreg_ks
|
||||
- either libxml2 or lxml
|
||||
options:
|
||||
state:
|
||||
description:
|
||||
- Whether to register (C(present)), or unregister (C(absent)) a system.
|
||||
type: str
|
||||
choices: [ absent, present ]
|
||||
default: present
|
||||
username:
|
||||
description:
|
||||
- Red Hat Network username.
|
||||
type: str
|
||||
password:
|
||||
description:
|
||||
- Red Hat Network password.
|
||||
type: str
|
||||
server_url:
|
||||
description:
|
||||
- Specify an alternative Red Hat Network server URL.
|
||||
- The default is the current value of I(serverURL) from C(/etc/sysconfig/rhn/up2date).
|
||||
type: str
|
||||
activationkey:
|
||||
description:
|
||||
- Supply an activation key for use with registration.
|
||||
type: str
|
||||
profilename:
|
||||
description:
|
||||
- Supply a profilename for use with registration.
|
||||
type: str
|
||||
ca_cert:
|
||||
description:
|
||||
- Supply a custom ssl CA certificate file for use with registration.
|
||||
type: path
|
||||
aliases: [ sslcacert ]
|
||||
systemorgid:
|
||||
description:
|
||||
- Supply an organizational id for use with registration.
|
||||
type: str
|
||||
channels:
|
||||
description:
|
||||
- Optionally specify a list of channels to subscribe to upon successful registration.
|
||||
type: list
|
||||
default: []
|
||||
enable_eus:
|
||||
description:
|
||||
- If C(yes), extended update support will be requested.
|
||||
type: bool
|
||||
default: no
|
||||
nopackages:
|
||||
description:
|
||||
- If C(yes), the registered node will not upload its installed packages information to Satellite server.
|
||||
type: bool
|
||||
default: no
|
||||
'''
|
||||
|
||||
EXAMPLES = r'''
|
||||
- name: Unregister system from RHN
|
||||
rhn_register:
|
||||
state: absent
|
||||
username: joe_user
|
||||
password: somepass
|
||||
|
||||
- name: Register as user with password and auto-subscribe to available content
|
||||
rhn_register:
|
||||
state: present
|
||||
username: joe_user
|
||||
password: somepass
|
||||
|
||||
- name: Register with activationkey and enable extended update support
|
||||
rhn_register:
|
||||
state: present
|
||||
activationkey: 1-222333444
|
||||
enable_eus: yes
|
||||
|
||||
- name: Register with activationkey and set a profilename which may differ from the hostname
|
||||
rhn_register:
|
||||
state: present
|
||||
activationkey: 1-222333444
|
||||
profilename: host.example.com.custom
|
||||
|
||||
- name: Register as user with password against a satellite server
|
||||
rhn_register:
|
||||
state: present
|
||||
username: joe_user
|
||||
password: somepass
|
||||
server_url: https://xmlrpc.my.satellite/XMLRPC
|
||||
|
||||
- name: Register as user with password and enable channels
|
||||
rhn_register:
|
||||
state: present
|
||||
username: joe_user
|
||||
password: somepass
|
||||
channels: rhel-x86_64-server-6-foo-1,rhel-x86_64-server-6-bar-1
|
||||
'''
|
||||
|
||||
RETURN = r'''
|
||||
# Default return values
|
||||
'''
|
||||
|
||||
import os
|
||||
import sys
|
||||
|
||||
# Attempt to import rhn client tools
|
||||
sys.path.insert(0, '/usr/share/rhn')
|
||||
try:
|
||||
import up2date_client
|
||||
import up2date_client.config
|
||||
HAS_UP2DATE_CLIENT = True
|
||||
except ImportError:
|
||||
HAS_UP2DATE_CLIENT = False
|
||||
|
||||
# INSERT REDHAT SNIPPETS
|
||||
from ansible_collections.community.general.plugins.module_utils import redhat
|
||||
from ansible.module_utils.basic import AnsibleModule
|
||||
from ansible.module_utils.six.moves import urllib, xmlrpc_client
|
||||
|
||||
|
||||
class Rhn(redhat.RegistrationBase):
    """Wrapper around the RHN client configuration and XML-RPC API.

    Configuration is read from /etc/sysconfig/rhn/up2date via
    up2date_client; the XML-RPC connection is created lazily the first
    time api() is called and torn down by logout().
    """

    def __init__(self, module=None, username=None, password=None):
        redhat.RegistrationBase.__init__(self, module, username, password)
        self.config = self.load_config()
        # XML-RPC server proxy and session token, created lazily by api().
        self.server = None
        self.session = None

    def logout(self):
        """Terminate the XML-RPC session, if api() ever opened one."""
        if self.session is not None:
            self.server.auth.logout(self.session)

    def load_config(self):
        '''
        Read configuration from /etc/sysconfig/rhn/up2date
        '''
        # Without rhn-client-tools there is no configuration to read;
        # main() fails early before any of it is used.
        if not HAS_UP2DATE_CLIENT:
            return None

        config = up2date_client.config.initUp2dateConfig()

        return config

    @property
    def server_url(self):
        # The XML-RPC endpoint URL from the up2date configuration.
        return self.config['serverURL']

    @property
    def hostname(self):
        '''
        Return the non-xmlrpc RHN hostname. This is a convenience method
        used for displaying a more readable RHN hostname.

        Returns: str
        '''
        url = urllib.parse.urlparse(self.server_url)
        return url[1].replace('xmlrpc.', '')

    @property
    def systemid(self):
        """Return the numeric system id parsed from the local systemId file."""
        systemid = None
        xpath_str = "//member[name='system_id']/value/string"

        if os.path.isfile(self.config['systemIdPath']):
            fd = open(self.config['systemIdPath'], 'r')
            xml_data = fd.read()
            fd.close()

            # Ugh, xml parsing time ...
            # First, try parsing with libxml2 ...
            if systemid is None:
                try:
                    import libxml2
                    doc = libxml2.parseDoc(xml_data)
                    ctxt = doc.xpathNewContext()
                    systemid = ctxt.xpathEval(xpath_str)[0].content
                    doc.freeDoc()
                    ctxt.xpathFreeContext()
                except ImportError:
                    pass

            # m-kay, let's try with lxml now ...
            if systemid is None:
                try:
                    from lxml import etree
                    root = etree.fromstring(xml_data)
                    systemid = root.xpath(xpath_str)[0].text
                except ImportError:
                    raise Exception('"libxml2" or "lxml" is required for this module.')

            # Strip the 'ID-' prefix
            if systemid is not None and systemid.startswith('ID-'):
                systemid = systemid[3:]

        # NOTE(review): if the systemId file does not exist, systemid is still
        # None here and int(None) raises TypeError; callers are expected to
        # check is_registered before reading this property.
        return int(systemid)

    @property
    def is_registered(self):
        '''
        Determine whether the current system is registered.

        Returns: True|False
        '''
        # Presence of the systemId file is the registration marker.
        return os.path.isfile(self.config['systemIdPath'])

    def configure_server_url(self, server_url):
        '''
        Configure server_url for registration
        '''
        # Persisted to /etc/sysconfig/rhn/up2date, so this survives the run.
        self.config.set('serverURL', server_url)
        self.config.save()

    def enable(self):
        '''
        Prepare the system for RHN registration. This includes ...
         * enabling the rhnplugin yum plugin
         * disabling the subscription-manager yum plugin
        '''
        redhat.RegistrationBase.enable(self)
        self.update_plugin_conf('rhnplugin', True)
        self.update_plugin_conf('subscription-manager', False)

    def register(self, enable_eus=False, activationkey=None, profilename=None, sslcacert=None, systemorgid=None, nopackages=False):
        '''
        Register system to RHN. If enable_eus=True, extended update
        support will be requested.
        '''
        # Build the rhnreg_ks command line; only options that were actually
        # supplied are appended. --force re-registers an existing system.
        register_cmd = ['/usr/sbin/rhnreg_ks', '--force']
        if self.username:
            register_cmd.extend(['--username', self.username, '--password', self.password])
        if self.server_url:
            register_cmd.extend(['--serverUrl', self.server_url])
        if enable_eus:
            register_cmd.append('--use-eus-channel')
        if nopackages:
            register_cmd.append('--nopackages')
        if activationkey is not None:
            register_cmd.extend(['--activationkey', activationkey])
        if profilename is not None:
            register_cmd.extend(['--profilename', profilename])
        if sslcacert is not None:
            register_cmd.extend(['--sslCACert', sslcacert])
        if systemorgid is not None:
            register_cmd.extend(['--systemorgid', systemorgid])
        # check_rc=True makes a nonzero exit fail the module with rhnreg_ks's
        # own error output.
        rc, stdout, stderr = self.module.run_command(register_cmd, check_rc=True)

    def api(self, method, *args):
        '''
        Convenience RPC wrapper
        '''
        # Lazily build the ServerProxy and log in on first use; subsequent
        # calls reuse the cached server and session.
        if self.server is None:
            if self.hostname != 'rhn.redhat.com':
                url = "https://%s/rpc/api" % self.hostname
            else:
                url = "https://xmlrpc.%s/rpc/api" % self.hostname
            self.server = xmlrpc_client.ServerProxy(url)
            self.session = self.server.auth.login(self.username, self.password)

        func = getattr(self.server, method)
        return func(self.session, *args)

    def unregister(self):
        '''
        Unregister a previously registered system
        '''

        # Initiate RPC connection
        self.api('system.deleteSystems', [self.systemid])

        # Remove systemid file
        os.unlink(self.config['systemIdPath'])

    def subscribe(self, channels):
        """Subscribe the system to *channels* (a list of channel labels)."""
        if not channels:
            return

        if self._is_hosted():
            # Hosted RHN manages channels as a single flat list keyed by
            # 'channel_label'.
            current_channels = self.api('channel.software.listSystemChannels', self.systemid)
            new_channels = [item['channel_label'] for item in current_channels]
            new_channels.extend(channels)
            return self.api('channel.software.setSystemChannels', self.systemid, list(new_channels))

        else:
            # Satellite/Spacewalk set base and child channels via separate
            # API calls, and key the label as 'label'.
            current_channels = self.api('channel.software.listSystemChannels', self.systemid)
            current_channels = [item['label'] for item in current_channels]
            new_base = None
            new_childs = []
            for ch in channels:
                if ch in current_channels:
                    continue
                # An empty parent label identifies a base channel.
                if self.api('channel.software.getDetails', ch)['parent_channel_label'] == '':
                    new_base = ch
                else:
                    if ch not in new_childs:
                        new_childs.append(ch)
            out_base = 0
            out_childs = 0

            if new_base:
                out_base = self.api('system.setBaseChannel', self.systemid, new_base)

            if new_childs:
                out_childs = self.api('system.setChildChannels', self.systemid, new_childs)

            return out_base and out_childs

    def _is_hosted(self):
        '''
        Return True if we are running against Hosted (rhn.redhat.com) or
        False otherwise (when running against Satellite or Spacewalk)
        '''
        return 'rhn.redhat.com' in self.hostname
|
||||
|
||||
|
||||
def main():
    """Module entry point: register or unregister the system with RHN.

    Registration itself is delegated to rhnreg_ks via Rhn.register();
    channel subscription and unregistration go through the XML-RPC API
    and therefore require username/password.
    """
    module = AnsibleModule(
        argument_spec=dict(
            state=dict(type='str', default='present', choices=['absent', 'present']),
            username=dict(type='str'),
            password=dict(type='str', no_log=True),
            server_url=dict(type='str'),
            activationkey=dict(type='str', no_log=True),
            profilename=dict(type='str'),
            ca_cert=dict(type='path', aliases=['sslcacert']),
            systemorgid=dict(type='str'),
            enable_eus=dict(type='bool', default=False),
            nopackages=dict(type='bool', default=False),
            channels=dict(type='list', default=[]),
        ),
        # username/password is required for state=absent, or if channels is not empty
        # (basically anything that uses self.api requires username/password) but it doesn't
        # look like we can express that with required_if/required_together/mutually_exclusive

        # only username+password can be used for unregister
        required_if=[['state', 'absent', ['username', 'password']]],
    )

    if not HAS_UP2DATE_CLIENT:
        module.fail_json(msg="Unable to import up2date_client. Is 'rhn-client-tools' installed?")

    server_url = module.params['server_url']
    username = module.params['username']
    password = module.params['password']

    state = module.params['state']
    activationkey = module.params['activationkey']
    profilename = module.params['profilename']
    sslcacert = module.params['ca_cert']
    systemorgid = module.params['systemorgid']
    channels = module.params['channels']
    enable_eus = module.params['enable_eus']
    nopackages = module.params['nopackages']

    rhn = Rhn(module=module, username=username, password=password)

    # use the provided server url and persist it to the rhn config.
    if server_url:
        rhn.configure_server_url(server_url)

    if not rhn.server_url:
        module.fail_json(
            msg="No serverURL was found (from either the 'server_url' module arg or the config file option 'serverURL' in /etc/sysconfig/rhn/up2date)"
        )

    # Ensure system is registered
    if state == 'present':

        # Check for missing parameters ...
        if not (activationkey or rhn.username or rhn.password):
            module.fail_json(msg="Missing arguments, must supply an activationkey (%s) or username (%s) and password (%s)" % (activationkey, rhn.username,
                                                                                                                              rhn.password))
        if not activationkey and not (rhn.username and rhn.password):
            module.fail_json(msg="Missing arguments, If registering without an activationkey, must supply username or password")

        # Register system
        if rhn.is_registered:
            module.exit_json(changed=False, msg="System already registered.")

        try:
            rhn.enable()
            rhn.register(enable_eus, activationkey, profilename, sslcacert, systemorgid, nopackages)
            rhn.subscribe(channels)
        except Exception as exc:
            module.fail_json(msg="Failed to register with '%s': %s" % (rhn.hostname, exc))
        finally:
            # Close the XML-RPC session opened by subscribe(), if any.
            rhn.logout()

        module.exit_json(changed=True, msg="System successfully registered to '%s'." % rhn.hostname)

    # Ensure system is *not* registered
    if state == 'absent':
        if not rhn.is_registered:
            module.exit_json(changed=False, msg="System already unregistered.")

        # Unregistration deletes the system via the XML-RPC API, which needs
        # credentials even though registration could use an activation key.
        if not (rhn.username and rhn.password):
            module.fail_json(msg="Missing arguments, the system is currently registered and unregistration requires a username and password")

        try:
            rhn.unregister()
        except Exception as exc:
            module.fail_json(msg="Failed to unregister: %s" % exc)
        finally:
            rhn.logout()

        module.exit_json(changed=True, msg="System successfully unregistered from %s." % rhn.hostname)
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
main()
|
||||
129
plugins/modules/packaging/os/rhsm_release.py
Normal file
129
plugins/modules/packaging/os/rhsm_release.py
Normal file
@@ -0,0 +1,129 @@
|
||||
#!/usr/bin/python
|
||||
|
||||
# (c) 2018, Sean Myers <sean.myers@redhat.com>
|
||||
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||
|
||||
from __future__ import (absolute_import, division, print_function)
|
||||
__metaclass__ = type
|
||||
|
||||
ANSIBLE_METADATA = {
|
||||
'metadata_version': '1.1',
|
||||
'status': ['preview'],
|
||||
'supported_by': 'community'
|
||||
}
|
||||
|
||||
DOCUMENTATION = '''
|
||||
---
|
||||
module: rhsm_release
|
||||
short_description: Set or Unset RHSM Release version
|
||||
description:
|
||||
- Sets or unsets the release version used by RHSM repositories.
|
||||
notes:
|
||||
- This module will fail on an unregistered system.
|
||||
Use the C(redhat_subscription) module to register a system
|
||||
prior to setting the RHSM release.
|
||||
requirements:
|
||||
- Red Hat Enterprise Linux 6+ with subscription-manager installed
|
||||
options:
|
||||
release:
|
||||
description:
|
||||
- RHSM release version to use (use null to unset)
|
||||
required: true
|
||||
author:
|
||||
- Sean Myers (@seandst)
|
||||
'''
|
||||
|
||||
EXAMPLES = '''
|
||||
# Set release version to 7.1
|
||||
- name: Set RHSM release version
|
||||
rhsm_release:
|
||||
release: "7.1"
|
||||
|
||||
# Set release version to 6Server
|
||||
- name: Set RHSM release version
|
||||
rhsm_release:
|
||||
release: "6Server"
|
||||
|
||||
# Unset release version
|
||||
- name: Unset RHSM release release
|
||||
rhsm_release:
|
||||
release: null
|
||||
'''
|
||||
|
||||
RETURN = '''
|
||||
current_release:
|
||||
description: The current RHSM release version value
|
||||
returned: success
|
||||
type: str
|
||||
'''
|
||||
|
||||
from ansible.module_utils.basic import AnsibleModule
|
||||
|
||||
import re
|
||||
|
||||
# Matches release-like values such as 7.2, 6.10, 10Server,
|
||||
# but rejects unlikely values, like 100Server, 100.0, 1.100, etc.
|
||||
release_matcher = re.compile(r'\b\d{1,2}(?:\.\d{1,2}|Server)\b')
|
||||
|
||||
|
||||
def _sm_release(module, *args):
|
||||
# pass args to s-m release, e.g. _sm_release(module, '--set', '0.1') becomes
|
||||
# "subscription-manager release --set 0.1"
|
||||
sm_bin = module.get_bin_path('subscription-manager', required=True)
|
||||
cmd = '{0} release {1}'.format(sm_bin, " ".join(args))
|
||||
# delegate nonzero rc handling to run_command
|
||||
return module.run_command(cmd, check_rc=True)
|
||||
|
||||
|
||||
def get_release(module):
    """Return the currently configured RHSM release, or None if unset."""
    dummy, out, dummy_err = _sm_release(module, '--show')
    found = release_matcher.findall(out)
    # An empty match list means no release is currently set.
    return found[0] if found else None
|
||||
|
||||
|
||||
def set_release(module, release):
    """Set the RHSM release to *release*, or unset it when release is None."""
    if release is None:
        return _sm_release(module, '--unset')
    return _sm_release(module, '--set', release)
|
||||
|
||||
|
||||
def main():
    """Module entry point: reconcile the RHSM release with the target value."""
    module = AnsibleModule(
        argument_spec=dict(
            release=dict(type='str', required=True),
        ),
        supports_check_mode=True
    )

    target_release = module.params['release']

    # Reject values that do not even look like a release before shelling out.
    if target_release and not release_matcher.findall(target_release):
        module.fail_json(msg='"{0}" does not appear to be a valid release.'.format(target_release))

    # subscription-manager itself reports a useful error when the system is
    # not subscribed, so no extra pre-check is needed here.
    current_release = get_release(module)
    changed = current_release != target_release

    if changed and not module.check_mode:
        # set_release() aborts the module (via check_rc) when s-m rejects the
        # value, e.g. "No releases match '7.20'...". Reaching the next line
        # therefore means the release now equals the target.
        set_release(module, target_release)
        current_release = target_release

    module.exit_json(current_release=current_release, changed=changed)
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
main()
|
||||
247
plugins/modules/packaging/os/rhsm_repository.py
Normal file
247
plugins/modules/packaging/os/rhsm_repository.py
Normal file
@@ -0,0 +1,247 @@
|
||||
#!/usr/bin/python
|
||||
|
||||
# Copyright: (c) 2017, Giovanni Sciortino (@giovannisciortino)
|
||||
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||
|
||||
from __future__ import absolute_import, division, print_function
|
||||
__metaclass__ = type
|
||||
|
||||
ANSIBLE_METADATA = {'metadata_version': '1.1',
|
||||
'status': ['preview'],
|
||||
'supported_by': 'community'}
|
||||
|
||||
DOCUMENTATION = '''
|
||||
---
|
||||
module: rhsm_repository
|
||||
short_description: Manage RHSM repositories using the subscription-manager command
|
||||
description:
|
||||
- Manage (Enable/Disable) RHSM repositories to the Red Hat Subscription
|
||||
Management entitlement platform using the C(subscription-manager) command.
|
||||
author: Giovanni Sciortino (@giovannisciortino)
|
||||
notes:
|
||||
- In order to manage RHSM repositories the system must be already registered
|
||||
to RHSM manually or using the Ansible C(redhat_subscription) module.
|
||||
|
||||
requirements:
|
||||
- subscription-manager
|
||||
options:
|
||||
state:
|
||||
description:
|
||||
- If state is equal to present or disabled, indicates the desired
|
||||
repository state.
|
||||
choices: [present, enabled, absent, disabled]
|
||||
required: True
|
||||
default: "present"
|
||||
name:
|
||||
description:
|
||||
- The ID of repositories to enable.
|
||||
- To operate on several repositories this can accept a comma separated
|
||||
list or a YAML list.
|
||||
required: True
|
||||
purge:
|
||||
description:
|
||||
- Disable all currently enabled repositories that are not specified in C(name).
|
||||
Only set this to C(True) if passing in a list of repositories to the C(name) field.
|
||||
Using this with C(loop) will most likely not have the desired result.
|
||||
type: bool
|
||||
default: False
|
||||
'''
|
||||
|
||||
EXAMPLES = '''
|
||||
- name: Enable a RHSM repository
|
||||
rhsm_repository:
|
||||
name: rhel-7-server-rpms
|
||||
|
||||
- name: Disable all RHSM repositories
|
||||
rhsm_repository:
|
||||
name: '*'
|
||||
state: disabled
|
||||
|
||||
- name: Enable all repositories starting with rhel-6-server
|
||||
rhsm_repository:
|
||||
name: rhel-6-server*
|
||||
state: enabled
|
||||
|
||||
- name: Disable all repositories except rhel-7-server-rpms
|
||||
rhsm_repository:
|
||||
name: rhel-7-server-rpms
|
||||
purge: True
|
||||
'''
|
||||
|
||||
RETURN = '''
|
||||
repositories:
|
||||
description:
|
||||
- The list of RHSM repositories with their states.
|
||||
- When this module is used to change the repository states, this list contains the updated states after the changes.
|
||||
returned: success
|
||||
type: list
|
||||
'''
|
||||
|
||||
import re
|
||||
import os
|
||||
from fnmatch import fnmatch
|
||||
from copy import deepcopy
|
||||
from ansible.module_utils.basic import AnsibleModule
|
||||
|
||||
|
||||
def run_subscription_manager(module, arguments):
    """Run subscription-manager with *arguments* under a C locale.

    Common failure modes (missing root privileges, no subscribed
    repositories, generic nonzero exit) are translated into fail_json
    calls; on success the (rc, out, err) triple is returned.
    """
    rhsm_bin = module.get_bin_path('subscription-manager')
    if not rhsm_bin:
        module.fail_json(msg='The executable file subscription-manager was not found in PATH')

    # Force a C locale so the English messages matched below are stable.
    rc, out, err = module.run_command(
        "%s %s" % (rhsm_bin, " ".join(arguments)),
        environ_update=dict(LANG='C', LC_ALL='C', LC_MESSAGES='C'))

    bad_password = 'The password you typed is invalid.\nPlease try again.\n'
    if rc == 1 and (err == bad_password or os.getuid() != 0):
        module.fail_json(msg='The executable file subscription-manager must be run using root privileges')
    if rc == 0 and out == 'This system has no repositories available through subscriptions.\n':
        module.fail_json(msg='This system has no repositories available through subscriptions')
    if rc == 1:
        module.fail_json(msg='subscription-manager failed with the following error: %s' % err)
    return rc, out, err
|
||||
|
||||
|
||||
def get_repository_list(module, list_parameter):
    """Return RHSM repositories as a list of dicts with keys
    id/name/url/enabled, parsed from 'subscription-manager repos' output.

    list_parameter selects the listing: 'list_enabled', 'list_disabled'
    or 'list' (all repositories).
    """
    # Generate RHSM repository list and return a list of dict
    if list_parameter == 'list_enabled':
        rhsm_arguments = ['repos', '--list-enabled']
    elif list_parameter == 'list_disabled':
        rhsm_arguments = ['repos', '--list-disabled']
    elif list_parameter == 'list':
        rhsm_arguments = ['repos', '--list']
    rc, out, err = run_subscription_manager(module, rhsm_arguments)

    # Header/separator lines in the command output that carry no repo data.
    skip_lines = [
        '+----------------------------------------------------------+',
        '    Available Repositories in /etc/yum.repos.d/redhat.repo'
    ]
    repo_id_re = re.compile(r'Repo ID:\s+(.*)')
    repo_name_re = re.compile(r'Repo Name:\s+(.*)')
    repo_url_re = re.compile(r'Repo URL:\s+(.*)')
    repo_enabled_re = re.compile(r'Enabled:\s+(.*)')

    repo_id = ''
    repo_name = ''
    repo_url = ''
    repo_enabled = ''

    repo_result = []
    for line in out.splitlines():
        if line == '' or line in skip_lines:
            continue

        repo_id_match = repo_id_re.match(line)
        if repo_id_match:
            repo_id = repo_id_match.group(1)
            continue

        repo_name_match = repo_name_re.match(line)
        if repo_name_match:
            repo_name = repo_name_match.group(1)
            continue

        repo_url_match = repo_url_re.match(line)
        if repo_url_match:
            repo_url = repo_url_match.group(1)
            continue

        # 'Enabled:' is the last field of each record, so matching it means
        # the fields accumulated above form a complete repository entry.
        repo_enabled_match = repo_enabled_re.match(line)
        if repo_enabled_match:
            repo_enabled = repo_enabled_match.group(1)

            repo = {
                "id": repo_id,
                "name": repo_name,
                "url": repo_url,
                "enabled": True if repo_enabled == '1' else False
            }

            repo_result.append(repo)

    return repo_result
|
||||
|
||||
|
||||
def repository_modify(module, state, name, purge=False):
    """Bring the RHSM repositories matched by *name* to *state* and exit.

    module: AnsibleModule used to run subscription-manager and to exit.
    state:  'enabled'/'present' to enable, 'disabled'/'absent' to disable.
    name:   list of repository ID patterns (fnmatch globs are honoured).
    purge:  when True, additionally disable every enabled repository that
            was not matched by the task.

    Always terminates the module via exit_json/fail_json.
    """
    name = set(name)
    current_repo_list = get_repository_list(module, 'list')
    updated_repo_list = deepcopy(current_repo_list)

    # Map each requested pattern to the list of existing repos it matches.
    matched_existing_repo = {}
    for repoid in name:
        matched_existing_repo[repoid] = []
        for idx, repo in enumerate(current_repo_list):
            if fnmatch(repo['id'], repoid):
                matched_existing_repo[repoid].append(repo)
                # Update updated_repo_list to return it as result variable.
                # BUGFIX: 'present' must count as enabled too; checking only
                # 'enabled' mis-reported repos as disabled for state=present
                # and broke the purge calculation below.
                updated_repo_list[idx]['enabled'] = state in ('enabled', 'present')

    changed = False
    results = []
    diff_before = ""
    diff_after = ""
    rhsm_arguments = ['repos']

    for repoid in matched_existing_repo:
        if len(matched_existing_repo[repoid]) == 0:
            results.append("%s is not a valid repository ID" % repoid)
            module.fail_json(results=results, msg="%s is not a valid repository ID" % repoid)
        for repo in matched_existing_repo[repoid]:
            if state in ['disabled', 'absent']:
                if repo['enabled']:
                    changed = True
                    diff_before += "Repository '%s' is enabled for this system\n" % repo['id']
                    diff_after += "Repository '%s' is disabled for this system\n" % repo['id']
                results.append("Repository '%s' is disabled for this system" % repo['id'])
                rhsm_arguments += ['--disable', repo['id']]
            elif state in ['enabled', 'present']:
                if not repo['enabled']:
                    changed = True
                    diff_before += "Repository '%s' is disabled for this system\n" % repo['id']
                    diff_after += "Repository '%s' is enabled for this system\n" % repo['id']
                results.append("Repository '%s' is enabled for this system" % repo['id'])
                rhsm_arguments += ['--enable', repo['id']]

    # Disable all enabled repos on the system that are not in the task and not
    # marked as disabled by the task.
    if purge:
        enabled_repo_ids = set(repo['id'] for repo in updated_repo_list if repo['enabled'])
        to_disable = enabled_repo_ids.difference(set(matched_existing_repo.keys()))
        for repoid in to_disable:
            changed = True
            # BUGFIX: the original called diff_before.join(...) /
            # diff_after.join(...), which are no-ops — str.join returns a
            # new string and strings are immutable. Use += as everywhere
            # else (also drops the stray doubled quote in the message).
            diff_before += "Repository '{repoid}' is enabled for this system\n".format(repoid=repoid)
            diff_after += "Repository '{repoid}' is disabled for this system\n".format(repoid=repoid)
            results.append("Repository '{repoid}' is disabled for this system".format(repoid=repoid))
            rhsm_arguments.extend(['--disable', repoid])

    diff = {'before': diff_before,
            'after': diff_after,
            'before_header': "RHSM repositories",
            'after_header': "RHSM repositories"}

    if not module.check_mode:
        rc, out, err = run_subscription_manager(module, rhsm_arguments)
        results = out.splitlines()
    module.exit_json(results=results, changed=changed, repositories=updated_repo_list, diff=diff)
|
||||
|
||||
|
||||
def main():
    """Entry point: parse arguments and apply the requested repository state."""
    module = AnsibleModule(
        argument_spec=dict(
            name=dict(type='list', required=True),
            state=dict(choices=['enabled', 'disabled', 'present', 'absent'], default='enabled'),
            purge=dict(type='bool', default=False),
        ),
        supports_check_mode=True,
    )

    params = module.params
    repository_modify(module, params['state'], params['name'], params['purge'])
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
main()
|
||||
201
plugins/modules/packaging/os/slackpkg.py
Normal file
201
plugins/modules/packaging/os/slackpkg.py
Normal file
@@ -0,0 +1,201 @@
|
||||
#!/usr/bin/python
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
# (c) 2014, Kim Nørgaard
|
||||
# Written by Kim Nørgaard <jasen@jasen.dk>
|
||||
# Based on pkgng module written by bleader <bleader@ratonland.org>
|
||||
# that was based on pkgin module written by Shaun Zinck <shaun.zinck at gmail.com>
|
||||
# that was based on pacman module written by Afterburn <https://github.com/afterburn>
|
||||
# that was based on apt module written by Matthew Williams <matthew@flowroute.com>
|
||||
#
|
||||
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||
|
||||
from __future__ import absolute_import, division, print_function
|
||||
__metaclass__ = type
|
||||
|
||||
|
||||
ANSIBLE_METADATA = {'metadata_version': '1.1',
|
||||
'status': ['preview'],
|
||||
'supported_by': 'community'}
|
||||
|
||||
|
||||
DOCUMENTATION = '''
|
||||
---
|
||||
module: slackpkg
|
||||
short_description: Package manager for Slackware >= 12.2
|
||||
description:
|
||||
- Manage binary packages for Slackware using 'slackpkg' which
|
||||
is available in versions after 12.2.
|
||||
options:
|
||||
name:
|
||||
description:
|
||||
- name of package to install/remove
|
||||
required: true
|
||||
|
||||
state:
|
||||
description:
|
||||
- state of the package, you can use "installed" as an alias for C(present) and removed as one for C(absent).
|
||||
choices: [ 'present', 'absent', 'latest' ]
|
||||
required: false
|
||||
default: present
|
||||
|
||||
update_cache:
|
||||
description:
|
||||
- update the package database first
|
||||
required: false
|
||||
default: false
|
||||
type: bool
|
||||
|
||||
author: Kim Nørgaard (@KimNorgaard)
|
||||
requirements: [ "Slackware >= 12.2" ]
|
||||
'''
|
||||
|
||||
EXAMPLES = '''
|
||||
# Install package foo
|
||||
- slackpkg:
|
||||
name: foo
|
||||
state: present
|
||||
|
||||
# Remove packages foo and bar
|
||||
- slackpkg:
|
||||
name: foo,bar
|
||||
state: absent
|
||||
|
||||
# Make sure that it is the most updated package
|
||||
- slackpkg:
|
||||
name: foo
|
||||
state: latest
|
||||
'''
|
||||
|
||||
from ansible.module_utils.basic import AnsibleModule
|
||||
|
||||
|
||||
def query_package(module, slackpkg_path, name):
    """Return True if a package called *name* is installed.

    Slackware records installed packages as files named
    <name>-<version>-<arch>-<build> under /var/log/packages; an entry for
    this host's architecture or 'noarch' counts as installed.

    module and slackpkg_path are unused but kept for interface consistency
    with the other package helpers.
    """
    import glob
    import platform

    machine = platform.machine()
    # BUGFIX: the original pattern used "[%s|noarch]", but [] is a glob
    # *character class*, so it matched any single character drawn from the
    # arch string rather than the literal arch/noarch alternatives.
    # Check the two alternatives with separate, explicit globs instead.
    pattern = "/var/log/packages/%s-*-%s-*"
    packages = (glob.glob(pattern % (name, machine)) +
                glob.glob(pattern % (name, "noarch")))

    return len(packages) > 0
|
||||
|
||||
|
||||
def remove_packages(module, slackpkg_path, packages):
    """Dispel each installed package in *packages*; exit the module with the result."""
    removed_count = 0
    # One package per iteration so a failure can name the culprit.
    for pkg in packages:
        # Nothing to do for packages that are not installed.
        if not query_package(module, slackpkg_path, pkg):
            continue

        if not module.check_mode:
            rc, out, err = module.run_command(
                "%s -default_answer=y -batch=on remove %s" % (slackpkg_path, pkg))
            # The package must be gone afterwards, otherwise removal failed.
            if query_package(module, slackpkg_path, pkg):
                module.fail_json(msg="failed to remove %s: %s" % (pkg, out))

        removed_count += 1

    if removed_count:
        module.exit_json(changed=True, msg="removed %s package(s)" % removed_count)

    module.exit_json(changed=False, msg="package(s) already absent")
|
||||
|
||||
|
||||
def install_packages(module, slackpkg_path, packages):
    """Install each missing package in *packages*; exit the module with the result."""
    installed_count = 0

    for pkg in packages:
        # Skip packages that are already on the system.
        if query_package(module, slackpkg_path, pkg):
            continue

        if not module.check_mode:
            rc, out, err = module.run_command(
                "%s -default_answer=y -batch=on install %s" % (slackpkg_path, pkg))
            # Verify the install actually landed.
            if not query_package(module, slackpkg_path, pkg):
                module.fail_json(msg="failed to install %s: %s" % (pkg, out),
                                 stderr=err)

        installed_count += 1

    if installed_count:
        module.exit_json(changed=True, msg="present %s package(s)" % installed_count)

    module.exit_json(changed=False, msg="package(s) already present")
|
||||
|
||||
|
||||
def upgrade_packages(module, slackpkg_path, packages):
    """Run 'slackpkg upgrade' for every package in *packages*; exit the module.

    Note: every listed package counts toward the change total, matching the
    original behaviour (a non-empty list always reports changed=True).
    """
    upgraded_count = 0

    for pkg in packages:
        if not module.check_mode:
            rc, out, err = module.run_command(
                "%s -default_answer=y -batch=on upgrade %s" % (slackpkg_path, pkg))
            # After an upgrade the package must still be present.
            if not query_package(module, slackpkg_path, pkg):
                module.fail_json(msg="failed to install %s: %s" % (pkg, out),
                                 stderr=err)

        upgraded_count += 1

    if upgraded_count:
        module.exit_json(changed=True, msg="present %s package(s)" % upgraded_count)

    module.exit_json(changed=False, msg="package(s) already present")
|
||||
|
||||
|
||||
def update_cache(module, slackpkg_path):
    """Refresh the slackpkg package database; fail the module on error."""
    rc, dummy_out, dummy_err = module.run_command("%s -batch=on update" % slackpkg_path)
    if rc != 0:
        module.fail_json(msg="Could not update package cache")
|
||||
|
||||
|
||||
def main():
    """Entry point: dispatch to install/remove/upgrade based on 'state'."""
    module = AnsibleModule(
        argument_spec=dict(
            state=dict(default="installed",
                       choices=['installed', 'removed', 'absent', 'present', 'latest']),
            name=dict(aliases=["pkg"], required=True, type='list'),
            update_cache=dict(default=False, aliases=["update-cache"], type='bool'),
        ),
        supports_check_mode=True)

    slackpkg_path = module.get_bin_path('slackpkg', True)

    params = module.params
    pkgs = params['name']

    if params["update_cache"]:
        update_cache(module, slackpkg_path)

    state = params['state']
    if state == 'latest':
        upgrade_packages(module, slackpkg_path, pkgs)
    elif state in ('present', 'installed'):
        install_packages(module, slackpkg_path, pkgs)
    elif state in ('removed', 'absent'):
        remove_packages(module, slackpkg_path, pkgs)
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
main()
|
||||
259
plugins/modules/packaging/os/snap.py
Normal file
259
plugins/modules/packaging/os/snap.py
Normal file
@@ -0,0 +1,259 @@
|
||||
#!/usr/bin/python
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
# Copyright: (c) 2018, Stanislas Lange (angristan) <angristan@pm.me>
|
||||
# Copyright: (c) 2018, Victor Carceler <vcarceler@iespuigcastellar.xeill.net>
|
||||
|
||||
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||
|
||||
from __future__ import absolute_import, division, print_function
|
||||
__metaclass__ = type
|
||||
|
||||
ANSIBLE_METADATA = {
|
||||
'metadata_version': '1.1',
|
||||
'status': ['preview'],
|
||||
'supported_by': 'community'
|
||||
}
|
||||
|
||||
DOCUMENTATION = '''
|
||||
---
|
||||
module: snap
|
||||
|
||||
short_description: Manages snaps
|
||||
|
||||
|
||||
description:
|
||||
- "Manages snaps packages."
|
||||
|
||||
options:
|
||||
name:
|
||||
description:
|
||||
- Name of the snap to install or remove. Can be a list of snaps.
|
||||
required: true
|
||||
state:
|
||||
description:
|
||||
- Desired state of the package.
|
||||
required: false
|
||||
default: present
|
||||
choices: [ absent, present ]
|
||||
classic:
|
||||
description:
|
||||
- Confinement policy. The classic confinement allows a snap to have
|
||||
the same level of access to the system as "classic" packages,
|
||||
like those managed by APT. This option corresponds to the --classic argument.
|
||||
This option can only be specified if there is a single snap in the task.
|
||||
type: bool
|
||||
required: false
|
||||
default: False
|
||||
channel:
|
||||
description:
|
||||
- Define which release of a snap is installed and tracked for updates.
|
||||
This option can only be specified if there is a single snap in the task.
|
||||
type: str
|
||||
required: false
|
||||
default: stable
|
||||
|
||||
author:
|
||||
- Victor Carceler (@vcarceler) <vcarceler@iespuigcastellar.xeill.net>
|
||||
- Stanislas Lange (@angristan) <angristan@pm.me>
|
||||
'''
|
||||
|
||||
EXAMPLES = '''
|
||||
# Install "foo" and "bar" snap
|
||||
- name: Install foo
|
||||
snap:
|
||||
name:
|
||||
- foo
|
||||
- bar
|
||||
|
||||
# Remove "foo" snap
|
||||
- name: Remove foo
|
||||
snap:
|
||||
name: foo
|
||||
state: absent
|
||||
|
||||
# Install a snap with classic confinement
|
||||
- name: Install "foo" with option --classic
|
||||
snap:
|
||||
name: foo
|
||||
classic: yes
|
||||
|
||||
# Install a snap with from a specific channel
|
||||
- name: Install "foo" with option --channel=latest/edge
|
||||
snap:
|
||||
name: foo
|
||||
channel: latest/edge
|
||||
'''
|
||||
|
||||
RETURN = '''
|
||||
classic:
|
||||
description: Whether or not the snaps were installed with the classic confinement
|
||||
type: bool
|
||||
returned: When snaps are installed
|
||||
channel:
|
||||
description: The channel the snaps were installed from
|
||||
type: str
|
||||
returned: When snaps are installed
|
||||
cmd:
|
||||
description: The command that was executed on the host
|
||||
type: str
|
||||
returned: When changed is true
|
||||
snaps_installed:
|
||||
description: The list of actually installed snaps
|
||||
type: list
|
||||
returned: When any snaps have been installed
|
||||
snaps_removed:
|
||||
description: The list of actually removed snaps
|
||||
type: list
|
||||
returned: When any snaps have been removed
|
||||
'''
|
||||
|
||||
import operator
|
||||
import re
|
||||
|
||||
from ansible.module_utils.basic import AnsibleModule
|
||||
|
||||
|
||||
def validate_input_snaps(module):
    """Fail the module if any requested snap does not exist in the store."""
    for requested in module.params['name']:
        if not snap_exists(module, requested):
            module.fail_json(msg="No snap matching '%s' available." % requested)
|
||||
|
||||
|
||||
def snap_exists(module, snap_name):
    """Return True when 'snap info <name>' succeeds, i.e. the snap is known."""
    snap_path = module.get_bin_path("snap", True)
    cmd = ' '.join([snap_path, 'info', snap_name])
    rc, out, err = module.run_command(cmd, check_rc=False)
    return rc == 0
|
||||
|
||||
|
||||
def is_snap_installed(module, snap_name):
    """Return True when 'snap list <name>' succeeds, i.e. the snap is installed."""
    snap_path = module.get_bin_path("snap", True)
    cmd = ' '.join([snap_path, 'list', snap_name])
    rc, out, err = module.run_command(cmd, check_rc=False)
    return rc == 0
|
||||
|
||||
|
||||
def get_snap_for_action(module):
    """Return the snaps whose state still differs from the requested one.

    For state=present these are the snaps not yet installed; for
    state=absent, those currently installed.
    """
    requested = module.params['name']

    if module.params['state'] == 'present':
        return [s for s in requested if not is_snap_installed(module, s)]
    return [s for s in requested if is_snap_installed(module, s)]
|
||||
|
||||
|
||||
def get_base_cmd_parts(module):
    """Build the leading 'snap <action> [options]' argv for the current state."""
    state = module.params['state']
    snap_action = {'present': 'install', 'absent': 'remove'}[state]

    snap_path = module.get_bin_path("snap", True)
    cmd_parts = [snap_path, snap_action]

    if snap_action == 'install':
        # --classic / --channel only apply when installing; the default
        # 'stable' channel needs no explicit flag.
        if module.params['classic']:
            cmd_parts.append('--classic')
        channel = module.params['channel']
        if channel and channel != 'stable':
            cmd_parts.extend(['--channel', channel])

    return cmd_parts
|
||||
|
||||
|
||||
def get_cmd_parts(module, snap_names):
    """Return a list of argv lists to execute for *snap_names*.

    snap only accepts the per-snap options (--classic/--channel) when
    installing a single snap, so in that case one command is emitted per
    snap; otherwise one command covers all of them.
    """
    cmd_parts = get_base_cmd_parts(module)

    installing = module.params['state'] == 'present'
    per_snap_opts = '--classic' in cmd_parts or '--channel' in cmd_parts

    if installing and per_snap_opts and len(snap_names) > 1:
        return [cmd_parts + [name] for name in snap_names]

    return [cmd_parts + snap_names]
|
||||
|
||||
|
||||
def run_cmd_for(module, snap_names):
    """Join the per-snap commands into one shell invocation and run it.

    Returns a (cmd, rc, stdout, stderr) tuple.
    """
    commands = [' '.join(parts) for parts in get_cmd_parts(module, snap_names)]
    cmd = 'sh -c "{0}"'.format('; '.join(commands))

    # Actually execute the snap command
    rc, out, err = module.run_command(cmd, check_rc=False)
    return cmd, rc, out, err
|
||||
|
||||
|
||||
def execute_action(module):
    """Install or remove the requested snaps and exit the module.

    Only snaps whose state actually differs are acted upon; when none do,
    the module exits unchanged. In install mode the exit payload also
    reports the classic/channel options that were used.
    """
    is_install_mode = module.params['state'] == 'present'

    exit_kwargs = {}
    if is_install_mode:
        exit_kwargs = {
            'classic': module.params['classic'],
            'channel': module.params['channel'],
        }

    actionable_snaps = get_snap_for_action(module)
    if not actionable_snaps:
        module.exit_json(changed=False, **exit_kwargs)

    result_key = 'snaps_installed' if is_install_mode else 'snaps_removed'
    changed_def_args = {'changed': True, result_key: actionable_snaps}

    if module.check_mode:
        module.exit_json(**dict(changed_def_args, **exit_kwargs))

    cmd, rc, out, err = run_cmd_for(module, actionable_snaps)
    cmd_out_args = {'cmd': cmd, 'rc': rc, 'stdout': out, 'stderr': err}

    if rc == 0:
        module.exit_json(**dict(changed_def_args, **dict(cmd_out_args, **exit_kwargs)))
    else:
        # Try to surface a friendlier message for the common
        # "needs classic confinement" failure.
        msg = "Ooops! Snap installation failed while executing '{cmd}', please examine logs and error output for more details.".format(cmd=cmd)
        if is_install_mode:
            confinement_err = re.match(
                r'^error: This revision of snap "(?P<package_name>\w+)" was published using classic confinement', err)
            if confinement_err is not None:
                msg = "Couldn't install {name} because it requires classic confinement".format(
                    name=confinement_err.group('package_name'))
        module.fail_json(msg=msg, **dict(cmd_out_args, **exit_kwargs))
|
||||
|
||||
|
||||
def main():
    """Entry point: validate the requested snaps, then apply the desired state."""
    module = AnsibleModule(
        argument_spec={
            'name': dict(type='list', required=True),
            'state': dict(type='str', required=False, default='present',
                          choices=['absent', 'present']),
            'classic': dict(type='bool', required=False, default=False),
            'channel': dict(type='str', required=False, default='stable'),
        },
        supports_check_mode=True,
    )

    validate_input_snaps(module)

    # Apply changes to the snaps
    execute_action(module)
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
main()
|
||||
644
plugins/modules/packaging/os/sorcery.py
Normal file
644
plugins/modules/packaging/os/sorcery.py
Normal file
@@ -0,0 +1,644 @@
|
||||
#!/usr/bin/python
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
# (c) 2015-2016, Vlad Glagolev <scm@vaygr.net>
|
||||
#
|
||||
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||
|
||||
from __future__ import absolute_import, division, print_function
|
||||
__metaclass__ = type
|
||||
|
||||
|
||||
ANSIBLE_METADATA = {'metadata_version': '1.1',
|
||||
'status': ['preview'],
|
||||
'supported_by': 'community'}
|
||||
|
||||
|
||||
DOCUMENTATION = '''
|
||||
---
|
||||
module: sorcery
|
||||
short_description: Package manager for Source Mage GNU/Linux
|
||||
description:
|
||||
- Manages "spells" on Source Mage GNU/Linux using I(sorcery) toolchain
|
||||
author: "Vlad Glagolev (@vaygr)"
|
||||
notes:
|
||||
- When all three components are selected, the update goes by the sequence --
|
||||
Sorcery -> Grimoire(s) -> Spell(s); you cannot override it.
|
||||
- grimoire handling (i.e. add/remove, including SCM/rsync versions) is not
|
||||
yet supported.
|
||||
requirements:
|
||||
- bash
|
||||
options:
|
||||
name:
|
||||
description:
|
||||
- Name of the spell
|
||||
- multiple names can be given, separated by commas
|
||||
- special value '*' in conjunction with states C(latest) or
|
||||
C(rebuild) will update or rebuild the whole system respectively
|
||||
aliases: ["spell"]
|
||||
|
||||
state:
|
||||
description:
|
||||
- Whether to cast, dispel or rebuild a package
|
||||
- state C(cast) is an equivalent of C(present), not C(latest)
|
||||
- state C(latest) always triggers C(update_cache=yes)
|
||||
- state C(rebuild) implies cast of all specified spells, not only
|
||||
those existed before
|
||||
choices: ["present", "latest", "absent", "cast", "dispelled", "rebuild"]
|
||||
default: "present"
|
||||
|
||||
depends:
|
||||
description:
|
||||
- Comma-separated list of _optional_ dependencies to build a spell
|
||||
(or make sure it is built) with; use +/- in front of dependency
|
||||
to turn it on/off ('+' is optional though)
|
||||
- this option is ignored if C(name) parameter is equal to '*' or
|
||||
contains more than one spell
|
||||
- providers must be supplied in the form recognized by Sorcery, e.g.
|
||||
'openssl(SSL)'
|
||||
|
||||
update:
|
||||
description:
|
||||
- Whether or not to update sorcery scripts at the very first stage
|
||||
type: bool
|
||||
default: 'no'
|
||||
|
||||
update_cache:
|
||||
description:
|
||||
- Whether or not to update grimoire collection before casting spells
|
||||
type: bool
|
||||
default: 'no'
|
||||
aliases: ["update_codex"]
|
||||
|
||||
cache_valid_time:
|
||||
description:
|
||||
- Time in seconds to invalidate grimoire collection on update
|
||||
- especially useful for SCM and rsync grimoires
|
||||
- makes sense only in pair with C(update_cache)
|
||||
'''
|
||||
|
||||
|
||||
EXAMPLES = '''
|
||||
# Make sure spell 'foo' is installed
|
||||
- sorcery:
|
||||
spell: foo
|
||||
state: present
|
||||
|
||||
# Make sure spells 'foo', 'bar' and 'baz' are removed
|
||||
- sorcery:
|
||||
spell: foo,bar,baz
|
||||
state: absent
|
||||
|
||||
# Make sure spell 'foo' with dependencies 'bar' and 'baz' is installed
|
||||
- sorcery:
|
||||
spell: foo
|
||||
depends: bar,baz
|
||||
state: present
|
||||
|
||||
# Make sure spell 'foo' with 'bar' and without 'baz' dependencies is installed
|
||||
- sorcery:
|
||||
spell: foo
|
||||
depends: +bar,-baz
|
||||
state: present
|
||||
|
||||
# Make sure spell 'foo' with libressl (providing SSL) dependency is installed
|
||||
- sorcery:
|
||||
spell: foo
|
||||
depends: libressl(SSL)
|
||||
state: present
|
||||
|
||||
# Playbook: make sure spells with/without required dependencies (if any) are installed
|
||||
- sorcery:
|
||||
name: "{{ item.spell }}"
|
||||
depends: "{{ item.depends | default(None) }}"
|
||||
state: present
|
||||
loop:
|
||||
- { spell: 'vifm', depends: '+file,-gtk+2' }
|
||||
- { spell: 'fwknop', depends: 'gpgme' }
|
||||
- { spell: 'pv,tnftp,tor' }
|
||||
|
||||
# Install the latest version of spell 'foo' using regular glossary
|
||||
- sorcery:
|
||||
name: foo
|
||||
state: latest
|
||||
|
||||
# Rebuild spell 'foo'
|
||||
- sorcery:
|
||||
spell: foo
|
||||
state: rebuild
|
||||
|
||||
# Rebuild the whole system, but update Sorcery and Codex first
|
||||
- sorcery:
|
||||
spell: '*'
|
||||
state: rebuild
|
||||
update: yes
|
||||
update_cache: yes
|
||||
|
||||
# Refresh the grimoire collection if it's 1 day old using native sorcerous alias
|
||||
- sorcery:
|
||||
update_codex: yes
|
||||
cache_valid_time: 86400
|
||||
|
||||
# Update only Sorcery itself
|
||||
- sorcery:
|
||||
update: yes
|
||||
'''
|
||||
|
||||
|
||||
RETURN = '''
|
||||
'''
|
||||
|
||||
|
||||
import datetime
|
||||
import fileinput
|
||||
import os
|
||||
import re
|
||||
import shutil
|
||||
import sys
|
||||
|
||||
|
||||
# auto-filled at module init
# Maps each sorcery toolchain command name to its absolute path.
# NOTE(review): presumably populated via module.get_bin_path() in main()
# before any command is run — confirm against main(), which is outside
# this view.
SORCERY = {
    'sorcery': None,
    'scribe': None,
    'cast': None,
    'dispel': None,
    'gaze': None
}

# Sorcery's on-disk locations: the cast/install queues live under the log
# dir; grimoire state (the '<grimoire>.lastupdate' stamps and the 'depends'
# database read by codex_fresh()/match_depends()) lives under the state dir.
SORCERY_LOG_DIR = "/var/log/sorcery"
SORCERY_STATE_DIR = "/var/state/sorcery"
|
||||
|
||||
|
||||
def get_sorcery_ver(module):
    """ Get Sorcery version. """

    rc, stdout, stderr = module.run_command("%s --version" % SORCERY['sorcery'])

    # An empty version string is as useless as a failing command.
    if rc != 0 or not stdout:
        module.fail_json(msg="unable to get Sorcery version")

    return stdout.strip()
|
||||
|
||||
|
||||
def codex_fresh(codex, module):
    """ Check if grimoire collection is fresh enough. """

    max_age = module.params['cache_valid_time']

    # Without a validity window the codex is always considered stale.
    if not max_age:
        return False

    freshness_window = datetime.timedelta(seconds=max_age)

    for grimoire in codex:
        stamp_file = os.path.join(SORCERY_STATE_DIR, grimoire + ".lastupdate")

        try:
            stamp_mtime = os.stat(stamp_file).st_mtime
        except Exception:
            # Missing/unreadable stamp file means we cannot prove freshness.
            return False

        stamp_ts = datetime.datetime.fromtimestamp(stamp_mtime)

        # A single stale grimoire invalidates the whole Codex.
        if stamp_ts + freshness_window < datetime.datetime.now():
            return False

    return True
|
||||
|
||||
|
||||
def codex_list(module):
    """ List valid grimoire collection. """

    rc, stdout, stderr = module.run_command("%s index" % SORCERY['scribe'])

    if rc != 0:
        module.fail_json(msg="unable to list grimoire collection, fix your Codex")

    entry_re = re.compile(r"^\s*\[\d+\] : (?P<grim>[\w\-+.]+) : [\w\-+./]+(?: : (?P<ver>[\w\-+.]+))?\s*$")

    codex = {}

    # drop 4-line header and empty trailing line
    for line in stdout.splitlines()[4:-1]:
        entry = entry_re.match(line)
        if entry:
            codex[entry.group('grim')] = entry.group('ver')

    if not codex:
        module.fail_json(msg="no grimoires to operate on; add at least one")

    return codex
|
||||
|
||||
|
||||
def update_sorcery(module):
    """ Update sorcery scripts.

    This runs 'sorcery update' ('sorcery -u'). Check mode always returns a
    positive change value.

    """

    changed = False

    if module.check_mode:
        # Exit only when updating Sorcery is the sole requested action;
        # otherwise fall through so the caller can continue with the
        # codex/spell stages.
        if not module.params['name'] and not module.params['update_cache']:
            module.exit_json(changed=True, msg="would have updated Sorcery")
    else:
        # Change detection: compare the reported version before and after
        # the update.
        sorcery_ver = get_sorcery_ver(module)

        cmd_sorcery = "%s update" % SORCERY['sorcery']

        rc, stdout, stderr = module.run_command(cmd_sorcery)

        if rc != 0:
            module.fail_json(msg="unable to update Sorcery: " + stdout)

        if sorcery_ver != get_sorcery_ver(module):
            changed = True

        # Same rule as in check mode: only terminate the module here when
        # no further work (spells or codex) was requested.
        if not module.params['name'] and not module.params['update_cache']:
            module.exit_json(changed=changed,
                             msg="successfully updated Sorcery")
|
||||
|
||||
|
||||
def update_codex(module):
    """ Update grimoire collections.

    This runs 'scribe update'. Check mode always returns a positive change
    value when 'cache_valid_time' is used.

    """

    params = module.params

    changed = False

    codex = codex_list(module)
    fresh = codex_fresh(codex, module)

    if module.check_mode:
        if not params['name']:
            if not fresh:
                changed = True

            module.exit_json(changed=changed, msg="would have updated Codex")
    # Note operator precedence: this reads
    # (not fresh) or (params['name'] and state == 'latest'),
    # i.e. update when the codex is stale, or when spells are about to be
    # brought to their latest versions.
    elif not fresh or params['name'] and params['state'] == 'latest':
        # SILENT is required as a workaround for query() in libgpg
        module.run_command_environ_update.update(dict(SILENT='1'))

        cmd_scribe = "%s update" % SORCERY['scribe']

        rc, stdout, stderr = module.run_command(cmd_scribe)

        if rc != 0:
            module.fail_json(msg="unable to update Codex: " + stdout)

        # Change detection: compare the grimoire listing before and after.
        if codex != codex_list(module):
            changed = True

    # Terminate here only when no spells were requested; otherwise the
    # caller continues with spell management.
    if not params['name']:
        module.exit_json(changed=changed,
                         msg="successfully updated Codex")
|
||||
|
||||
|
||||
def match_depends(module):
    """ Check for matching dependencies.

    This inspects spell's dependencies with the desired states and returns
    'False' if a recast is needed to match them. It also adds required lines
    to the system-wide depends file for proper recast procedure.

    """

    params = module.params
    spells = params['name']

    # Maps dependency spec -> requested state ('on'/'off'); entries are set
    # to None below once the depends file already records the wanted state.
    depends = {}

    depends_ok = True

    # Optional dependencies are only honoured for a single-spell task.
    if len(spells) > 1 or not params['depends']:
        return depends_ok

    spell = spells[0]

    if module.check_mode:
        # Work on a throwaway copy so check mode never mutates the real
        # depends database; the copy is removed at the end.
        sorcery_depends_orig = os.path.join(SORCERY_STATE_DIR, "depends")
        sorcery_depends = os.path.join(SORCERY_STATE_DIR, "depends.check")

        try:
            shutil.copy2(sorcery_depends_orig, sorcery_depends)
        except IOError:
            module.fail_json(msg="failed to copy depends.check file")
    else:
        sorcery_depends = os.path.join(SORCERY_STATE_DIR, "depends")

    # Parses one task-supplied dependency: optional +/- prefix, spell name,
    # optional '(PROVIDER)' suffix.
    rex = re.compile(r"^(?P<status>\+?|\-){1}(?P<depend>[a-z0-9]+[a-z0-9_\-\+\.]*(\([A-Z0-9_\-\+\.]+\))*)$")

    for d in params['depends'].split(','):
        match = rex.match(d)

        if not match:
            module.fail_json(msg="wrong depends line for spell '%s'" % spell)

        # normalize status
        if not match.group('status') or match.group('status') == '+':
            status = 'on'
        else:
            status = 'off'

        depends[match.group('depend')] = status

    # drop providers spec
    depends_list = [s.split('(')[0] for s in depends]

    # 'gaze version' validates that every requested dependency is a known
    # spell before the depends file is touched.
    cmd_gaze = "%s -q version %s" % (SORCERY['gaze'], ' '.join(depends_list))

    rc, stdout, stderr = module.run_command(cmd_gaze)

    if rc != 0:
        module.fail_json(msg="wrong dependencies for spell '%s'" % spell)

    # fileinput with inplace=True redirects sys.stdout into the file being
    # edited: every line written below becomes the file's new content.
    fi = fileinput.input(sorcery_depends, inplace=True)

    try:
        try:
            for line in fi:
                if line.startswith(spell + ':'):
                    match = None

                    for d in depends:
                        # when local status is 'off' and dependency is provider,
                        # use only provider value
                        d_offset = d.find('(')

                        if d_offset == -1:
                            d_p = ''
                        else:
                            d_p = re.escape(d[d_offset:])

                        # .escape() is needed mostly for the spells like 'libsigc++'
                        rex = re.compile("%s:(?:%s|%s):(?P<lstatus>on|off):optional:" %
                                         (re.escape(spell), re.escape(d), d_p))

                        match = rex.match(line)

                        # we matched the line "spell:dependency:on|off:optional:"
                        if match:
                            # if we also matched the local status, mark dependency
                            # as empty and put it back into depends file
                            if match.group('lstatus') == depends[d]:
                                depends[d] = None

                                sys.stdout.write(line)

                            # status is not that we need, so keep this dependency
                            # in the list for further reverse switching;
                            # stop and process the next line in both cases
                            break

                    if not match:
                        sys.stdout.write(line)
                else:
                    sys.stdout.write(line)
        except IOError:
            module.fail_json(msg="I/O error on the depends file")
    finally:
        fi.close()

    # Dependencies still holding a status are not yet recorded correctly;
    # append them so the subsequent recast picks them up.
    depends_new = [v for v in depends if depends[v]]

    if depends_new:
        try:
            try:
                fl = open(sorcery_depends, 'a')

                for k in depends_new:
                    fl.write("%s:%s:%s:optional::\n" % (spell, k, depends[k]))
            except IOError:
                module.fail_json(msg="I/O error on the depends file")
        finally:
            fl.close()

        # At least one dependency needs flipping -> a recast is required.
        depends_ok = False

    if module.check_mode:
        # Remove the scratch copy created for check mode.
        try:
            os.remove(sorcery_depends)
        except IOError:
            module.fail_json(msg="failed to clean up depends.backup file")

    return depends_ok
|
||||
|
||||
|
||||
def manage_spells(module):
    """ Cast or dispel spells.

    This manages the whole system ('*'), list or a single spell. Command 'cast'
    is used to install or rebuild spells, while 'dispel' takes care of theirs
    removal from the system.

    """

    params = module.params
    spells = params['name']

    sorcery_queue = os.path.join(SORCERY_LOG_DIR, "queue/install")

    if spells == '*':
        if params['state'] == 'latest':
            # back up original queue
            try:
                os.rename(sorcery_queue, sorcery_queue + ".backup")
            except IOError:
                module.fail_json(msg="failed to backup the update queue")

            # see update_codex()
            module.run_command_environ_update.update(dict(SILENT='1'))

            # BUG FIX: the command string previously lacked the '%' argument,
            # so the literal "%s queue" was executed instead of the binary path.
            cmd_sorcery = "%s queue" % SORCERY['sorcery']

            rc, stdout, stderr = module.run_command(cmd_sorcery)

            if rc != 0:
                module.fail_json(msg="failed to generate the update queue")

            try:
                queue_size = os.stat(sorcery_queue).st_size
            except Exception:
                module.fail_json(msg="failed to read the update queue")

            if queue_size != 0:
                if module.check_mode:
                    try:
                        os.rename(sorcery_queue + ".backup", sorcery_queue)
                    except IOError:
                        module.fail_json(msg="failed to restore the update queue")

                    module.exit_json(changed=True, msg="would have updated the system")

                cmd_cast = "%s --queue" % SORCERY['cast']

                rc, stdout, stderr = module.run_command(cmd_cast)

                if rc != 0:
                    module.fail_json(msg="failed to update the system")

                module.exit_json(changed=True, msg="successfully updated the system")
            else:
                module.exit_json(changed=False, msg="the system is already up to date")
        elif params['state'] == 'rebuild':
            if module.check_mode:
                module.exit_json(changed=True, msg="would have rebuilt the system")

            cmd_sorcery = "%s rebuild" % SORCERY['sorcery']

            rc, stdout, stderr = module.run_command(cmd_sorcery)

            if rc != 0:
                module.fail_json(msg="failed to rebuild the system: " + stdout)

            module.exit_json(changed=True, msg="successfully rebuilt the system")
        else:
            module.fail_json(msg="unsupported operation on '*' name value")
    else:
        if params['state'] in ('present', 'latest', 'rebuild', 'absent'):
            # extract versions from the 'gaze' command
            cmd_gaze = "%s -q version %s" % (SORCERY['gaze'], ' '.join(spells))

            rc, stdout, stderr = module.run_command(cmd_gaze)

            # fail if any of spells cannot be found
            if rc != 0:
                module.fail_json(msg="failed to locate spell(s) in the list (%s)" %
                                 ', '.join(spells))

            cast_queue = []
            dispel_queue = []

            rex = re.compile(r"[^|]+\|[^|]+\|(?P<spell>[^|]+)\|(?P<grim_ver>[^|]+)\|(?P<inst_ver>[^$]+)")

            # drop 2-line header and empty trailing line
            for line in stdout.splitlines()[2:-1]:
                match = rex.match(line)

                cast = False

                if params['state'] == 'present':
                    # spell is not installed..
                    if match.group('inst_ver') == '-':
                        # ..so set up depends reqs for it
                        match_depends(module)

                        cast = True
                    # spell is installed..
                    else:
                        # ..but does not conform depends reqs
                        if not match_depends(module):
                            cast = True
                elif params['state'] == 'latest':
                    # grimoire and installed versions do not match..
                    if match.group('grim_ver') != match.group('inst_ver'):
                        # ..so check for depends reqs first and set them up
                        match_depends(module)

                        cast = True
                    # grimoire and installed versions match..
                    else:
                        # ..but the spell does not conform depends reqs
                        if not match_depends(module):
                            cast = True
                elif params['state'] == 'rebuild':
                    cast = True
                # 'absent'
                else:
                    if match.group('inst_ver') != '-':
                        dispel_queue.append(match.group('spell'))

                if cast:
                    cast_queue.append(match.group('spell'))

            if cast_queue:
                if module.check_mode:
                    module.exit_json(changed=True, msg="would have cast spell(s)")

                cmd_cast = "%s -c %s" % (SORCERY['cast'], ' '.join(cast_queue))

                rc, stdout, stderr = module.run_command(cmd_cast)

                if rc != 0:
                    # BUG FIX: was '"... %s" + stdout', which concatenated the
                    # output after a literal '%s' instead of substituting it.
                    module.fail_json(msg="failed to cast spell(s): %s" % stdout)

                module.exit_json(changed=True, msg="successfully cast spell(s)")
            elif params['state'] != 'absent':
                module.exit_json(changed=False, msg="spell(s) are already cast")

            if dispel_queue:
                if module.check_mode:
                    module.exit_json(changed=True, msg="would have dispelled spell(s)")

                cmd_dispel = "%s %s" % (SORCERY['dispel'], ' '.join(dispel_queue))

                rc, stdout, stderr = module.run_command(cmd_dispel)

                if rc != 0:
                    # BUG FIX: same literal-'%s' concatenation as above.
                    module.fail_json(msg="failed to dispel spell(s): %s" % stdout)

                module.exit_json(changed=True, msg="successfully dispelled spell(s)")
            else:
                module.exit_json(changed=False, msg="spell(s) are already dispelled")
|
||||
|
||||
|
||||
def main():
    """Module entry point for the sorcery package manager.

    Validates privileges, resolves the sorcery tool paths, normalizes the
    requested state, and dispatches to update/cast/dispel helpers.
    """
    module = AnsibleModule(
        argument_spec=dict(
            name=dict(default=None, aliases=['spell'], type='list'),
            state=dict(default='present', choices=['present', 'latest',
                       'absent', 'cast', 'dispelled', 'rebuild']),
            depends=dict(default=None),
            update=dict(default=False, type='bool'),
            update_cache=dict(default=False, aliases=['update_codex'], type='bool'),
            cache_valid_time=dict(default=0, type='int')
        ),
        required_one_of=[['name', 'update', 'update_cache']],
        supports_check_mode=True
    )

    # sorcery operations modify the system; refuse to run unprivileged
    if os.geteuid() != 0:
        module.fail_json(msg="root privileges are required for this operation")

    # resolve each sorcery tool to its absolute path (fails if missing)
    for c in SORCERY:
        SORCERY[c] = module.get_bin_path(c, True)

    # prepare environment: run sorcery commands without asking questions
    module.run_command_environ_update = dict(PROMPT_DELAY='0', VOYEUR='0')

    params = module.params

    # normalize 'state' parameter: 'cast'/'dispelled' are aliases for
    # 'present'/'absent'
    if params['state'] in ('present', 'cast'):
        params['state'] = 'present'
    elif params['state'] in ('absent', 'dispelled'):
        params['state'] = 'absent'

    if params['update']:
        update_sorcery(module)

    # 'latest' implicitly needs a fresh codex to compare versions against
    if params['update_cache'] or params['state'] == 'latest':
        update_codex(module)

    if params['name']:
        manage_spells(module)
|
||||
|
||||
|
||||
# import module snippets
|
||||
from ansible.module_utils.basic import AnsibleModule
|
||||
|
||||
if __name__ == '__main__':
|
||||
main()
|
||||
261
plugins/modules/packaging/os/svr4pkg.py
Normal file
261
plugins/modules/packaging/os/svr4pkg.py
Normal file
@@ -0,0 +1,261 @@
|
||||
#!/usr/bin/python
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
# (c) 2012, Boyd Adamson <boyd () boydadamson.com>
|
||||
#
|
||||
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||
|
||||
from __future__ import absolute_import, division, print_function
|
||||
__metaclass__ = type
|
||||
|
||||
|
||||
ANSIBLE_METADATA = {'metadata_version': '1.1',
|
||||
'status': ['preview'],
|
||||
'supported_by': 'community'}
|
||||
|
||||
|
||||
DOCUMENTATION = '''
|
||||
---
|
||||
module: svr4pkg
|
||||
short_description: Manage Solaris SVR4 packages
|
||||
description:
|
||||
- Manages SVR4 packages on Solaris 10 and 11.
|
||||
- These were the native packages on Solaris <= 10 and are available
|
||||
as a legacy feature in Solaris 11.
|
||||
- Note that this is a very basic packaging system. It will not enforce
|
||||
dependencies on install or remove.
|
||||
author: "Boyd Adamson (@brontitall)"
|
||||
options:
|
||||
name:
|
||||
description:
|
||||
- Package name, e.g. C(SUNWcsr)
|
||||
required: true
|
||||
|
||||
state:
|
||||
description:
|
||||
- Whether to install (C(present)), or remove (C(absent)) a package.
|
||||
- If the package is to be installed, then I(src) is required.
|
||||
- The SVR4 package system doesn't provide an upgrade operation. You need to uninstall the old, then install the new package.
|
||||
required: true
|
||||
choices: ["present", "absent"]
|
||||
|
||||
src:
|
||||
description:
|
||||
- Specifies the location to install the package from. Required when C(state=present).
|
||||
- "Can be any path acceptable to the C(pkgadd) command's C(-d) option. e.g.: C(somefile.pkg), C(/dir/with/pkgs), C(http:/server/mypkgs.pkg)."
|
||||
- If using a file or directory, they must already be accessible by the host. See the M(copy) module for a way to get them there.
|
||||
proxy:
|
||||
description:
|
||||
- HTTP[s] proxy to be used if C(src) is a URL.
|
||||
response_file:
|
||||
description:
|
||||
- Specifies the location of a response file to be used if package expects input on install. (added in Ansible 1.4)
|
||||
required: false
|
||||
zone:
|
||||
description:
|
||||
- Whether to install the package only in the current zone, or install it into all zones.
|
||||
- The installation into all zones works only if you are working with the global zone.
|
||||
required: false
|
||||
default: "all"
|
||||
choices: ["current", "all"]
|
||||
category:
|
||||
description:
|
||||
- Install/Remove category instead of a single package.
|
||||
required: false
|
||||
type: bool
|
||||
'''
|
||||
|
||||
EXAMPLES = '''
|
||||
# Install a package from an already copied file
|
||||
- svr4pkg:
|
||||
name: CSWcommon
|
||||
src: /tmp/cswpkgs.pkg
|
||||
state: present
|
||||
|
||||
# Install a package directly from an http site
|
||||
- svr4pkg:
|
||||
name: CSWpkgutil
|
||||
src: 'http://get.opencsw.org/now'
|
||||
state: present
|
||||
zone: current
|
||||
|
||||
# Install a package with a response file
|
||||
- svr4pkg:
|
||||
name: CSWggrep
|
||||
src: /tmp/third-party.pkg
|
||||
response_file: /tmp/ggrep.response
|
||||
state: present
|
||||
|
||||
# Ensure that a package is not installed.
|
||||
- svr4pkg:
|
||||
name: SUNWgnome-sound-recorder
|
||||
state: absent
|
||||
|
||||
# Ensure that a category is not installed.
|
||||
- svr4pkg:
|
||||
name: FIREFOX
|
||||
state: absent
|
||||
category: true
|
||||
'''
|
||||
|
||||
|
||||
import os
|
||||
import tempfile
|
||||
|
||||
from ansible.module_utils.basic import AnsibleModule
|
||||
|
||||
|
||||
def package_installed(module, name, category):
    """Return True if the named SVR4 package (or category) is installed.

    'pkginfo -q' produces no output and reports presence purely through
    its exit status.
    """
    pkginfo = module.get_bin_path('pkginfo', True)
    args = [pkginfo, '-q']
    if category:
        args.append('-c')
    args.append(name)
    rc, out, err = module.run_command(' '.join(args))
    if rc == 0:
        return True
    else:
        return False
|
||||
|
||||
|
||||
def create_admin_file():
    """Write a temporary pkgadd(1m) admin file enabling fully
    non-interactive package operations and return its path.

    The caller is responsible for deleting the file when done.
    """
    (desc, filename) = tempfile.mkstemp(prefix='ansible_svr4pkg', text=True)
    fullauto = '''
mail=
instance=unique
partial=nocheck
runlevel=quit
idepend=nocheck
rdepend=nocheck
space=quit
setuid=nocheck
conflict=nocheck
action=nocheck
networktimeout=60
networkretries=3
authentication=quit
keystore=/var/sadm/security
proxy=
basedir=default
'''
    # BUG FIX: os.write() requires bytes on Python 3; the original passed
    # a str, which raises TypeError there.
    os.write(desc, fullauto.encode('utf-8'))
    os.close(desc)
    return filename
|
||||
|
||||
|
||||
def run_command(module, cmd):
    """Resolve cmd[0] to an absolute binary path, then execute the command.

    Note: mutates the caller's list — the program name is replaced in place.
    """
    cmd[0] = module.get_bin_path(cmd[0], True)
    return module.run_command(cmd)
|
||||
|
||||
|
||||
def package_install(module, name, src, proxy, response_file, zone, category):
    """Install a package (or category) with pkgadd using a fully
    non-interactive admin file.

    Returns the (rc, stdout, stderr) triple from pkgadd.
    """
    adminfile = create_admin_file()
    argv = ['pkgadd', '-n']
    # '-G' restricts the install to the current zone only
    if zone == 'current':
        argv.append('-G')
    argv.extend(['-a', adminfile, '-d', src])
    if proxy is not None:
        argv.extend(['-x', proxy])
    if response_file is not None:
        argv.extend(['-r', response_file])
    # '-Y' treats the name as a category instead of a package
    if category:
        argv.append('-Y')
    argv.append(name)
    result = run_command(module, argv)
    os.unlink(adminfile)
    return result
|
||||
|
||||
|
||||
def package_uninstall(module, name, src, category):
    """Remove a package (or category) with pkgrm, non-interactively.

    Returns the (rc, stdout, stderr) triple from pkgrm.
    """
    adminfile = create_admin_file()
    argv = ['pkgrm', '-na', adminfile]
    # '-Y' treats the name as a category instead of a package
    if category:
        argv.append('-Y')
    argv.append(name)
    result = run_command(module, argv)
    os.unlink(adminfile)
    return result
|
||||
|
||||
|
||||
def main():
    """Module entry point: install or remove an SVR4 package/category."""
    module = AnsibleModule(
        argument_spec=dict(
            name=dict(required=True),
            state=dict(required=True, choices=['present', 'absent']),
            src=dict(default=None),
            proxy=dict(default=None),
            response_file=dict(default=None),
            zone=dict(required=False, default='all', choices=['current', 'all']),
            category=dict(default=False, type='bool')
        ),
        supports_check_mode=True
    )
    state = module.params['state']
    name = module.params['name']
    src = module.params['src']
    proxy = module.params['proxy']
    response_file = module.params['response_file']
    zone = module.params['zone']
    category = module.params['category']
    # rc stays None when no install/uninstall was attempted
    rc = None
    out = ''
    err = ''
    result = {}
    result['name'] = name
    result['state'] = state

    if state == 'present':
        if src is None:
            module.fail_json(name=name,
                             msg="src is required when state=present")
        if not package_installed(module, name, category):
            if module.check_mode:
                module.exit_json(changed=True)
            (rc, out, err) = package_install(module, name, src, proxy, response_file, zone, category)
            # Stdout is normally empty but for some packages can be
            # very long and is not often useful
            if len(out) > 75:
                out = out[:75] + '...'

    elif state == 'absent':
        if package_installed(module, name, category):
            if module.check_mode:
                module.exit_json(changed=True)
            (rc, out, err) = package_uninstall(module, name, src, category)
            out = out[:75]

    # Returncodes as per pkgadd(1m)
    # 0 Successful completion
    # 1 Fatal error.
    # 2 Warning.
    # 3 Interruption.
    # 4 Administration.
    # 5 Administration. Interaction is required. Do not use pkgadd -n.
    # 10 Reboot after installation of all packages.
    # 20 Reboot after installation of this package.
    # 99 (observed) pkgadd: ERROR: could not process datastream from </tmp/pkgutil.pkg>
    if rc in (0, 2, 3, 10, 20):
        result['changed'] = True
    # no install nor uninstall, or failed
    else:
        result['changed'] = False

    # rc will be none when the package already was installed and no action took place
    # Only return failed=False when the returncode is known to be good as there may be more
    # undocumented failure return codes
    if rc not in (None, 0, 2, 10, 20):
        result['failed'] = True
    else:
        result['failed'] = False

    if out:
        result['stdout'] = out
    if err:
        result['stderr'] = err

    module.exit_json(**result)
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
main()
|
||||
204
plugins/modules/packaging/os/swdepot.py
Normal file
204
plugins/modules/packaging/os/swdepot.py
Normal file
@@ -0,0 +1,204 @@
|
||||
#!/usr/bin/python
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
# (c) 2013, Raul Melo
|
||||
# Written by Raul Melo <raulmelo@gmail.com>
|
||||
# Based on yum module written by Seth Vidal <skvidal at fedoraproject.org>
|
||||
#
|
||||
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||
|
||||
from __future__ import absolute_import, division, print_function
|
||||
__metaclass__ = type
|
||||
|
||||
|
||||
ANSIBLE_METADATA = {'metadata_version': '1.1',
|
||||
'status': ['preview'],
|
||||
'supported_by': 'community'}
|
||||
|
||||
|
||||
DOCUMENTATION = '''
|
||||
---
|
||||
module: swdepot
|
||||
short_description: Manage packages with swdepot package manager (HP-UX)
|
||||
description:
|
||||
- Will install, upgrade and remove packages with swdepot package manager (HP-UX)
|
||||
notes: []
|
||||
author: "Raul Melo (@melodous)"
|
||||
options:
|
||||
name:
|
||||
description:
|
||||
- package name.
|
||||
required: true
|
||||
state:
|
||||
description:
|
||||
- whether to install (C(present), C(latest)), or remove (C(absent)) a package.
|
||||
required: true
|
||||
choices: [ 'present', 'latest', 'absent']
|
||||
depot:
|
||||
description:
|
||||
- The source repository from which install or upgrade a package.
|
||||
'''
|
||||
|
||||
EXAMPLES = '''
|
||||
- swdepot:
|
||||
name: unzip-6.0
|
||||
state: present
|
||||
depot: 'repository:/path'
|
||||
|
||||
- swdepot:
|
||||
name: unzip
|
||||
state: latest
|
||||
depot: 'repository:/path'
|
||||
|
||||
- swdepot:
|
||||
name: unzip
|
||||
state: absent
|
||||
'''
|
||||
|
||||
import re
|
||||
|
||||
from ansible.module_utils.basic import AnsibleModule
|
||||
from ansible.module_utils.six.moves import shlex_quote
|
||||
|
||||
|
||||
def compare_package(version1, version2):
    """Compare two dotted version strings numerically.

    Returns -1 if version1 < version2, 0 if equal, 1 if version1 > version2.
    Trailing ".0" components are ignored, so "1.2" equals "1.2.0".
    """
    def as_fields(version):
        # strip trailing zero components, then compare field by field as ints
        trimmed = re.sub(r'(\.0+)*$', '', version)
        return [int(part) for part in trimmed.split(".")]

    left = as_fields(version1)
    right = as_fields(version2)
    if left == right:
        return 0
    return -1 if left < right else 1
|
||||
|
||||
|
||||
def query_package(module, name, depot=None):
    """Return (rc, version) for a package; version is None when not found.

    With *depot* given, the remote repository is queried instead of the
    locally installed software list.
    """
    base = '/usr/sbin/swlist -a revision -l product'
    quoted_name = shlex_quote(name)
    if depot:
        shell_cmd = "%s -s %s %s | grep %s" % (base, shlex_quote(depot), quoted_name, quoted_name)
    else:
        shell_cmd = "%s %s | grep %s" % (base, quoted_name, quoted_name)
    # the pipeline requires a real shell, hence use_unsafe_shell
    rc, stdout, stderr = module.run_command(shell_cmd, use_unsafe_shell=True)

    version = None
    if rc == 0:
        # collapse runs of whitespace; the revision is the second column
        version = re.sub(r"\s\s+|\t", " ", stdout).strip().split()[1]

    return rc, version
|
||||
|
||||
|
||||
def remove_package(module, name):
    """Uninstall *name* with swremove.

    Returns (rc, output) where output is stdout on success and stderr
    on failure.
    """
    rc, stdout, stderr = module.run_command("/usr/sbin/swremove %s" % name)
    return (rc, stdout) if rc == 0 else (rc, stderr)
|
||||
|
||||
|
||||
def install_package(module, depot, name):
    """Install *name* from *depot* with swinstall.

    Returns (rc, output) where output is stdout on success and stderr
    on failure.
    """
    shell_cmd = "/usr/sbin/swinstall -x mount_all_filesystems=false -s %s %s" % (depot, name)
    rc, stdout, stderr = module.run_command(shell_cmd)
    return (rc, stdout) if rc == 0 else (rc, stderr)
|
||||
|
||||
|
||||
def main():
    """Module entry point: install, upgrade or remove a package via swdepot."""
    module = AnsibleModule(
        argument_spec=dict(
            name=dict(aliases=['pkg'], required=True),
            state=dict(choices=['present', 'absent', 'latest'], required=True),
            depot=dict(default=None, required=False)
        ),
        supports_check_mode=True
    )
    name = module.params['name']
    state = module.params['state']
    depot = module.params['depot']

    changed = False
    msg = "No changed"
    rc = 0
    # a depot is the only place to install/upgrade from, so it is mandatory
    # for 'present' and 'latest'
    if (state == 'present' or state == 'latest') and depot is None:
        output = "depot parameter is mandatory in present or latest task"
        module.fail_json(name=name, msg=output, rc=rc)

    # Check local version
    rc, version_installed = query_package(module, name)
    if not rc:
        installed = True
        msg = "Already installed"

    else:
        installed = False

    if (state == 'present' or state == 'latest') and installed is False:
        if module.check_mode:
            module.exit_json(changed=True)
        rc, output = install_package(module, depot, name)

        if not rc:
            changed = True
            msg = "Package installed"

        else:
            module.fail_json(name=name, msg=output, rc=rc)

    elif state == 'latest' and installed is True:
        # Check depot version
        rc, version_depot = query_package(module, name, depot)

        if not rc:
            # upgrade only when the depot version is newer than the installed one
            if compare_package(version_installed, version_depot) == -1:
                if module.check_mode:
                    module.exit_json(changed=True)
                # Install new version
                rc, output = install_package(module, depot, name)

                if not rc:
                    msg = "Package upgraded, Before " + version_installed + " Now " + version_depot
                    changed = True

                else:
                    module.fail_json(name=name, msg=output, rc=rc)

        else:
            output = "Software package not in repository " + depot
            module.fail_json(name=name, msg=output, rc=rc)

    elif state == 'absent' and installed is True:
        if module.check_mode:
            module.exit_json(changed=True)
        rc, output = remove_package(module, name)
        if not rc:
            changed = True
            msg = "Package removed"
        else:
            module.fail_json(name=name, msg=output, rc=rc)

    # in check mode no action was performed above, so report no change
    if module.check_mode:
        module.exit_json(changed=False)

    module.exit_json(changed=changed, name=name, state=state, msg=msg)
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
main()
|
||||
309
plugins/modules/packaging/os/swupd.py
Normal file
309
plugins/modules/packaging/os/swupd.py
Normal file
@@ -0,0 +1,309 @@
|
||||
#!/usr/bin/python
|
||||
|
||||
# (c) 2017, Alberto Murillo <alberto.murillo.silva@intel.com>
|
||||
#
|
||||
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||
|
||||
from __future__ import absolute_import, division, print_function
|
||||
__metaclass__ = type
|
||||
|
||||
|
||||
ANSIBLE_METADATA = {'metadata_version': '1.1',
|
||||
'status': ['preview'],
|
||||
'supported_by': 'community'}
|
||||
|
||||
|
||||
DOCUMENTATION = '''
|
||||
---
|
||||
module: swupd
|
||||
short_description: Manages updates and bundles in ClearLinux systems.
|
||||
description:
|
||||
- Manages updates and bundles with the swupd bundle manager, which is used by the
|
||||
Clear Linux Project for Intel Architecture.
|
||||
author: Alberto Murillo (@albertomurillo)
|
||||
options:
|
||||
contenturl:
|
||||
description:
|
||||
- URL pointing to the contents of available bundles.
|
||||
If not specified, the contents are retrieved from clearlinux.org.
|
||||
format:
|
||||
description:
|
||||
- The format suffix for version file downloads. For example [1,2,3,staging,etc].
|
||||
If not specified, the default format is used.
|
||||
manifest:
|
||||
description:
|
||||
- The manifest contains information about the bundles at certain version of the OS.
|
||||
Specify a Manifest version to verify against that version or leave unspecified to
|
||||
verify against the current version.
|
||||
aliases: [release, version]
|
||||
name:
|
||||
description:
|
||||
- Name of the (I)bundle to install or remove.
|
||||
aliases: [bundle]
|
||||
state:
|
||||
description:
|
||||
- Indicates the desired (I)bundle state. C(present) ensures the bundle
|
||||
is installed while C(absent) ensures the (I)bundle is not installed.
|
||||
default: present
|
||||
choices: [present, absent]
|
||||
update:
|
||||
description:
|
||||
- Updates the OS to the latest version.
|
||||
type: bool
|
||||
url:
|
||||
description:
|
||||
- Overrides both I(contenturl) and I(versionurl).
|
||||
verify:
|
||||
description:
|
||||
- Verify content for OS version.
|
||||
type: bool
|
||||
versionurl:
|
||||
description:
|
||||
- URL for version string download.
|
||||
'''
|
||||
|
||||
EXAMPLES = '''
|
||||
- name: Update the OS to the latest version
|
||||
swupd:
|
||||
update: yes
|
||||
|
||||
- name: Installs the "foo" bundle
|
||||
swupd:
|
||||
name: foo
|
||||
state: present
|
||||
|
||||
- name: Removes the "foo" bundle
|
||||
swupd:
|
||||
name: foo
|
||||
state: absent
|
||||
|
||||
- name: Check integrity of filesystem
|
||||
swupd:
|
||||
verify: yes
|
||||
|
||||
- name: Downgrade OS to release 12920
|
||||
swupd:
|
||||
verify: yes
|
||||
manifest: 12920
|
||||
'''
|
||||
|
||||
RETURN = '''
|
||||
stdout:
|
||||
description: stdout of swupd
|
||||
returned: always
|
||||
type: str
|
||||
stderr:
|
||||
description: stderr of swupd
|
||||
returned: always
|
||||
type: str
|
||||
'''
|
||||
|
||||
import os
|
||||
from ansible.module_utils.basic import AnsibleModule
|
||||
|
||||
|
||||
class Swupd(object):
    """Thin wrapper around the Clear Linux `swupd` command line tool.

    Collects rc/stdout/stderr of the last command plus changed/failed/msg
    state for the module to report.
    """

    # marker strings swupd prints during 'verify' runs
    FILES_NOT_MATCH = "files did not match"
    FILES_REPLACED = "missing files were replaced"
    FILES_FIXED = "files were fixed"
    FILES_DELETED = "files were deleted"

    def __init__(self, module):
        # Fail if swupd is not found
        self.module = module
        self.swupd_cmd = module.get_bin_path("swupd", False)
        if not self.swupd_cmd:
            module.fail_json(msg="Could not find swupd.")

        # Initialize parameters: each module param becomes an attribute
        # (contenturl, format, manifest, name, state, update, url, verify,
        # versionurl)
        for key in module.params.keys():
            setattr(self, key, module.params[key])

        # Initialize return values
        self.changed = False
        self.failed = False
        self.msg = None
        self.rc = None
        self.stderr = ""
        self.stdout = ""

    def _run_cmd(self, cmd):
        """Run a command, storing rc/stdout/stderr on the instance."""
        self.rc, self.stdout, self.stderr = self.module.run_command(cmd, check_rc=False)

    def _get_cmd(self, command):
        """Build the full swupd command line for *command*, honouring the
        url/format/manifest options."""
        cmd = "%s %s" % (self.swupd_cmd, command)

        if self.format:
            cmd += " --format=%s" % self.format
        if self.manifest:
            cmd += " --manifest=%s" % self.manifest
        if self.url:
            # --url overrides both content and version URLs
            cmd += " --url=%s" % self.url
        else:
            if self.contenturl and command != "check-update":
                cmd += " --contenturl=%s" % self.contenturl
            if self.versionurl:
                cmd += " --versionurl=%s" % self.versionurl

        return cmd

    def _is_bundle_installed(self, bundle):
        """A bundle is installed iff its marker file exists under
        /usr/share/clear/bundles."""
        try:
            os.stat("/usr/share/clear/bundles/%s" % bundle)
        except OSError:
            return False

        return True

    def _needs_update(self):
        """Return True/False from `swupd check-update`; flags failure on
        any other return code (and then returns None)."""
        cmd = self._get_cmd("check-update")
        self._run_cmd(cmd)

        # rc 0: update available, rc 1: already up to date
        if self.rc == 0:
            return True

        if self.rc == 1:
            return False

        self.failed = True
        self.msg = "Failed to check for updates"

    def _needs_verify(self):
        """Return True when `swupd verify` reports mismatched files."""
        cmd = self._get_cmd("verify")
        self._run_cmd(cmd)

        if self.rc != 0:
            self.failed = True
            self.msg = "Failed to check for filesystem inconsistencies."

        if self.FILES_NOT_MATCH in self.stdout:
            return True

        return False

    def install_bundle(self, bundle):
        """Installs a bundle with `swupd bundle-add bundle`"""
        if self.module.check_mode:
            self.module.exit_json(changed=not self._is_bundle_installed(bundle))

        if self._is_bundle_installed(bundle):
            self.msg = "Bundle %s is already installed" % bundle
            return

        cmd = self._get_cmd("bundle-add %s" % bundle)
        self._run_cmd(cmd)

        if self.rc == 0:
            self.changed = True
            self.msg = "Bundle %s installed" % bundle
            return

        self.failed = True
        self.msg = "Failed to install bundle %s" % bundle

    def remove_bundle(self, bundle):
        """Removes a bundle with `swupd bundle-remove bundle`"""
        if self.module.check_mode:
            self.module.exit_json(changed=self._is_bundle_installed(bundle))

        if not self._is_bundle_installed(bundle):
            # BUG FIX: the original omitted '% bundle', leaving the literal
            # '%s' in the reported message.
            self.msg = "Bundle %s not installed" % bundle
            return

        cmd = self._get_cmd("bundle-remove %s" % bundle)
        self._run_cmd(cmd)

        if self.rc == 0:
            self.changed = True
            self.msg = "Bundle %s removed" % bundle
            return

        self.failed = True
        self.msg = "Failed to remove bundle %s" % bundle

    def update_os(self):
        """Updates the os with `swupd update`"""
        if self.module.check_mode:
            self.module.exit_json(changed=self._needs_update())

        if not self._needs_update():
            self.msg = "There are no updates available"
            return

        cmd = self._get_cmd("update")
        self._run_cmd(cmd)

        if self.rc == 0:
            self.changed = True
            self.msg = "Update successful"
            return

        self.failed = True
        self.msg = "Failed to check for updates"

    def verify_os(self):
        """Verifies filesystem against specified or current version"""
        if self.module.check_mode:
            self.module.exit_json(changed=self._needs_verify())

        if not self._needs_verify():
            self.msg = "No files where changed"
            return

        cmd = self._get_cmd("verify --fix")
        self._run_cmd(cmd)

        # only report a change when swupd actually repaired something
        if self.rc == 0 and (self.FILES_REPLACED in self.stdout or self.FILES_FIXED in self.stdout or self.FILES_DELETED in self.stdout):
            self.changed = True
            self.msg = "Fix successful"
            return

        self.failed = True
        self.msg = "Failed to verify the OS"
|
||||
|
||||
|
||||
def main():
    """The main function.

    Dispatches to OS update, filesystem verify, or bundle add/remove based
    on the mutually exclusive name/update/verify parameters, then reports
    the collected Swupd state.
    """
    module = AnsibleModule(
        argument_spec=dict(
            contenturl=dict(type="str"),
            format=dict(type="str"),
            manifest=dict(aliases=["release", "version"], type="int"),
            name=dict(aliases=["bundle"], type="str"),
            state=dict(default="present", choices=["present", "absent"], type="str"),
            update=dict(default=False, type="bool"),
            url=dict(type="str"),
            verify=dict(default=False, type="bool"),
            versionurl=dict(type="str"),
        ),
        required_one_of=[["name", "update", "verify"]],
        mutually_exclusive=[["name", "update", "verify"]],
        supports_check_mode=True
    )

    swupd = Swupd(module)

    name = module.params["name"]
    state = module.params["state"]
    update = module.params["update"]
    verify = module.params["verify"]

    # 'update' and 'verify' take precedence over bundle management
    if update:
        swupd.update_os()
    elif verify:
        swupd.verify_os()
    elif state == "present":
        swupd.install_bundle(name)
    elif state == "absent":
        swupd.remove_bundle(name)
    else:
        swupd.failed = True

    if swupd.failed:
        module.fail_json(msg=swupd.msg, stdout=swupd.stdout, stderr=swupd.stderr)
    else:
        module.exit_json(changed=swupd.changed, msg=swupd.msg, stdout=swupd.stdout, stderr=swupd.stderr)
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
main()
|
||||
219
plugins/modules/packaging/os/urpmi.py
Normal file
219
plugins/modules/packaging/os/urpmi.py
Normal file
@@ -0,0 +1,219 @@
|
||||
#!/usr/bin/python
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
# Copyright: (c) 2013, Philippe Makowski
|
||||
# Written by Philippe Makowski <philippem@mageia.org>
|
||||
# Based on apt module written by Matthew Williams <matthew@flowroute.com>
|
||||
|
||||
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||
|
||||
from __future__ import absolute_import, division, print_function
|
||||
__metaclass__ = type
|
||||
|
||||
ANSIBLE_METADATA = {'metadata_version': '1.1',
|
||||
'status': ['preview'],
|
||||
'supported_by': 'community'}
|
||||
|
||||
DOCUMENTATION = '''
|
||||
---
|
||||
module: urpmi
|
||||
short_description: Urpmi manager
|
||||
description:
|
||||
- Manages packages with I(urpmi) (such as for Mageia or Mandriva)
|
||||
options:
|
||||
name:
|
||||
description:
|
||||
- A list of package names to install, upgrade or remove.
|
||||
required: yes
|
||||
aliases: [ package, pkg ]
|
||||
state:
|
||||
description:
|
||||
- Indicates the desired package state.
|
||||
choices: [ absent, present ]
|
||||
default: present
|
||||
update_cache:
|
||||
description:
|
||||
- Update the package database first C(urpmi.update -a).
|
||||
type: bool
|
||||
default: 'no'
|
||||
no-recommends:
|
||||
description:
|
||||
- Corresponds to the C(--no-recommends) option for I(urpmi).
|
||||
type: bool
|
||||
default: 'yes'
|
||||
aliases: ['no-recommends']
|
||||
force:
|
||||
description:
|
||||
- Assume "yes" is the answer to any question urpmi has to ask.
|
||||
Corresponds to the C(--force) option for I(urpmi).
|
||||
type: bool
|
||||
default: 'yes'
|
||||
root:
|
||||
description:
|
||||
- Specifies an alternative install root, relative to which all packages will be installed.
|
||||
Corresponds to the C(--root) option for I(urpmi).
|
||||
default: /
|
||||
aliases: [ installroot ]
|
||||
author:
|
||||
- Philippe Makowski (@pmakowski)
|
||||
'''
|
||||
|
||||
EXAMPLES = '''
|
||||
- name: Install package foo
|
||||
urpmi:
|
||||
pkg: foo
|
||||
state: present
|
||||
|
||||
- name: Remove package foo
|
||||
urpmi:
|
||||
pkg: foo
|
||||
state: absent
|
||||
|
||||
- name: Remove packages foo and bar
|
||||
urpmi:
|
||||
pkg: foo,bar
|
||||
state: absent
|
||||
|
||||
- name: Update the package database (urpmi.update -a -q) and install bar (bar will be updated if a newer version exists)
  urpmi:
    name: bar
    state: present
    update_cache: yes
|
||||
'''
|
||||
|
||||
|
||||
import os
|
||||
import shlex
|
||||
import sys
|
||||
|
||||
from ansible.module_utils.basic import AnsibleModule
|
||||
|
||||
|
||||
def query_package(module, name, root):
    """Return True when package *name* is installed under *root*."""
    # rpm -q exits 0 when the package is installed, 1 when it is not.
    rpm_path = module.get_bin_path("rpm", True)
    query_cmd = "%s -q %s %s" % (rpm_path, name, root_option(root))
    rc, dummy_out, dummy_err = module.run_command(query_cmd, check_rc=False)
    return rc == 0
||||
|
||||
|
||||
def query_package_provides(module, name, root):
    """Return True when some installed package provides *name*."""
    # rpm -q --whatprovides exits 0 when a provider is installed.
    rpm_path = module.get_bin_path("rpm", True)
    provides_cmd = "%s -q --whatprovides %s %s" % (rpm_path, name, root_option(root))
    rc = module.run_command(provides_cmd, check_rc=False)[0]
    return rc == 0
|
||||
|
||||
|
||||
def update_package_db(module):
    """Refresh all urpmi media (urpmi.update -a -q); fail the module on error."""
    updater = module.get_bin_path("urpmi.update", True)
    rc = module.run_command("%s -a -q" % (updater,), check_rc=False)[0]
    if rc != 0:
        module.fail_json(msg="could not update package db")
|
||||
|
||||
|
||||
def remove_packages(module, packages, root):
    """Remove every installed package in *packages*, then exit the module.

    Packages that are not installed are skipped so the module stays
    idempotent; the first failing removal aborts with fail_json.
    """
    removed = 0
    for pkg in packages:
        # Query first to see whether removal is needed at all.
        if not query_package(module, pkg, root):
            continue

        urpme_path = module.get_bin_path("urpme", True)
        remove_cmd = "%s --auto %s %s" % (urpme_path, root_option(root), pkg)
        rc, dummy_out, dummy_err = module.run_command(remove_cmd, check_rc=False)
        if rc != 0:
            module.fail_json(msg="failed to remove %s" % (pkg))
        removed += 1

    if removed > 0:
        module.exit_json(changed=True, msg="removed %s package(s)" % removed)

    module.exit_json(changed=False, msg="package(s) already absent")
|
||||
|
||||
|
||||
def install_packages(module, pkgspec, root, force=True, no_recommends=True):
    """Install the requested packages that are not already provided.

    All missing packages go into a single urpmi invocation; each package is
    verified afterwards because urpmi exits 0 even on failure when --force
    is in effect.
    """
    missing = [p for p in pkgspec if not query_package_provides(module, p, root)]
    packages = "".join("'%s' " % p for p in missing)

    if packages:
        no_recommends_yes = '--no-recommends' if no_recommends else ''
        force_yes = '--force' if force else ''

        urpmi_path = module.get_bin_path("urpmi", True)
        cmd = ("%s --auto %s --quiet %s %s %s" % (urpmi_path, force_yes,
                                                  no_recommends_yes,
                                                  root_option(root),
                                                  packages))
        rc, dummy_out, err = module.run_command(cmd)

        # Verify each package individually so the failing one can be named.
        for pkg in pkgspec:
            if not query_package_provides(module, pkg, root):
                module.fail_json(msg="'urpmi %s' failed: %s" % (pkg, err))

        # urpmi always has 0 for exit code if --force is used, so the rc
        # check below is only a fallback.
        if rc:
            module.fail_json(msg="'urpmi %s' failed: %s" % (packages, err))
        else:
            module.exit_json(changed=True, msg="%s present(s)" % packages)
    else:
        module.exit_json(changed=False)
|
||||
|
||||
|
||||
def root_option(root):
    """Return the --root=... argument for urpmi/urpme, or '' when unset."""
    return "--root=%s" % (root,) if root else ""
|
||||
|
||||
|
||||
def main():
    """Module entry point: refresh the cache if asked, then converge state."""
    module = AnsibleModule(
        argument_spec=dict(
            state=dict(type='str', default='installed',
                       choices=['absent', 'installed', 'present', 'removed']),
            update_cache=dict(type='bool', default=False, aliases=['update-cache']),
            force=dict(type='bool', default=True),
            no_recommends=dict(type='bool', default=True, aliases=['no-recommends']),
            name=dict(type='list', required=True, aliases=['package', 'pkg']),
            root=dict(type='str', aliases=['installroot']),
        ),
    )

    params = module.params

    if params['update_cache']:
        update_package_db(module)

    state = params['state']
    if state in ('installed', 'present'):
        install_packages(module, params['name'], params['root'],
                         params['force'], params['no_recommends'])
    elif state in ('removed', 'absent'):
        remove_packages(module, params['name'], params['root'])


if __name__ == '__main__':
    main()
|
||||
288
plugins/modules/packaging/os/xbps.py
Normal file
288
plugins/modules/packaging/os/xbps.py
Normal file
@@ -0,0 +1,288 @@
|
||||
#!/usr/bin/python
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
# Copyright 2016 Dino Occhialini <dino.occhialini@gmail.com>
|
||||
#
|
||||
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||
|
||||
from __future__ import absolute_import, division, print_function
|
||||
__metaclass__ = type
|
||||
|
||||
|
||||
ANSIBLE_METADATA = {'metadata_version': '1.1',
|
||||
'status': ['preview'],
|
||||
'supported_by': 'community'}
|
||||
|
||||
|
||||
DOCUMENTATION = '''
|
||||
---
|
||||
module: xbps
|
||||
short_description: Manage packages with XBPS
|
||||
description:
|
||||
- Manage packages with the XBPS package manager.
|
||||
author:
|
||||
- "Dino Occhialini (@dinoocch)"
|
||||
- "Michael Aldridge (@the-maldridge)"
|
||||
options:
|
||||
name:
|
||||
description:
|
||||
- Name of the package to install, upgrade, or remove.
|
||||
state:
|
||||
description:
|
||||
- Desired state of the package.
|
||||
default: "present"
|
||||
choices: ["present", "absent", "latest"]
|
||||
recurse:
|
||||
description:
|
||||
- When removing a package, also remove its dependencies, provided
|
||||
that they are not required by other packages and were not
|
||||
explicitly installed by a user.
|
||||
type: bool
|
||||
default: 'no'
|
||||
update_cache:
|
||||
description:
|
||||
- Whether or not to refresh the master package lists. This can be
|
||||
run as part of a package installation or as a separate step.
|
||||
type: bool
|
||||
default: 'yes'
|
||||
upgrade:
|
||||
description:
|
||||
- Whether or not to upgrade whole system
|
||||
type: bool
|
||||
default: 'no'
|
||||
'''
|
||||
|
||||
EXAMPLES = '''
|
||||
# Install package foo
|
||||
- xbps: name=foo state=present
|
||||
# Upgrade package foo
|
||||
- xbps: name=foo state=latest update_cache=yes
|
||||
# Remove packages foo and bar
|
||||
- xbps: name=foo,bar state=absent
|
||||
# Recursively remove package foo
|
||||
- xbps: name=foo state=absent recurse=yes
|
||||
# Update package cache
|
||||
- xbps: update_cache=yes
|
||||
# Upgrade packages
|
||||
- xbps: upgrade=yes
|
||||
'''
|
||||
|
||||
RETURN = '''
|
||||
msg:
|
||||
description: Message about results
|
||||
returned: success
|
||||
type: str
|
||||
sample: "System Upgraded"
|
||||
packages:
|
||||
description: Packages that are affected/would be affected
|
||||
type: list
|
||||
sample: ["ansible"]
|
||||
returned: success
|
||||
'''
|
||||
|
||||
|
||||
import os
|
||||
|
||||
from ansible.module_utils.basic import AnsibleModule
|
||||
|
||||
|
||||
def is_installed(xbps_output):
    """Treat any non-empty xbps-query output as 'installed'."""
    return len(xbps_output) > 0
|
||||
|
||||
|
||||
def query_package(module, xbps_path, name, state="present"):
    """Return (installed, up_to_date) for *name*.

    Queries the local package database first, then the pending-upgrade list
    from 'xbps-install -Sun' to decide whether the package is current.
    """
    if state == "present":
        local_cmd = "%s %s" % (xbps_path['query'], name)
        dummy_rc, local_out, dummy_err = module.run_command(local_cmd, check_rc=False)
        if not is_installed(local_out):
            # Not installed locally, so it cannot be up to date either.
            return False, False

        # Exit code 17 means the xbps package itself needs updating first;
        # both 0 and 17 still produce a usable pending-upgrade listing.
        remote_cmd = "%s -Sun" % (xbps_path['install'])
        remote_rc, remote_out, dummy_rerr = module.run_command(remote_cmd, check_rc=False)
        if remote_rc in (0, 17):
            # Installed; current exactly when absent from the upgrade list.
            return True, name not in remote_out

    return False, False
|
||||
|
||||
|
||||
def update_package_db(module, xbps_path):
    """Sync the repository index; return True when new data was fetched."""
    rc, stdout, dummy_err = module.run_command("%s -S" % (xbps_path['install']),
                                               check_rc=False)
    if rc != 0:
        module.fail_json(msg="Could not update package db")
    # xbps prints transfer statistics ("avg rate") only when it actually
    # downloaded fresh index data.
    return "avg rate" in stdout
|
||||
|
||||
|
||||
def upgrade(module, xbps_path):
    """Perform a full system upgrade, exiting the module with the outcome."""
    do_upgrade = "%s -uy" % (xbps_path['install'])
    dry_run = "%s -un" % (xbps_path['install'])

    # Dry-run first to learn whether anything would change at all.
    rc, stdout, dummy_err = module.run_command(dry_run, check_rc=False)
    if rc != 0:
        module.fail_json(msg="Could not upgrade")
    elif not stdout.splitlines():
        module.exit_json(changed=False, msg='Nothing to upgrade')
    else:
        rc, stdout, dummy_err = module.run_command(do_upgrade, check_rc=False)
        if rc == 0:
            module.exit_json(changed=True, msg='System upgraded')
        else:
            module.fail_json(msg="Could not upgrade")
|
||||
|
||||
|
||||
def remove_packages(module, xbps_path, packages):
    """Remove the installed packages in *packages* and exit with the result."""
    removed = []
    # Loop per package so a failure can name the offending package.
    for pkg in packages:
        installed, dummy_updated = query_package(module, xbps_path, pkg)
        if not installed:
            continue

        rc, dummy_out, dummy_err = module.run_command(
            "%s -y %s" % (xbps_path['remove'], pkg), check_rc=False)
        if rc != 0:
            module.fail_json(msg="failed to remove %s" % (pkg))
        removed.append(pkg)

    if removed:
        module.exit_json(changed=True, msg="removed %s package(s)" %
                         len(removed), packages=removed)

    module.exit_json(changed=False, msg="package(s) already absent")
|
||||
|
||||
|
||||
def install_packages(module, xbps_path, state, packages):
    """Install/upgrade the packages in *packages*, exiting with the result.

    Bug fix: the failure message previously interpolated the stale loop
    variable ``package`` (the last package examined, not necessarily one
    that failed to install); it now reports the full set that was passed
    to xbps-install.  The unreachable trailing exit_json (dead code after
    an unconditional exit) was removed.
    """
    to_install = []
    for pkg in packages:
        # Skip packages that are installed and either state == 'present',
        # or already at the latest version when state == 'latest'.
        installed, updated = query_package(module, xbps_path, pkg)
        if installed and (state == 'present' or
                          (state == 'latest' and updated)):
            continue
        to_install.append(pkg)

    if len(to_install) == 0:
        module.exit_json(changed=False, msg="Nothing to Install")

    cmd = "%s -y %s" % (xbps_path['install'], " ".join(to_install))
    rc, dummy_out, dummy_err = module.run_command(cmd, check_rc=False)

    # Exit code 17 with state == 'latest' means the xbps package itself was
    # updated, which is not a failure of this request.
    if rc != 0 and not (state == 'latest' and rc == 17):
        module.fail_json(msg="failed to install %s" % (" ".join(to_install)))

    module.exit_json(changed=True, msg="installed %s package(s)"
                     % (len(to_install)),
                     packages=to_install)
|
||||
|
||||
|
||||
def check_packages(module, xbps_path, packages, state):
    """Report, in check mode, which packages would change for *state*."""
    pending = []
    for pkg in packages:
        installed, updated = query_package(module, xbps_path, pkg)
        needs_change = ((state in ["present", "latest"] and not installed) or
                        (state == "absent" and installed) or
                        (state == "latest" and not updated))
        if needs_change:
            pending.append(pkg)

    if pending:
        # Report 'removed' rather than 'absent' for nicer phrasing.
        if state == "absent":
            state = "removed"
        module.exit_json(changed=True, msg="%s package(s) would be %s" % (
                         len(pending), state),
                         packages=pending)
    else:
        module.exit_json(changed=False, msg="package(s) already %s" % state,
                         packages=[])
|
||||
|
||||
|
||||
def main():
    """Entry point: dispatch cache refresh, upgrade, install or removal."""
    module = AnsibleModule(
        argument_spec=dict(
            name=dict(default=None, aliases=['pkg', 'package'], type='list'),
            state=dict(default='present', choices=['present', 'installed',
                                                   'latest', 'absent',
                                                   'removed']),
            recurse=dict(default=False, type='bool'),
            force=dict(default=False, type='bool'),
            upgrade=dict(default=False, type='bool'),
            update_cache=dict(default=True, aliases=['update-cache'],
                              type='bool')
        ),
        required_one_of=[['name', 'update_cache', 'upgrade']],
        supports_check_mode=True)

    xbps_path = {
        'install': module.get_bin_path('xbps-install', True),
        'query': module.get_bin_path('xbps-query', True),
        'remove': module.get_bin_path('xbps-remove', True),
    }

    if not os.path.exists(xbps_path['install']):
        module.fail_json(msg="cannot find xbps, in path %s"
                         % (xbps_path['install']))

    p = module.params

    # Normalize state aliases to their canonical names.
    if p['state'] in ('present', 'installed'):
        p['state'] = 'present'
    elif p['state'] in ('absent', 'removed'):
        p['state'] = 'absent'

    if p["update_cache"] and not module.check_mode:
        refreshed = update_package_db(module, xbps_path)
        if p['name'] is None and not p['upgrade']:
            # Cache refresh was the only requested action; report and stop.
            if refreshed:
                module.exit_json(changed=True,
                                 msg='Updated the package master lists')
            else:
                module.exit_json(changed=False,
                                 msg='Package list already up to date')

    if (p['update_cache'] and module.check_mode and not
            (p['name'] or p['upgrade'])):
        module.exit_json(changed=True,
                         msg='Would have updated the package cache')

    if p['upgrade']:
        upgrade(module, xbps_path)

    if p['name']:
        pkgs = p['name']

        if module.check_mode:
            check_packages(module, xbps_path, pkgs, p['state'])

        if p['state'] in ('present', 'latest'):
            install_packages(module, xbps_path, p['state'], pkgs)
        elif p['state'] == 'absent':
            remove_packages(module, xbps_path, pkgs)


if __name__ == "__main__":
    main()
|
||||
531
plugins/modules/packaging/os/zypper.py
Normal file
531
plugins/modules/packaging/os/zypper.py
Normal file
@@ -0,0 +1,531 @@
|
||||
#!/usr/bin/python
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
# (c) 2013, Patrick Callahan <pmc@patrickcallahan.com>
|
||||
# based on
|
||||
# openbsd_pkg
|
||||
# (c) 2013
|
||||
# Patrik Lundin <patrik.lundin.swe@gmail.com>
|
||||
#
|
||||
# yum
|
||||
# (c) 2012, Red Hat, Inc
|
||||
# Written by Seth Vidal <skvidal at fedoraproject.org>
|
||||
#
|
||||
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||
|
||||
from __future__ import absolute_import, division, print_function
|
||||
__metaclass__ = type
|
||||
|
||||
|
||||
ANSIBLE_METADATA = {'metadata_version': '1.1',
|
||||
'status': ['preview'],
|
||||
'supported_by': 'community'}
|
||||
|
||||
|
||||
DOCUMENTATION = '''
|
||||
---
|
||||
module: zypper
|
||||
author:
|
||||
- "Patrick Callahan (@dirtyharrycallahan)"
|
||||
- "Alexander Gubin (@alxgu)"
|
||||
- "Thomas O'Donnell (@andytom)"
|
||||
- "Robin Roth (@robinro)"
|
||||
- "Andrii Radyk (@AnderEnder)"
|
||||
short_description: Manage packages on SUSE and openSUSE
|
||||
description:
|
||||
- Manage packages on SUSE and openSUSE using the zypper and rpm tools.
|
||||
options:
|
||||
name:
|
||||
description:
|
||||
- Package name C(name) or package specifier or a list of either.
|
||||
- Can include a version like C(name=1.0), C(name>3.4) or C(name<=2.7). If a version is given, C(oldpackage) is implied and zypper is allowed to
|
||||
update the package within the version range given.
|
||||
- You can also pass a url or a local path to a rpm file.
|
||||
- When using state=latest, this can be '*', which updates all installed packages.
|
||||
required: true
|
||||
aliases: [ 'pkg' ]
|
||||
state:
|
||||
description:
|
||||
- C(present) will make sure the package is installed.
|
||||
C(latest) will make sure the latest version of the package is installed.
|
||||
C(absent) will make sure the specified package is not installed.
|
||||
C(dist-upgrade) will make sure the latest version of all installed packages from all enabled repositories is installed.
|
||||
- When using C(dist-upgrade), I(name) should be C('*').
|
||||
required: false
|
||||
choices: [ present, latest, absent, dist-upgrade ]
|
||||
default: "present"
|
||||
type:
|
||||
description:
|
||||
- The type of package to be operated on.
|
||||
required: false
|
||||
choices: [ package, patch, pattern, product, srcpackage, application ]
|
||||
default: "package"
|
||||
extra_args_precommand:
|
||||
required: false
|
||||
description:
|
||||
- Add additional global target options to C(zypper).
|
||||
- Options should be supplied in a single line as if given in the command line.
|
||||
disable_gpg_check:
|
||||
description:
|
||||
- Whether to disable to GPG signature checking of the package
|
||||
signature being installed. Has an effect only if state is
|
||||
I(present) or I(latest).
|
||||
required: false
|
||||
default: "no"
|
||||
type: bool
|
||||
disable_recommends:
|
||||
description:
|
||||
- Corresponds to the C(--no-recommends) option for I(zypper). Default behavior (C(yes)) modifies zypper's default behavior; C(no) does
|
||||
install recommended packages.
|
||||
required: false
|
||||
default: "yes"
|
||||
type: bool
|
||||
force:
|
||||
description:
|
||||
- Adds C(--force) option to I(zypper). Allows to downgrade packages and change vendor or architecture.
|
||||
required: false
|
||||
default: "no"
|
||||
type: bool
|
||||
force_resolution:
|
||||
description:
|
||||
- Adds C(--force-resolution) option to I(zypper). Allows to (un)install packages with conflicting requirements (resolver will choose a solution).
|
||||
required: false
|
||||
default: "no"
|
||||
type: bool
|
||||
update_cache:
|
||||
description:
|
||||
- Run the equivalent of C(zypper refresh) before the operation. Disabled in check mode.
|
||||
required: false
|
||||
default: "no"
|
||||
type: bool
|
||||
aliases: [ "refresh" ]
|
||||
oldpackage:
|
||||
description:
|
||||
- Adds C(--oldpackage) option to I(zypper). Allows to downgrade packages with less side-effects than force. This is implied as soon as a
|
||||
version is specified as part of the package name.
|
||||
required: false
|
||||
default: "no"
|
||||
type: bool
|
||||
extra_args:
|
||||
required: false
|
||||
description:
|
||||
- Add additional options to C(zypper) command.
|
||||
- Options should be supplied in a single line as if given in the command line.
|
||||
notes:
|
||||
- When used with a `loop:` each package will be processed individually,
|
||||
it is much more efficient to pass the list directly to the `name` option.
|
||||
# informational: requirements for nodes
|
||||
requirements:
|
||||
- "zypper >= 1.0 # included in openSUSE >= 11.1 or SUSE Linux Enterprise Server/Desktop >= 11.0"
|
||||
- python-xml
|
||||
- rpm
|
||||
'''
|
||||
|
||||
EXAMPLES = '''
|
||||
# Install "nmap"
|
||||
- zypper:
|
||||
name: nmap
|
||||
state: present
|
||||
|
||||
# Install apache2 with recommended packages
|
||||
- zypper:
|
||||
name: apache2
|
||||
state: present
|
||||
disable_recommends: no
|
||||
|
||||
# Apply a given patch
|
||||
- zypper:
|
||||
name: openSUSE-2016-128
|
||||
state: present
|
||||
type: patch
|
||||
|
||||
# Remove the "nmap" package
|
||||
- zypper:
|
||||
name: nmap
|
||||
state: absent
|
||||
|
||||
# Install the nginx rpm from a remote repo
|
||||
- zypper:
|
||||
name: 'http://nginx.org/packages/sles/12/x86_64/RPMS/nginx-1.8.0-1.sles12.ngx.x86_64.rpm'
|
||||
state: present
|
||||
|
||||
# Install local rpm file
|
||||
- zypper:
|
||||
name: /tmp/fancy-software.rpm
|
||||
state: present
|
||||
|
||||
# Update all packages
|
||||
- zypper:
|
||||
name: '*'
|
||||
state: latest
|
||||
|
||||
# Apply all available patches
|
||||
- zypper:
|
||||
name: '*'
|
||||
state: latest
|
||||
type: patch
|
||||
|
||||
# Perform a dist-upgrade with additional arguments
|
||||
- zypper:
|
||||
name: '*'
|
||||
state: dist-upgrade
|
||||
extra_args: '--no-allow-vendor-change --allow-arch-change'
|
||||
|
||||
# Refresh repositories and update package "openssl"
|
||||
- zypper:
|
||||
name: openssl
|
||||
state: present
|
||||
update_cache: yes
|
||||
|
||||
# Install specific version (possible comparisons: <, >, <=, >=, =)
|
||||
- zypper:
|
||||
name: 'docker>=1.10'
|
||||
state: present
|
||||
|
||||
# Wait 20 seconds to acquire the lock before failing
|
||||
- zypper:
|
||||
name: mosh
|
||||
state: present
|
||||
environment:
|
||||
ZYPP_LOCK_TIMEOUT: 20
|
||||
'''
|
||||
|
||||
import xml
|
||||
import re
|
||||
from xml.dom.minidom import parseString as parseXML
|
||||
from ansible.module_utils.six import iteritems
|
||||
from ansible.module_utils._text import to_native
|
||||
|
||||
# import module snippets
|
||||
from ansible.module_utils.basic import AnsibleModule
|
||||
|
||||
|
||||
class Package:
    """A single package request for zypper.

    Holds a name, an optional version constraint and an install/remove
    prefix ('+' install, '-' remove), rendered back into a single zypper
    argument by ``__str__``.
    """

    def __init__(self, name, prefix, version):
        self.name = name
        self.prefix = prefix
        self.version = version
        # Only a '+' prefix marks the package for installation.
        self.shouldinstall = prefix == '+'

    def __str__(self):
        # e.g. '+nmap>=6.0' or '-telnet'
        return '%s%s%s' % (self.prefix, self.name, self.version)
|
||||
|
||||
|
||||
def split_name_version(name):
    """Split a package spec into (prefix, name, version constraint).

    Example formats: 'docker>=1.10', 'apache=2.4'.  Allowed specifiers are
    <, >, <=, >=, = with versions matching [0-9.-]*.  An optional leading
    '-' or '~' marks removal and '+' marks installation; '~' is
    canonicalized to '-'.
    """
    prefix = ''
    if name[0] in ('-', '~', '+'):
        prefix, name = name[0], name[1:]
        if prefix == '~':
            prefix = '-'

    matcher = re.compile('^(.*?)((?:<|>|<=|>=|=)[0-9.-]*)?$')
    try:
        base, version = matcher.match(name).groups()
        return prefix, base, version if version is not None else ''
    except Exception:
        # Fall back to the raw name with no version constraint.
        return prefix, name, ''
|
||||
|
||||
|
||||
def get_want_state(names, remove=False):
    """Partition *names* into Package objects and raw URLs/rpm paths.

    Names without an explicit '+'/'-' prefix get one derived from *remove*.
    """
    packages = []
    urls = []
    for name in names:
        # URLs and local rpm files are passed through to zypper untouched.
        if '://' in name or name.endswith('.rpm'):
            urls.append(name)
            continue
        prefix, pname, version = split_name_version(name)
        if prefix not in ('-', '+'):
            prefix = '-' if remove else '+'
        packages.append(Package(pname, prefix, version))
    return packages, urls
|
||||
|
||||
|
||||
def get_installed_state(m, packages):
    """Query zypper for which of *packages* are currently installed."""
    # Exact-name search restricted to installed packages only.
    cmd = get_cmd(m, 'search')
    cmd.extend(['--match-exact', '--details', '--installed-only'])
    cmd.extend(p.name for p in packages)
    return parse_zypper_xml(m, cmd, fail_not_found=False)[0]
|
||||
|
||||
|
||||
def parse_zypper_xml(m, cmd, fail_not_found=True, packages=None):
    """Run *cmd* and parse zypper's --xmlout result into a package dict.

    Returns ``(packages, rc, stdout, stderr)`` where *packages* maps each
    solvable name to its version, oldversion, installed flag and parent
    group.  Fails the module on XML parse errors and on unexpected zypper
    exit codes.

    Bug fix: ``firstrun`` was only assigned when *packages* was None, so if
    the recursive rc==103 retry itself returned 103 the function raised
    NameError instead of stopping the retry; it is now initialized
    unconditionally, keeping the retry bounded to a single repetition.
    """
    rc, stdout, stderr = m.run_command(cmd, check_rc=False)

    try:
        dom = parseXML(stdout)
    except xml.parsers.expat.ExpatError as exc:
        m.fail_json(msg="Failed to parse zypper xml output: %s" % to_native(exc),
                    rc=rc, stdout=stdout, stderr=stderr, cmd=cmd)

    if rc == 104:
        # exit code 104 is ZYPPER_EXIT_INF_CAP_NOT_FOUND (no packages found)
        if fail_not_found:
            errmsg = dom.getElementsByTagName('message')[-1].childNodes[0].data
            m.fail_json(msg=errmsg, rc=rc, stdout=stdout, stderr=stderr, cmd=cmd)
        else:
            return {}, rc, stdout, stderr
    elif rc in [0, 106, 103]:
        # zypper exit codes
        # 0: success
        # 106: signature verification failed
        # 103: zypper was upgraded, run same command again
        firstrun = packages is None
        if firstrun:
            packages = {}
        for solvable in dom.getElementsByTagName('solvable'):
            name = solvable.getAttribute('name')
            packages[name] = {
                'version': solvable.getAttribute('edition'),
                'oldversion': solvable.getAttribute('edition-old'),
                'installed': solvable.getAttribute('status') == "installed",
                'group': solvable.parentNode.nodeName,
            }
        if rc == 103 and firstrun:
            # if this was the first run and it failed with 103
            # run zypper again with the same command to complete update
            return parse_zypper_xml(m, cmd, fail_not_found=fail_not_found, packages=packages)

        return packages, rc, stdout, stderr
    m.fail_json(msg='Zypper run command failed with return code %s.' % rc, rc=rc, stdout=stdout, stderr=stderr, cmd=cmd)
|
||||
|
||||
|
||||
def get_cmd(m, subcommand):
    """Build the zypper argv for *subcommand* from the module parameters."""
    is_install = subcommand in ('install', 'update', 'patch', 'dist-upgrade')
    is_refresh = subcommand == 'refresh'
    cmd = ['/usr/bin/zypper', '--quiet', '--non-interactive', '--xmlout']

    # Global options must precede the zypper subcommand.
    if m.params['extra_args_precommand']:
        cmd.extend(m.params['extra_args_precommand'].split())
    if (is_install or is_refresh) and m.params['disable_gpg_check']:
        cmd.append('--no-gpg-checks')
    if subcommand == 'search':
        cmd.append('--disable-repositories')

    cmd.append(subcommand)
    if subcommand not in ('patch', 'dist-upgrade') and not is_refresh:
        cmd.extend(['--type', m.params['type']])
    if m.check_mode and subcommand != 'search':
        cmd.append('--dry-run')
    if is_install:
        cmd.append('--auto-agree-with-licenses')
        if m.params['disable_recommends']:
            cmd.append('--no-recommends')
        if m.params['force']:
            cmd.append('--force')
        if m.params['force_resolution']:
            cmd.append('--force-resolution')
        if m.params['oldpackage']:
            cmd.append('--oldpackage')
    if m.params['extra_args']:
        cmd.extend(m.params['extra_args'].split(' '))

    return cmd
|
||||
|
||||
|
||||
def set_diff(m, retvals, result):
    """Append a human-readable change summary to retvals['diff']['prepared'].

    TODO: if there is only one package, set before/after to version numbers.
    """
    buckets = {'installed': [], 'removed': [], 'upgraded': []}
    if result:
        for name in result:
            group = result[name]['group']
            if group == 'to-upgrade':
                versions = ' (' + result[name]['oldversion'] + ' => ' + result[name]['version'] + ')'
                buckets['upgraded'].append(name + versions)
            elif group == 'to-install':
                buckets['installed'].append(name)
            elif group == 'to-remove':
                buckets['removed'].append(name)

    output = ''.join('%s: %s\n' % (state, ', '.join(names))
                     for state, names in buckets.items() if names)

    # Successive calls accumulate into one diff, separated by blank lines.
    diff = retvals.setdefault('diff', {})
    if 'prepared' in diff:
        diff['prepared'] += '\n' + output
    else:
        diff['prepared'] = output
|
||||
|
||||
|
||||
def package_present(m, name, want_latest):
    """Install/update the '+' packages in *name* and remove the '-' ones."""
    retvals = {'rc': 0, 'stdout': '', 'stderr': ''}
    packages, urls = get_want_state(name)

    # A version constraint implies --oldpackage so downgrades are allowed.
    if any(p.version for p in packages):
        m.params['oldpackage'] = True

    if not want_latest:
        # For state=present: drop packages whose installed state already
        # matches the request; versioned ones stay in so zypper resolves
        # the version itself.
        unversioned = [p for p in packages if not p.version]
        prerun_state = get_installed_state(m, unversioned)
        packages = [p for p in packages if p.shouldinstall != (p.name in prerun_state)]

    if not packages and not urls:
        # nothing to install/remove and nothing to update
        return None, retvals

    # zypper install also updates packages.  Install and remove requests
    # are combined into one run so dependency resolution sees both: for
    # example "-exim postfix" removes exim without breaking packages that
    # depend on a mailserver.  '+'/'-' prefixes and version specifiers are
    # carried through by str(p).
    cmd = get_cmd(m, 'install')
    cmd.append('--')
    cmd.extend(urls)
    cmd.extend(str(p) for p in packages)

    retvals['cmd'] = cmd
    result, retvals['rc'], retvals['stdout'], retvals['stderr'] = parse_zypper_xml(m, cmd)
    return result, retvals
|
||||
|
||||
|
||||
def package_update_all(m):
    """Run zypper update/patch/dist-upgrade across all available packages."""
    retvals = {'rc': 0, 'stdout': '', 'stderr': ''}

    # Patching takes precedence over a dist-upgrade request; otherwise a
    # plain update is performed.
    if m.params['type'] == 'patch':
        subcommand = 'patch'
    elif m.params['state'] == 'dist-upgrade':
        subcommand = 'dist-upgrade'
    else:
        subcommand = 'update'

    cmd = get_cmd(m, subcommand)
    retvals['cmd'] = cmd
    result, retvals['rc'], retvals['stdout'], retvals['stderr'] = parse_zypper_xml(m, cmd)
    return result, retvals
|
||||
|
||||
|
||||
def package_absent(m, name):
    """Remove the packages listed in *name*.

    Fails early on requests zypper cannot honour for a removal
    ('+' prefix, URLs, patches), skips packages that are not installed,
    and returns (result, retvals) like the other package_* helpers.
    """
    retvals = {'rc': 0, 'stdout': '', 'stderr': ''}
    # Get package state
    packages, urls = get_want_state(name, remove=True)

    # reject requests that make no sense for a removal
    if any(p.prefix == '+' for p in packages):
        m.fail_json(msg="Can not combine '+' prefix with state=remove/absent.")
    if urls:
        m.fail_json(msg="Can not remove via URL.")
    if m.params['type'] == 'patch':
        m.fail_json(msg="Can not remove patches.")

    # only try to remove packages that are actually installed
    installed = get_installed_state(m, packages)
    packages = [p for p in packages if p.name in installed]

    if not packages:
        return None, retvals

    cmd = get_cmd(m, 'remove')
    cmd.extend(p.name + p.version for p in packages)

    retvals['cmd'] = cmd
    result, retvals['rc'], retvals['stdout'], retvals['stderr'] = parse_zypper_xml(m, cmd)
    return result, retvals
|
||||
|
||||
|
||||
def repo_refresh(m):
    """Refresh all zypper repositories and return the retvals dict."""
    retvals = {'rc': 0, 'stdout': '', 'stderr': ''}

    cmd = get_cmd(m, 'refresh')
    retvals['cmd'] = cmd
    # the parsed package result is irrelevant here; only rc/stdout/stderr matter
    dummy, retvals['rc'], retvals['stdout'], retvals['stderr'] = parse_zypper_xml(m, cmd)

    return retvals
|
||||
|
||||
# ===========================================
|
||||
# Main control flow
|
||||
|
||||
|
||||
def main():
    """Entry point of the zypper package module.

    Parses the module arguments, optionally refreshes the repositories,
    dispatches to the package_* helpers and exits with the combined
    result dict.
    """
    module = AnsibleModule(
        argument_spec=dict(
            name=dict(required=True, aliases=['pkg'], type='list'),
            state=dict(required=False, default='present', choices=['absent', 'installed', 'latest', 'present', 'removed', 'dist-upgrade']),
            type=dict(required=False, default='package', choices=['package', 'patch', 'pattern', 'product', 'srcpackage', 'application']),
            extra_args_precommand=dict(required=False, default=None),
            disable_gpg_check=dict(required=False, default='no', type='bool'),
            disable_recommends=dict(required=False, default='yes', type='bool'),
            force=dict(required=False, default='no', type='bool'),
            force_resolution=dict(required=False, default='no', type='bool'),
            update_cache=dict(required=False, aliases=['refresh'], default='no', type='bool'),
            oldpackage=dict(required=False, default='no', type='bool'),
            extra_args=dict(required=False, default=None),
        ),
        supports_check_mode=True
    )

    name = module.params['name']
    state = module.params['state']
    update_cache = module.params['update_cache']

    # remove empty strings from package list
    name = list(filter(None, name))

    # Refresh repositories (skipped in check mode, since it changes system state)
    if update_cache and not module.check_mode:
        retvals = repo_refresh(module)

        if retvals['rc'] != 0:
            module.fail_json(msg="Zypper refresh run failed.", **retvals)

    # Perform requested action.
    # name == ['*'] means "all packages" and is only valid for a full
    # upgrade/dist-upgrade; dist-upgrade of specific packages is rejected.
    if name == ['*'] and state in ['latest', 'dist-upgrade']:
        packages_changed, retvals = package_update_all(module)
    elif name != ['*'] and state == 'dist-upgrade':
        module.fail_json(msg="Can not dist-upgrade specific packages.")
    else:
        if state in ['absent', 'removed']:
            packages_changed, retvals = package_absent(module, name)
        elif state in ['installed', 'present', 'latest']:
            packages_changed, retvals = package_present(module, name, state == 'latest')

    # changed only when zypper succeeded AND it actually touched packages
    retvals['changed'] = retvals['rc'] == 0 and bool(packages_changed)

    if module._diff:
        set_diff(module, retvals, packages_changed)

    if retvals['rc'] != 0:
        module.fail_json(msg="Zypper run failed.", **retvals)

    # keep the output small when nothing changed
    if not retvals['changed']:
        del retvals['stdout']
        del retvals['stderr']

    module.exit_json(name=name, state=state, update_cache=update_cache, **retvals)
|
||||
391
plugins/modules/packaging/os/zypper_repository.py
Normal file
391
plugins/modules/packaging/os/zypper_repository.py
Normal file
@@ -0,0 +1,391 @@
|
||||
#!/usr/bin/python
|
||||
# encoding: utf-8
|
||||
|
||||
# (c) 2013, Matthias Vogelgesang <matthias.vogelgesang@gmail.com>
|
||||
# (c) 2014, Justin Lecher <jlec@gentoo.org>
|
||||
#
|
||||
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||
|
||||
from __future__ import absolute_import, division, print_function
|
||||
__metaclass__ = type
|
||||
|
||||
|
||||
ANSIBLE_METADATA = {'metadata_version': '1.1',
|
||||
'status': ['preview'],
|
||||
'supported_by': 'community'}
|
||||
|
||||
|
||||
DOCUMENTATION = '''
|
||||
---
|
||||
module: zypper_repository
|
||||
author: "Matthias Vogelgesang (@matze)"
|
||||
short_description: Add and remove Zypper repositories
|
||||
description:
|
||||
- Add or remove Zypper repositories on SUSE and openSUSE
|
||||
options:
|
||||
name:
|
||||
description:
|
||||
- A name for the repository. Not required when adding repofiles.
|
||||
repo:
|
||||
description:
|
||||
- URI of the repository or .repo file. Required when state=present.
|
||||
state:
|
||||
description:
|
||||
- A source string state.
|
||||
choices: [ "absent", "present" ]
|
||||
default: "present"
|
||||
description:
|
||||
description:
|
||||
- A description of the repository
|
||||
disable_gpg_check:
|
||||
description:
|
||||
- Whether to disable GPG signature checking of
|
||||
all packages. Has an effect only if state is
|
||||
I(present).
|
||||
- Needs zypper version >= 1.6.2.
|
||||
type: bool
|
||||
default: 'no'
|
||||
autorefresh:
|
||||
description:
|
||||
- Enable autorefresh of the repository.
|
||||
type: bool
|
||||
default: 'yes'
|
||||
aliases: [ "refresh" ]
|
||||
priority:
|
||||
description:
|
||||
- Set priority of repository. Packages will always be installed
|
||||
from the repository with the smallest priority number.
|
||||
- Needs zypper version >= 1.12.25.
|
||||
overwrite_multiple:
|
||||
description:
|
||||
- Overwrite multiple repository entries, if repositories with both name and
|
||||
URL already exist.
|
||||
type: bool
|
||||
default: 'no'
|
||||
auto_import_keys:
|
||||
description:
|
||||
- Automatically import the gpg signing key of the new or changed repository.
|
||||
- Has an effect only if state is I(present). Has no effect on existing (unchanged) repositories or in combination with I(absent).
|
||||
- Implies runrefresh.
|
||||
- Only works with C(.repo) files if `name` is given explicitly.
|
||||
type: bool
|
||||
default: 'no'
|
||||
runrefresh:
|
||||
description:
|
||||
- Refresh the package list of the given repository.
|
||||
- Can be used with repo=* to refresh all repositories.
|
||||
type: bool
|
||||
default: 'no'
|
||||
enabled:
|
||||
description:
|
||||
- Set repository to enabled (or disabled).
|
||||
type: bool
|
||||
default: 'yes'
|
||||
|
||||
|
||||
requirements:
|
||||
- "zypper >= 1.0 # included in openSUSE >= 11.1 or SUSE Linux Enterprise Server/Desktop >= 11.0"
|
||||
- python-xml
|
||||
'''
|
||||
|
||||
EXAMPLES = '''
|
||||
# Add NVIDIA repository for graphics drivers
|
||||
- zypper_repository:
|
||||
name: nvidia-repo
|
||||
repo: 'ftp://download.nvidia.com/opensuse/12.2'
|
||||
state: present
|
||||
|
||||
# Remove NVIDIA repository
|
||||
- zypper_repository:
|
||||
name: nvidia-repo
|
||||
repo: 'ftp://download.nvidia.com/opensuse/12.2'
|
||||
state: absent
|
||||
|
||||
# Add python development repository
|
||||
- zypper_repository:
|
||||
repo: 'http://download.opensuse.org/repositories/devel:/languages:/python/SLE_11_SP3/devel:languages:python.repo'
|
||||
|
||||
# Refresh all repos
|
||||
- zypper_repository:
|
||||
repo: '*'
|
||||
runrefresh: yes
|
||||
|
||||
# Add a repo and add its gpg key
|
||||
- zypper_repository:
|
||||
repo: 'http://download.opensuse.org/repositories/systemsmanagement/openSUSE_Leap_42.1/'
|
||||
auto_import_keys: yes
|
||||
|
||||
# Force refresh of a repository
|
||||
- zypper_repository:
|
||||
repo: 'http://my_internal_ci_repo/repo'
|
||||
name: my_ci_repo
|
||||
state: present
|
||||
runrefresh: yes
|
||||
'''
|
||||
|
||||
from distutils.version import LooseVersion
|
||||
|
||||
from ansible.module_utils.basic import AnsibleModule
|
||||
|
||||
|
||||
REPO_OPTS = ['alias', 'name', 'priority', 'enabled', 'autorefresh', 'gpgcheck']
|
||||
|
||||
|
||||
def _get_cmd(*args):
|
||||
"""Combines the non-interactive zypper command with arguments/subcommands"""
|
||||
cmd = ['/usr/bin/zypper', '--quiet', '--non-interactive']
|
||||
cmd.extend(args)
|
||||
|
||||
return cmd
|
||||
|
||||
|
||||
def _parse_repos(module):
    """Run 'zypper --xmlout repos' and return a list of repo dicts.

    Each dict carries the attributes named in REPO_OPTS plus the repo
    'url'.  Returns an empty list when zypper reports that no
    repositories are defined; any other failure aborts the module.
    """
    cmd = _get_cmd('--xmlout', 'repos')

    from xml.dom.minidom import parseString as parseXML
    rc, stdout, stderr = module.run_command(cmd, check_rc=False)
    if rc == 0:
        parsed = []
        dom = parseXML(stdout)
        for node in dom.getElementsByTagName('repo'):
            entry = {}
            for opt in REPO_OPTS:
                entry[opt] = node.getAttribute(opt)
            entry['url'] = node.getElementsByTagName('url')[0].firstChild.data
            # A repo can be uniquely identified by an alias + url
            parsed.append(entry)
        return parsed
    # exit code 6 is ZYPPER_EXIT_NO_REPOS (no repositories defined)
    if rc == 6:
        return []
    module.fail_json(msg='Failed to execute "%s"' % " ".join(cmd), rc=rc, stdout=stdout, stderr=stderr)
|
||||
|
||||
|
||||
def _repo_changes(realrepo, repocmp):
|
||||
"Check whether the 2 given repos have different settings."
|
||||
for k in repocmp:
|
||||
if repocmp[k] and k not in realrepo:
|
||||
return True
|
||||
|
||||
for k, v in realrepo.items():
|
||||
if k in repocmp and repocmp[k]:
|
||||
valold = str(repocmp[k] or "")
|
||||
valnew = v or ""
|
||||
if k == "url":
|
||||
valold, valnew = valold.rstrip("/"), valnew.rstrip("/")
|
||||
if valold != valnew:
|
||||
return True
|
||||
return False
|
||||
|
||||
|
||||
def repo_exists(module, repodata, overwrite_multiple):
    """Check whether the repository already exists.

    returns (exists, mod, old_repos)
    exists: whether a matching (name, URL) repo exists
    mod: whether there are changes compared to the existing repo
    old_repos: list of matching repos
    """
    existing_repos = _parse_repos(module)

    # look for repos that have matching alias or url to the one searched
    repos = []
    for kw in ['alias', 'url']:
        name = repodata[kw]
        for oldr in existing_repos:
            if repodata[kw] == oldr[kw] and oldr not in repos:
                repos.append(oldr)

    if len(repos) == 0:
        # Repo does not exist yet
        return (False, False, None)
    elif len(repos) == 1:
        # Found an existing repo, look for changes
        has_changes = _repo_changes(repos[0], repodata)
        return (True, has_changes, repos)
    elif len(repos) >= 2:
        if overwrite_multiple:
            # Found two repos and want to overwrite_multiple
            return (True, True, repos)
        else:
            # NOTE(review): 'name' here still holds the value from the LAST
            # loop iteration above (repodata['url']), so the error message
            # always reports the URL even when the match was by alias.
            errmsg = 'More than one repo matched "%s": "%s".' % (name, repos)
            errmsg += ' Use overwrite_multiple to allow more than one repo to be overwritten'
            module.fail_json(msg=errmsg)
|
||||
|
||||
|
||||
def addmodify_repo(module, repodata, old_repos, zypper_version, warnings):
    """Add the repository described by *repodata*, first removing any
    conflicting old repos.

    Version-gated zypper flags (--priority, --gpgcheck) are only emitted
    when the installed zypper supports them; otherwise a warning is
    appended to *warnings*.  Returns (rc, stdout, stderr) of the addrepo
    call.
    """
    repo = repodata['url']
    cmd = _get_cmd('addrepo', '--check')
    if repodata['name']:
        cmd.extend(['--name', repodata['name']])

    # priority on addrepo available since 1.12.25
    # https://github.com/openSUSE/zypper/blob/b9b3cb6db76c47dc4c47e26f6a4d2d4a0d12b06d/package/zypper.changes#L327-L336
    if repodata['priority']:
        if zypper_version >= LooseVersion('1.12.25'):
            cmd.extend(['--priority', str(repodata['priority'])])
        else:
            warnings.append("Setting priority only available for zypper >= 1.12.25. Ignoring priority argument.")

    if repodata['enabled'] == '0':
        cmd.append('--disable')

    # gpgcheck available since 1.6.2
    # https://github.com/openSUSE/zypper/blob/b9b3cb6db76c47dc4c47e26f6a4d2d4a0d12b06d/package/zypper.changes#L2446-L2449
    # the default changed in the past, so don't assume a default here and show warning for old zypper versions
    if zypper_version >= LooseVersion('1.6.2'):
        if repodata['gpgcheck'] == '1':
            cmd.append('--gpgcheck')
        else:
            cmd.append('--no-gpgcheck')
    else:
        warnings.append("Enabling/disabling gpgcheck only available for zypper >= 1.6.2. Using zypper default value.")

    if repodata['autorefresh'] == '1':
        cmd.append('--refresh')

    cmd.append(repo)

    # .repo files carry their own alias; only plain URLs need one appended
    if not repo.endswith('.repo'):
        cmd.append(repodata['alias'])

    # remove conflicting pre-existing repos before re-adding
    if old_repos is not None:
        for oldrepo in old_repos:
            remove_repo(module, oldrepo['url'])

    rc, stdout, stderr = module.run_command(cmd, check_rc=False)
    return rc, stdout, stderr
|
||||
|
||||
|
||||
def remove_repo(module, repo):
    """Remove the given repository (by alias or URL).

    A non-zero zypper exit aborts the module (check_rc=True).
    """
    rc, stdout, stderr = module.run_command(_get_cmd('removerepo', repo), check_rc=True)
    return rc, stdout, stderr
|
||||
|
||||
|
||||
def get_zypper_version(module):
    """Return the installed zypper version as a LooseVersion.

    Falls back to '1.0' when the version cannot be determined, which
    effectively disables all version-gated features.
    """
    rc, stdout, stderr = module.run_command(['/usr/bin/zypper', '--version'])
    if rc == 0 and stdout.startswith('zypper '):
        return LooseVersion(stdout.split()[1])
    return LooseVersion('1.0')
|
||||
|
||||
|
||||
def runrefreshrepo(module, auto_import_keys=False, shortname=None):
    """Force zypper to refresh repository metadata.

    Refreshes all repositories unless *shortname* restricts it to one;
    with *auto_import_keys* new gpg keys are imported automatically.
    A non-zero zypper exit aborts the module (check_rc=True).
    """
    args = ['refresh', '--force']
    if auto_import_keys:
        # the global option must precede the subcommand
        args.insert(0, '--gpg-auto-import-keys')
    cmd = _get_cmd(*args)
    if shortname is not None:
        cmd.extend(['-r', shortname])

    rc, stdout, stderr = module.run_command(cmd, check_rc=True)
    return rc, stdout, stderr
|
||||
|
||||
|
||||
def main():
    """Entry point of the zypper_repository module.

    Validates the parameters, normalises them into a repodata dict
    matching the 'zypper lr -x' representation, then adds, modifies,
    refreshes or removes the repository accordingly.
    """
    module = AnsibleModule(
        argument_spec=dict(
            name=dict(required=False),
            repo=dict(required=False),
            state=dict(choices=['present', 'absent'], default='present'),
            runrefresh=dict(required=False, default='no', type='bool'),
            description=dict(required=False),
            disable_gpg_check=dict(required=False, default=False, type='bool'),
            autorefresh=dict(required=False, default=True, type='bool', aliases=['refresh']),
            priority=dict(required=False, type='int'),
            enabled=dict(required=False, default=True, type='bool'),
            overwrite_multiple=dict(required=False, default=False, type='bool'),
            auto_import_keys=dict(required=False, default=False, type='bool'),
        ),
        supports_check_mode=False,
        required_one_of=[['state', 'runrefresh']],
    )

    repo = module.params['repo']
    alias = module.params['name']
    state = module.params['state']
    overwrite_multiple = module.params['overwrite_multiple']
    auto_import_keys = module.params['auto_import_keys']
    runrefresh = module.params['runrefresh']

    zypper_version = get_zypper_version(module)
    warnings = []  # collect warning messages for final output

    repodata = {
        'url': repo,
        'alias': alias,
        'name': module.params['description'],
        'priority': module.params['priority'],
    }
    # rewrite bools in the language that zypper lr -x provides for easier comparison
    if module.params['enabled']:
        repodata['enabled'] = '1'
    else:
        repodata['enabled'] = '0'
    if module.params['disable_gpg_check']:
        repodata['gpgcheck'] = '0'
    else:
        repodata['gpgcheck'] = '1'
    if module.params['autorefresh']:
        repodata['autorefresh'] = '1'
    else:
        repodata['autorefresh'] = '0'

    def exit_unchanged():
        # common "no changes needed" exit used by both states below
        module.exit_json(changed=False, repodata=repodata, state=state)

    # Check run-time module parameters
    # repo=*/name=* is a special case: refresh everything, change nothing
    if repo == '*' or alias == '*':
        if runrefresh:
            runrefreshrepo(module, auto_import_keys)
            module.exit_json(changed=False, runrefresh=True)
        else:
            module.fail_json(msg='repo=* can only be used with the runrefresh option.')

    if state == 'present' and not repo:
        module.fail_json(msg='Module option state=present requires repo')
    if state == 'absent' and not repo and not alias:
        module.fail_json(msg='Alias or repo parameter required when state=absent')

    # .repo files define their own alias, so 'name' is only valid for plain URLs
    if repo and repo.endswith('.repo'):
        if alias:
            module.fail_json(msg='Incompatible option: \'name\'. Do not use name when adding .repo files')
    else:
        if not alias and state == "present":
            module.fail_json(msg='Name required when adding non-repo files.')

    exists, mod, old_repos = repo_exists(module, repodata, overwrite_multiple)

    # identifier passed to zypper for refresh/remove: prefer the URL
    if repo:
        shortname = repo
    else:
        shortname = alias

    if state == 'present':
        if exists and not mod:
            # repo already configured as requested; at most refresh it
            if runrefresh:
                runrefreshrepo(module, auto_import_keys, shortname)
            exit_unchanged()
        rc, stdout, stderr = addmodify_repo(module, repodata, old_repos, zypper_version, warnings)
        if rc == 0 and (runrefresh or auto_import_keys):
            runrefreshrepo(module, auto_import_keys, shortname)
    elif state == 'absent':
        if not exists:
            exit_unchanged()
        rc, stdout, stderr = remove_repo(module, shortname)

    if rc == 0:
        module.exit_json(changed=True, repodata=repodata, state=state, warnings=warnings)
    else:
        module.fail_json(msg="Zypper failed with rc %s" % rc, rc=rc, stdout=stdout, stderr=stderr, repodata=repodata, state=state, warnings=warnings)
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
main()
|
||||
Reference in New Issue
Block a user