mirror of
https://github.com/ansible-collections/community.general.git
synced 2026-05-08 06:12:51 +00:00
Relocating extras into lib/ansible/modules/ after merge
This commit is contained in:
committed by
Matt Clay
parent
c65ba07d2c
commit
011ea55a8f
453
lib/ansible/modules/web_infrastructure/apache2_mod_proxy.py
Normal file
453
lib/ansible/modules/web_infrastructure/apache2_mod_proxy.py
Normal file
@@ -0,0 +1,453 @@
|
||||
#!/usr/bin/python
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
# (c) 2016, Olivier Boukili <boukili.olivier@gmail.com>
|
||||
#
|
||||
# This file is part of Ansible.
|
||||
#
|
||||
# Ansible is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation, either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# Ansible is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
ANSIBLE_METADATA = {'status': ['preview'],
|
||||
'supported_by': 'community',
|
||||
'version': '1.0'}
|
||||
|
||||
DOCUMENTATION = '''
|
||||
---
|
||||
module: apache2_mod_proxy
|
||||
version_added: "2.2"
|
||||
short_description: Set and/or get members' attributes of an Apache httpd 2.4 mod_proxy balancer pool
|
||||
description:
|
||||
- Set and/or get members' attributes of an Apache httpd 2.4 mod_proxy balancer
|
||||
pool, using HTTP POST and GET requests. The httpd mod_proxy balancer-member
|
||||
status page has to be enabled and accessible, as this module relies on parsing
|
||||
this page. This module supports ansible check_mode, and requires BeautifulSoup
|
||||
python module.
|
||||
options:
|
||||
balancer_url_suffix:
|
||||
default: /balancer-manager/
|
||||
description:
|
||||
- Suffix of the balancer pool url required to access the balancer pool
|
||||
status page (e.g. balancer_vhost[:port]/balancer_url_suffix).
|
||||
required: false
|
||||
balancer_vhost:
|
||||
default: None
|
||||
description:
|
||||
- (ipv4|ipv6|fqdn):port of the Apache httpd 2.4 mod_proxy balancer pool.
|
||||
required: true
|
||||
member_host:
|
||||
default: None
|
||||
description:
|
||||
- (ipv4|ipv6|fqdn) of the balancer member to get or to set attributes to.
|
||||
Port number is autodetected and should not be specified here.
|
||||
If undefined, apache2_mod_proxy module will return a members list of
|
||||
dictionaries of all the current balancer pool members' attributes.
|
||||
required: false
|
||||
state:
|
||||
default: None
|
||||
description:
|
||||
- Desired state of the member host.
|
||||
(absent|disabled),drained,hot_standby,ignore_errors can be
|
||||
simultaneously invoked by separating them with a comma (e.g. state=drained,ignore_errors).
|
||||
required: false
|
||||
choices: ["present", "absent", "enabled", "disabled", "drained", "hot_standby", "ignore_errors"]
|
||||
tls:
|
||||
default: false
|
||||
description:
|
||||
- Use https to access balancer management page.
|
||||
choices: ["true", "false"]
|
||||
validate_certs:
|
||||
default: true
|
||||
description:
|
||||
- Validate ssl/tls certificates.
|
||||
choices: ["true", "false"]
|
||||
'''
|
||||
|
||||
EXAMPLES = '''
|
||||
# Get all current balancer pool members' attributes:
|
||||
- apache2_mod_proxy:
|
||||
balancer_vhost: 10.0.0.2
|
||||
|
||||
# Get a specific member's attributes:
|
||||
- apache2_mod_proxy:
|
||||
balancer_vhost: myws.mydomain.org
|
||||
balancer_suffix: /lb/
|
||||
member_host: node1.myws.mydomain.org
|
||||
|
||||
# Enable all balancer pool members:
|
||||
- apache2_mod_proxy:
|
||||
balancer_vhost: '{{ myloadbalancer_host }}'
|
||||
register: result
|
||||
- apache2_mod_proxy:
|
||||
balancer_vhost: '{{ myloadbalancer_host }}'
|
||||
member_host: '{{ item.host }}'
|
||||
state: present
|
||||
with_items: '{{ result.members }}'
|
||||
|
||||
# Gracefully disable a member from a loadbalancer node:
|
||||
- apache2_mod_proxy:
|
||||
balancer_vhost: '{{ vhost_host }}'
|
||||
member_host: '{{ member.host }}'
|
||||
state: drained
|
||||
delegate_to: myloadbalancernode
|
||||
- wait_for:
|
||||
host: '{{ member.host }}'
|
||||
port: '{{ member.port }}'
|
||||
state: drained
|
||||
delegate_to: myloadbalancernode
|
||||
- apache2_mod_proxy:
|
||||
balancer_vhost: '{{ vhost_host }}'
|
||||
member_host: '{{ member.host }}'
|
||||
state: absent
|
||||
delegate_to: myloadbalancernode
|
||||
'''
|
||||
|
||||
RETURN = '''
|
||||
member:
|
||||
description: specific balancer member information dictionary, returned when apache2_mod_proxy module is invoked with member_host parameter.
|
||||
type: dict
|
||||
returned: success
|
||||
sample:
|
||||
{"attributes":
|
||||
{"Busy": "0",
|
||||
"Elected": "42",
|
||||
"Factor": "1",
|
||||
"From": "136K",
|
||||
"Load": "0",
|
||||
"Route": null,
|
||||
"RouteRedir": null,
|
||||
"Set": "0",
|
||||
"Status": "Init Ok ",
|
||||
"To": " 47K",
|
||||
"Worker URL": null
|
||||
},
|
||||
"balancer_url": "http://10.10.0.2/balancer-manager/",
|
||||
"host": "10.10.0.20",
|
||||
"management_url": "http://10.10.0.2/lb/?b=mywsbalancer&w=http://10.10.0.20:8080/ws&nonce=8925436c-79c6-4841-8936-e7d13b79239b",
|
||||
"path": "/ws",
|
||||
"port": 8080,
|
||||
"protocol": "http",
|
||||
"status": {
|
||||
"disabled": false,
|
||||
"drained": false,
|
||||
"hot_standby": false,
|
||||
"ignore_errors": false
|
||||
}
|
||||
}
|
||||
members:
|
||||
description: list of member (defined above) dictionaries, returned when apache2_mod_proxy is invoked with no member_host and state args.
|
||||
returned: success
|
||||
type: list
|
||||
sample:
|
||||
[{"attributes": {
|
||||
"Busy": "0",
|
||||
"Elected": "42",
|
||||
"Factor": "1",
|
||||
"From": "136K",
|
||||
"Load": "0",
|
||||
"Route": null,
|
||||
"RouteRedir": null,
|
||||
"Set": "0",
|
||||
"Status": "Init Ok ",
|
||||
"To": " 47K",
|
||||
"Worker URL": null
|
||||
},
|
||||
"balancer_url": "http://10.10.0.2/balancer-manager/",
|
||||
"host": "10.10.0.20",
|
||||
"management_url": "http://10.10.0.2/lb/?b=mywsbalancer&w=http://10.10.0.20:8080/ws&nonce=8925436c-79c6-4841-8936-e7d13b79239b",
|
||||
"path": "/ws",
|
||||
"port": 8080,
|
||||
"protocol": "http",
|
||||
"status": {
|
||||
"disabled": false,
|
||||
"drained": false,
|
||||
"hot_standby": false,
|
||||
"ignore_errors": false
|
||||
}
|
||||
},
|
||||
{"attributes": {
|
||||
"Busy": "0",
|
||||
"Elected": "42",
|
||||
"Factor": "1",
|
||||
"From": "136K",
|
||||
"Load": "0",
|
||||
"Route": null,
|
||||
"RouteRedir": null,
|
||||
"Set": "0",
|
||||
"Status": "Init Ok ",
|
||||
"To": " 47K",
|
||||
"Worker URL": null
|
||||
},
|
||||
"balancer_url": "http://10.10.0.2/balancer-manager/",
|
||||
"host": "10.10.0.21",
|
||||
"management_url": "http://10.10.0.2/lb/?b=mywsbalancer&w=http://10.10.0.21:8080/ws&nonce=8925436c-79c6-4841-8936-e7d13b79239b",
|
||||
"path": "/ws",
|
||||
"port": 8080,
|
||||
"protocol": "http",
|
||||
"status": {
|
||||
"disabled": false,
|
||||
"drained": false,
|
||||
"hot_standby": false,
|
||||
"ignore_errors": false}
|
||||
}
|
||||
]
|
||||
'''
|
||||
|
||||
import re
|
||||
|
||||
try:
|
||||
from BeautifulSoup import BeautifulSoup
|
||||
except ImportError:
|
||||
HAS_BEAUTIFULSOUP = False
|
||||
else:
|
||||
HAS_BEAUTIFULSOUP = True
|
||||
|
||||
# balancer member attributes extraction regexp:
|
||||
EXPRESSION = r"(b=([\w\.\-]+)&w=(https?|ajp|wss?|ftp|[sf]cgi)://([\w\.\-]+):?(\d*)([/\w\.\-]*)&?[\w\-\=]*)"
|
||||
# Apache2 server version extraction regexp:
|
||||
APACHE_VERSION_EXPRESSION = r"Server Version: Apache/([\d.]+) \(([\w]+)\)"
|
||||
|
||||
def regexp_extraction(string, _regexp, groups=1):
    """ Returns the capture group (default=1) specified in the regexp, applied to the string.

    Returns None when the regexp does not match or the requested group is empty.
    """
    match = re.search(pattern=str(_regexp), string=str(string))
    if match is not None:
        captured = match.group(groups)
        if captured != '':
            return str(captured)
    return None
|
||||
|
||||
class BalancerMember(object):
    """ Apache 2.4 mod_proxy LB balancer member.
    attributes:
        read-only:
            host -> member host (string),
            management_url -> member management url (string),
            protocol -> member protocol (string)
            port -> member port (string),
            path -> member location (string),
            balancer_url -> url of this member's parent balancer (string),
            attributes -> whole member attributes (dictionary)
            module -> ansible module instance (AnsibleModule object).
        writable:
            status -> status of the member (dictionary)
    """

    def __init__(self, management_url, balancer_url, module):
        # Host/protocol/port/path are parsed out of the management url with
        # the module-level EXPRESSION regexp (capture groups 4, 3, 5, 6).
        self.host = regexp_extraction(management_url, str(EXPRESSION), 4)
        self.management_url = str(management_url)
        self.protocol = regexp_extraction(management_url, EXPRESSION, 3)
        self.port = regexp_extraction(management_url, EXPRESSION, 5)
        self.path = regexp_extraction(management_url, EXPRESSION, 6)
        self.balancer_url = str(balancer_url)
        self.module = module

    def get_member_attributes(self):
        """ Returns a dictionary of a balancer member's attributes."""

        balancer_member_page = fetch_url(self.module, self.management_url)

        # fetch_url returns a (response, info) tuple; anything but HTTP 200
        # means the member management page could not be fetched.
        # BUGFIX: use an explicit check instead of assert (stripped under -O)
        # and stringify the info dict before concatenating it to the message.
        if balancer_member_page[1]['status'] != 200:
            self.module.fail_json(msg="Could not get balancer_member_page, check for connectivity! " + str(balancer_member_page[1]))

        try:
            soup = BeautifulSoup(balancer_member_page[0])
        except TypeError:
            # BUGFIX: the original message referenced 'soup', which is unbound
            # when BeautifulSoup() itself raised; report the raw page instead.
            self.module.fail_json(msg="Cannot parse balancer_member_page HTML! " + str(balancer_member_page[0]))
        else:
            # The second table of the page holds the member attributes: the
            # first row carries the headers, the following rows the values.
            subsoup = soup.findAll('table')[1].findAll('tr')
            keys = subsoup[0].findAll('th')
            for valuesset in subsoup[1::1]:
                if re.search(pattern=self.host, string=str(valuesset)):
                    values = valuesset.findAll('td')
                    return dict((keys[x].string, values[x].string) for x in range(0, len(keys)))

    def get_member_status(self):
        """ Returns a dictionary of a balancer member's status attributes."""
        # Short flags shown in the member's 'Status' column for each mode.
        status_mapping = {'disabled': 'Dis',
                          'drained': 'Drn',
                          'hot_standby': 'Stby',
                          'ignore_errors': 'Ign'}
        status = {}
        actual_status = str(self.attributes['Status'])
        for mode in status_mapping.keys():
            # Presence of the flag in the Status string means the mode is on.
            status[mode] = bool(re.search(pattern=status_mapping[mode], string=actual_status))
        return status

    def set_member_status(self, values):
        """ Sets a balancer member's status attributes amongst pre-mapped values."""
        # POST parameters understood by the balancer-manager page.
        values_mapping = {'disabled': '&w_status_D',
                          'drained': '&w_status_N',
                          'hot_standby': '&w_status_H',
                          'ignore_errors': '&w_status_I'}

        request_body = regexp_extraction(self.management_url, EXPRESSION, 1)
        for k in values_mapping.keys():
            if values[str(k)]:
                request_body = request_body + str(values_mapping[k]) + '=1'
            else:
                request_body = request_body + str(values_mapping[k]) + '=0'

        response = fetch_url(self.module, self.management_url, data=str(request_body))
        # BUGFIX: response[1]['status'] is an int; the original concatenated
        # it to a str, raising TypeError inside the failure branch. Also use
        # an explicit check instead of assert.
        if response[1]['status'] != 200:
            self.module.fail_json(msg="Could not set the member status! " + self.host + " " + str(response[1]['status']))

    attributes = property(get_member_attributes)
    status = property(get_member_status, set_member_status)
|
||||
|
||||
|
||||
class Balancer(object):
    """ Apache httpd 2.4 mod_proxy balancer object"""

    def __init__(self, host, suffix, module, members=None, tls=False):
        # Build both the vhost base url and the balancer-manager page url.
        scheme = 'https://' if tls else 'http://'
        self.base_url = scheme + str(host)
        self.url = scheme + str(host) + str(suffix)
        self.module = module
        self.page = self.fetch_balancer_page()
        # BUGFIX: the original only initialized _members when 'members' was
        # None, leaving the attribute unset otherwise.
        self._members = [] if members is None else members

    def fetch_balancer_page(self):
        """ Returns the balancer management html page as a string for later parsing."""
        page = fetch_url(self.module, str(self.url))
        # BUGFIX: explicit status check instead of assert (stripped under -O).
        if page[1]['status'] != 200:
            self.module.fail_json(msg="Could not get balancer page! HTTP status response: " + str(page[1]['status']))
        content = page[0].read()
        apache_version = regexp_extraction(content, APACHE_VERSION_EXPRESSION, 1)
        # BUGFIX: regexp_extraction may return None (no version banner on the
        # page); the original passed None to re.search, raising TypeError.
        if apache_version is None or not re.search(pattern=r"2\.4\.[\d]*", string=apache_version):
            self.module.fail_json(msg="This module only acts on an Apache2 2.4+ instance, current Apache2 version: " + str(apache_version))
        return content

    def get_balancer_members(self):
        """ Returns members of the balancer as a generator object for later iteration."""
        try:
            soup = BeautifulSoup(self.page)
        except TypeError:
            self.module.fail_json(msg="Cannot parse balancer page HTML! " + str(self.page))
        else:
            # The first anchor on the page is the balancer itself; the
            # following anchors link to each member's management page.
            for element in soup.findAll('a')[1::1]:
                balancer_member_suffix = str(element.get('href'))
                # BUGFIX: the original used "is not ''" — an identity test on
                # a literal — which does not reliably detect an empty href.
                if balancer_member_suffix == '':
                    self.module.fail_json(msg="Argument 'balancer_member_suffix' is empty!")
                else:
                    yield BalancerMember(str(self.base_url + balancer_member_suffix), str(self.url), self.module)

    members = property(get_balancer_members)
|
||||
|
||||
def main():
    """ Initiates module."""
    module = AnsibleModule(
        argument_spec=dict(
            # BUGFIX: 'required=True' combined with 'default=None' is
            # contradictory in an argument spec; the default is dropped.
            balancer_vhost=dict(required=True, type='str'),
            balancer_url_suffix=dict(default="/balancer-manager/", type='str'),
            member_host=dict(type='str'),
            state=dict(type='str'),
            tls=dict(default=False, type='bool'),
            validate_certs=dict(default=True, type='bool')
        ),
        supports_check_mode=True
    )

    if HAS_BEAUTIFULSOUP is False:
        module.fail_json(msg="python module 'BeautifulSoup' is required!")

    # 'state' may carry several comma-separated values (e.g. "drained,ignore_errors").
    if module.params['state'] is not None:  # BUGFIX: 'is not None', not '!= None'
        states = module.params['state'].split(',')
        if (len(states) > 1) and (("present" in states) or ("enabled" in states)):
            module.fail_json(msg="state present/enabled is mutually exclusive with other states!")
        else:
            for _state in states:
                if _state not in ['present', 'absent', 'enabled', 'disabled', 'drained', 'hot_standby', 'ignore_errors']:
                    module.fail_json(msg="State can only take values amongst 'present', 'absent', 'enabled', 'disabled', 'drained', 'hot_standby', 'ignore_errors'.")
    else:
        states = ['None']

    mybalancer = Balancer(module.params['balancer_vhost'],
                          module.params['balancer_url_suffix'],
                          module=module,
                          tls=module.params['tls'])

    if module.params['member_host'] is None:
        # No member selected: report every member of the pool.
        json_output_list = []
        for member in mybalancer.members:
            json_output_list.append({
                "host": member.host,
                "status": member.status,
                "protocol": member.protocol,
                "port": member.port,
                "path": member.path,
                "attributes": member.attributes,
                "management_url": member.management_url,
                "balancer_url": member.balancer_url
            })
        module.exit_json(
            changed=False,
            members=json_output_list
        )
    else:
        changed = False
        member_exists = False
        # Target status derived from the requested states; everything starts off.
        member_status = {'disabled': False, 'drained': False, 'hot_standby': False, 'ignore_errors': False}
        for mode in member_status.keys():
            for state in states:
                if mode == state:
                    member_status[mode] = True
                elif mode == 'disabled' and state == 'absent':
                    # 'absent' is implemented by disabling the member.
                    member_status[mode] = True

        for member in mybalancer.members:
            if str(member.host) == str(module.params['member_host']):
                member_exists = True
                if module.params['state'] is not None:
                    member_status_before = member.status
                    if not module.check_mode:
                        member_status_after = member.status = member_status
                    else:
                        member_status_after = member_status
                    if member_status_before != member_status_after:
                        changed = True
                json_output = {
                    "host": member.host,
                    "status": member.status,
                    "protocol": member.protocol,
                    "port": member.port,
                    "path": member.path,
                    "attributes": member.attributes,
                    "management_url": member.management_url,
                    "balancer_url": member.balancer_url
                }
        if member_exists:
            module.exit_json(
                changed=changed,
                member=json_output
            )
        else:
            module.fail_json(msg=str(module.params['member_host']) + ' is not a member of the balancer ' + str(module.params['balancer_vhost']) + '!')
|
||||
|
||||
from ansible.module_utils.basic import AnsibleModule
|
||||
from ansible.module_utils.urls import fetch_url
|
||||
if __name__ == '__main__':
|
||||
main()
|
||||
534
lib/ansible/modules/web_infrastructure/deploy_helper.py
Normal file
534
lib/ansible/modules/web_infrastructure/deploy_helper.py
Normal file
@@ -0,0 +1,534 @@
|
||||
#!/usr/bin/python
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
# (c) 2014, Jasper N. Brouwer <jasper@nerdsweide.nl>
|
||||
# (c) 2014, Ramon de la Fuente <ramon@delafuente.nl>
|
||||
#
|
||||
# This file is part of Ansible
|
||||
#
|
||||
# Ansible is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation, either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# Ansible is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
ANSIBLE_METADATA = {'status': ['preview'],
|
||||
'supported_by': 'community',
|
||||
'version': '1.0'}
|
||||
|
||||
DOCUMENTATION = '''
|
||||
---
|
||||
module: deploy_helper
|
||||
version_added: "2.0"
|
||||
author: "Ramon de la Fuente (@ramondelafuente)"
|
||||
short_description: Manages some of the steps common in deploying projects.
|
||||
description:
|
||||
- The Deploy Helper manages some of the steps common in deploying software.
|
||||
It creates a folder structure, manages a symlink for the current release
|
||||
and cleans up old releases.
|
||||
- "Running it with the C(state=query) or C(state=present) will return the C(deploy_helper) fact.
|
||||
C(project_path), whatever you set in the path parameter,
|
||||
C(current_path), the path to the symlink that points to the active release,
|
||||
C(releases_path), the path to the folder to keep releases in,
|
||||
C(shared_path), the path to the folder to keep shared resources in,
|
||||
C(unfinished_filename), the file to check for to recognize unfinished builds,
|
||||
C(previous_release), the release the 'current' symlink is pointing to,
|
||||
C(previous_release_path), the full path to the 'current' symlink target,
|
||||
C(new_release), either the 'release' parameter or a generated timestamp,
|
||||
C(new_release_path), the path to the new release folder (not created by the module)."
|
||||
|
||||
options:
|
||||
path:
|
||||
required: True
|
||||
aliases: ['dest']
|
||||
description:
|
||||
- the root path of the project. Alias I(dest).
|
||||
Returned in the C(deploy_helper.project_path) fact.
|
||||
|
||||
state:
|
||||
required: False
|
||||
choices: [ present, finalize, absent, clean, query ]
|
||||
default: present
|
||||
description:
|
||||
- the state of the project.
|
||||
C(query) will only gather facts,
|
||||
C(present) will create the project I(root) folder, and in it the I(releases) and I(shared) folders,
|
||||
C(finalize) will remove the unfinished_filename file, create a symlink to the newly
|
||||
deployed release and optionally clean old releases,
|
||||
C(clean) will remove failed & old releases,
|
||||
C(absent) will remove the project folder (synonymous to the M(file) module with C(state=absent))
|
||||
|
||||
release:
|
||||
required: False
|
||||
default: None
|
||||
description:
|
||||
- the release version that is being deployed. Defaults to a timestamp format %Y%m%d%H%M%S (i.e. '20141119223359').
|
||||
This parameter is optional during C(state=present), but needs to be set explicitly for C(state=finalize).
|
||||
You can use the generated fact C(release={{ deploy_helper.new_release }}).
|
||||
|
||||
releases_path:
|
||||
required: False
|
||||
default: releases
|
||||
description:
|
||||
- the name of the folder that will hold the releases. This can be relative to C(path) or absolute.
|
||||
Returned in the C(deploy_helper.releases_path) fact.
|
||||
|
||||
shared_path:
|
||||
required: False
|
||||
default: shared
|
||||
description:
|
||||
- the name of the folder that will hold the shared resources. This can be relative to C(path) or absolute.
|
||||
If this is set to an empty string, no shared folder will be created.
|
||||
Returned in the C(deploy_helper.shared_path) fact.
|
||||
|
||||
current_path:
|
||||
required: False
|
||||
default: current
|
||||
description:
|
||||
- the name of the symlink that is created when the deploy is finalized. Used in C(finalize) and C(clean).
|
||||
Returned in the C(deploy_helper.current_path) fact.
|
||||
|
||||
unfinished_filename:
|
||||
required: False
|
||||
default: DEPLOY_UNFINISHED
|
||||
description:
|
||||
- the name of the file that indicates a deploy has not finished. All folders in the releases_path that
|
||||
contain this file will be deleted on C(state=finalize) with clean=True, or C(state=clean). This file is
|
||||
automatically deleted from the I(new_release_path) during C(state=finalize).
|
||||
|
||||
clean:
|
||||
required: False
|
||||
default: True
|
||||
description:
|
||||
- Whether to run the clean procedure in case of C(state=finalize).
|
||||
|
||||
keep_releases:
|
||||
required: False
|
||||
default: 5
|
||||
description:
|
||||
- the number of old releases to keep when cleaning. Used in C(finalize) and C(clean). Any unfinished builds
|
||||
will be deleted first, so only correct releases will count. The current version will not count.
|
||||
|
||||
notes:
|
||||
- Facts are only returned for C(state=query) and C(state=present). If you use both, you should pass any overridden
|
||||
parameters to both calls, otherwise the second call will overwrite the facts of the first one.
|
||||
- When using C(state=clean), the releases are ordered by I(creation date). You should be able to switch to a
|
||||
new naming strategy without problems.
|
||||
- Because of the default behaviour of generating the I(new_release) fact, this module will not be idempotent
|
||||
unless you pass your own release name with C(release). Due to the nature of deploying software, this should not
|
||||
be much of a problem.
|
||||
'''
|
||||
|
||||
EXAMPLES = '''
|
||||
|
||||
# General explanation, starting with an example folder structure for a project:
|
||||
|
||||
root:
|
||||
releases:
|
||||
- 20140415234508
|
||||
- 20140415235146
|
||||
- 20140416082818
|
||||
|
||||
shared:
|
||||
- sessions
|
||||
- uploads
|
||||
|
||||
current: -> releases/20140416082818
|
||||
|
||||
|
||||
The 'releases' folder holds all the available releases. A release is a complete build of the application being
|
||||
deployed. This can be a clone of a repository for example, or a sync of a local folder on your filesystem.
|
||||
Having timestamped folders is one way of having distinct releases, but you could choose your own strategy like
|
||||
git tags or commit hashes.
|
||||
|
||||
During a deploy, a new folder should be created in the releases folder and any build steps required should be
|
||||
performed. Once the new build is ready, the deploy procedure is 'finalized' by replacing the 'current' symlink
|
||||
with a link to this build.
|
||||
|
||||
The 'shared' folder holds any resource that is shared between releases. Examples of this are web-server
|
||||
session files, or files uploaded by users of your application. It's quite common to have symlinks from a release
|
||||
folder pointing to a shared/subfolder, and creating these links would be automated as part of the build steps.
|
||||
|
||||
The 'current' symlink points to one of the releases. Probably the latest one, unless a deploy is in progress.
|
||||
The web-server's root for the project will go through this symlink, so the 'downtime' when switching to a new
|
||||
release is reduced to the time it takes to switch the link.
|
||||
|
||||
To distinguish between successful builds and unfinished ones, a file can be placed in the folder of the release
|
||||
that is currently in progress. The existence of this file will mark it as unfinished, and allow an automated
|
||||
procedure to remove it during cleanup.
|
||||
|
||||
|
||||
# Typical usage:
|
||||
- name: Initialize the deploy root and gather facts
|
||||
deploy_helper:
|
||||
path: /path/to/root
|
||||
- name: Clone the project to the new release folder
|
||||
git:
|
||||
repo: 'git://foosball.example.org/path/to/repo.git'
|
||||
dest: '{{ deploy_helper.new_release_path }}'
|
||||
version: 'v1.1.1'
|
||||
- name: Add an unfinished file, to allow cleanup on successful finalize
|
||||
file:
|
||||
path: '{{ deploy_helper.new_release_path }}/{{ deploy_helper.unfinished_filename }}'
|
||||
state: touch
|
||||
- name: Perform some build steps, like running your dependency manager for example
|
||||
composer:
|
||||
command: install
|
||||
working_dir: '{{ deploy_helper.new_release_path }}'
|
||||
- name: Create some folders in the shared folder
|
||||
file:
|
||||
path: '{{ deploy_helper.shared_path }}/{{ item }}'
|
||||
state: directory
|
||||
with_items:
|
||||
- sessions
|
||||
- uploads
|
||||
- name: Add symlinks from the new release to the shared folder
|
||||
file:
|
||||
path: '{{ deploy_helper.new_release_path }}/{{ item.path }}'
|
||||
src: '{{ deploy_helper.shared_path }}/{{ item.src }}'
|
||||
state: link
|
||||
with_items:
|
||||
- path: app/sessions
|
||||
src: sessions
|
||||
- path: web/uploads
|
||||
src: uploads
|
||||
- name: Finalize the deploy, removing the unfinished file and switching the symlink
|
||||
deploy_helper:
|
||||
path: /path/to/root
|
||||
release: '{{ deploy_helper.new_release }}'
|
||||
state: finalize
|
||||
|
||||
# Retrieving facts before running a deploy
|
||||
- name: Run 'state=query' to gather facts without changing anything
|
||||
deploy_helper:
|
||||
path: /path/to/root
|
||||
state: query
|
||||
# Remember to set the 'release' parameter when you actually call 'state=present' later
|
||||
- name: Initialize the deploy root
|
||||
deploy_helper:
|
||||
path: /path/to/root
|
||||
release: '{{ deploy_helper.new_release }}'
|
||||
state: present
|
||||
|
||||
# all paths can be absolute or relative (to the 'path' parameter)
|
||||
- deploy_helper:
|
||||
path: /path/to/root
|
||||
releases_path: /var/www/project/releases
|
||||
shared_path: /var/www/shared
|
||||
current_path: /var/www/active
|
||||
|
||||
# Using your own naming strategy for releases (a version tag in this case):
|
||||
- deploy_helper:
|
||||
path: /path/to/root
|
||||
release: 'v1.1.1'
|
||||
state: present
|
||||
- deploy_helper:
|
||||
path: /path/to/root
|
||||
release: '{{ deploy_helper.new_release }}'
|
||||
state: finalize
|
||||
|
||||
# Using a different unfinished_filename:
|
||||
- deploy_helper:
|
||||
path: /path/to/root
|
||||
unfinished_filename: README.md
|
||||
release: '{{ deploy_helper.new_release }}'
|
||||
state: finalize
|
||||
|
||||
# Postponing the cleanup of older builds:
|
||||
- deploy_helper:
|
||||
path: /path/to/root
|
||||
release: '{{ deploy_helper.new_release }}'
|
||||
state: finalize
|
||||
clean: False
|
||||
- deploy_helper:
|
||||
path: /path/to/root
|
||||
state: clean
|
||||
# Or running the cleanup ahead of the new deploy
|
||||
- deploy_helper:
|
||||
path: /path/to/root
|
||||
state: clean
|
||||
- deploy_helper:
|
||||
path: /path/to/root
|
||||
state: present
|
||||
|
||||
# Keeping more old releases:
|
||||
- deploy_helper:
|
||||
path: /path/to/root
|
||||
release: '{{ deploy_helper.new_release }}'
|
||||
state: finalize
|
||||
keep_releases: 10
|
||||
# Or, if you use 'clean=false' on finalize:
|
||||
- deploy_helper:
|
||||
path: /path/to/root
|
||||
state: clean
|
||||
keep_releases: 10
|
||||
|
||||
# Removing the entire project root folder
|
||||
- deploy_helper:
|
||||
path: /path/to/root
|
||||
state: absent
|
||||
|
||||
# Debugging the facts returned by the module
|
||||
- deploy_helper:
|
||||
path: /path/to/root
|
||||
- debug:
|
||||
var: deploy_helper
|
||||
'''
|
||||
|
||||
# import module snippets
|
||||
from ansible.module_utils.basic import *
|
||||
from ansible.module_utils.pycompat24 import get_exception
|
||||
|
||||
class DeployHelper(object):
|
||||
|
||||
def __init__(self, module):
|
||||
module.params['path'] = os.path.expanduser(module.params['path'])
|
||||
|
||||
self.module = module
|
||||
self.file_args = module.load_file_common_arguments(module.params)
|
||||
|
||||
self.clean = module.params['clean']
|
||||
self.current_path = module.params['current_path']
|
||||
self.keep_releases = module.params['keep_releases']
|
||||
self.path = module.params['path']
|
||||
self.release = module.params['release']
|
||||
self.releases_path = module.params['releases_path']
|
||||
self.shared_path = module.params['shared_path']
|
||||
self.state = module.params['state']
|
||||
self.unfinished_filename = module.params['unfinished_filename']
|
||||
|
||||
def gather_facts(self):
|
||||
current_path = os.path.join(self.path, self.current_path)
|
||||
releases_path = os.path.join(self.path, self.releases_path)
|
||||
if self.shared_path:
|
||||
shared_path = os.path.join(self.path, self.shared_path)
|
||||
else:
|
||||
shared_path = None
|
||||
|
||||
previous_release, previous_release_path = self._get_last_release(current_path)
|
||||
|
||||
if not self.release and (self.state == 'query' or self.state == 'present'):
|
||||
self.release = time.strftime("%Y%m%d%H%M%S")
|
||||
|
||||
new_release_path = os.path.join(releases_path, self.release)
|
||||
|
||||
return {
|
||||
'project_path': self.path,
|
||||
'current_path': current_path,
|
||||
'releases_path': releases_path,
|
||||
'shared_path': shared_path,
|
||||
'previous_release': previous_release,
|
||||
'previous_release_path': previous_release_path,
|
||||
'new_release': self.release,
|
||||
'new_release_path': new_release_path,
|
||||
'unfinished_filename': self.unfinished_filename
|
||||
}
|
||||
|
||||
def delete_path(self, path):
|
||||
if not os.path.lexists(path):
|
||||
return False
|
||||
|
||||
if not os.path.isdir(path):
|
||||
self.module.fail_json(msg="%s exists but is not a directory" % path)
|
||||
|
||||
if not self.module.check_mode:
|
||||
try:
|
||||
shutil.rmtree(path, ignore_errors=False)
|
||||
except Exception:
|
||||
e = get_exception()
|
||||
self.module.fail_json(msg="rmtree failed: %s" % str(e))
|
||||
|
||||
return True
|
||||
|
||||
def create_path(self, path):
|
||||
changed = False
|
||||
|
||||
if not os.path.lexists(path):
|
||||
changed = True
|
||||
if not self.module.check_mode:
|
||||
os.makedirs(path)
|
||||
|
||||
elif not os.path.isdir(path):
|
||||
self.module.fail_json(msg="%s exists but is not a directory" % path)
|
||||
|
||||
changed += self.module.set_directory_attributes_if_different(self._get_file_args(path), changed)
|
||||
|
||||
return changed
|
||||
|
||||
def check_link(self, path):
|
||||
if os.path.lexists(path):
|
||||
if not os.path.islink(path):
|
||||
self.module.fail_json(msg="%s exists but is not a symbolic link" % path)
|
||||
|
||||
def create_link(self, source, link_name):
|
||||
changed = False
|
||||
|
||||
if os.path.islink(link_name):
|
||||
norm_link = os.path.normpath(os.path.realpath(link_name))
|
||||
norm_source = os.path.normpath(os.path.realpath(source))
|
||||
if norm_link == norm_source:
|
||||
changed = False
|
||||
else:
|
||||
changed = True
|
||||
if not self.module.check_mode:
|
||||
if not os.path.lexists(source):
|
||||
self.module.fail_json(msg="the symlink target %s doesn't exists" % source)
|
||||
tmp_link_name = link_name + '.' + self.unfinished_filename
|
||||
if os.path.islink(tmp_link_name):
|
||||
os.unlink(tmp_link_name)
|
||||
os.symlink(source, tmp_link_name)
|
||||
os.rename(tmp_link_name, link_name)
|
||||
else:
|
||||
changed = True
|
||||
if not self.module.check_mode:
|
||||
os.symlink(source, link_name)
|
||||
|
||||
return changed
|
||||
|
||||
def remove_unfinished_file(self, new_release_path):
|
||||
changed = False
|
||||
unfinished_file_path = os.path.join(new_release_path, self.unfinished_filename)
|
||||
if os.path.lexists(unfinished_file_path):
|
||||
changed = True
|
||||
if not self.module.check_mode:
|
||||
os.remove(unfinished_file_path)
|
||||
|
||||
return changed
|
||||
|
||||
def remove_unfinished_builds(self, releases_path):
|
||||
changes = 0
|
||||
|
||||
for release in os.listdir(releases_path):
|
||||
if os.path.isfile(os.path.join(releases_path, release, self.unfinished_filename)):
|
||||
if self.module.check_mode:
|
||||
changes += 1
|
||||
else:
|
||||
changes += self.delete_path(os.path.join(releases_path, release))
|
||||
|
||||
return changes
|
||||
|
||||
def remove_unfinished_link(self, path):
|
||||
changed = False
|
||||
|
||||
tmp_link_name = os.path.join(path, self.release + '.' + self.unfinished_filename)
|
||||
if not self.module.check_mode and os.path.exists(tmp_link_name):
|
||||
changed = True
|
||||
os.remove(tmp_link_name)
|
||||
|
||||
return changed
|
||||
|
||||
def cleanup(self, releases_path, reserve_version):
|
||||
changes = 0
|
||||
|
||||
if os.path.lexists(releases_path):
|
||||
releases = [ f for f in os.listdir(releases_path) if os.path.isdir(os.path.join(releases_path,f)) ]
|
||||
try:
|
||||
releases.remove(reserve_version)
|
||||
except ValueError:
|
||||
pass
|
||||
|
||||
if not self.module.check_mode:
|
||||
releases.sort( key=lambda x: os.path.getctime(os.path.join(releases_path,x)), reverse=True)
|
||||
for release in releases[self.keep_releases:]:
|
||||
changes += self.delete_path(os.path.join(releases_path, release))
|
||||
elif len(releases) > self.keep_releases:
|
||||
changes += (len(releases) - self.keep_releases)
|
||||
|
||||
return changes
|
||||
|
||||
def _get_file_args(self, path):
|
||||
file_args = self.file_args.copy()
|
||||
file_args['path'] = path
|
||||
return file_args
|
||||
|
||||
def _get_last_release(self, current_path):
|
||||
previous_release = None
|
||||
previous_release_path = None
|
||||
|
||||
if os.path.lexists(current_path):
|
||||
previous_release_path = os.path.realpath(current_path)
|
||||
previous_release = os.path.basename(previous_release_path)
|
||||
|
||||
return previous_release, previous_release_path
|
||||
|
||||
def main():
|
||||
|
||||
module = AnsibleModule(
|
||||
argument_spec = dict(
|
||||
path = dict(aliases=['dest'], required=True, type='str'),
|
||||
release = dict(required=False, type='str', default=None),
|
||||
releases_path = dict(required=False, type='str', default='releases'),
|
||||
shared_path = dict(required=False, type='str', default='shared'),
|
||||
current_path = dict(required=False, type='str', default='current'),
|
||||
keep_releases = dict(required=False, type='int', default=5),
|
||||
clean = dict(required=False, type='bool', default=True),
|
||||
unfinished_filename = dict(required=False, type='str', default='DEPLOY_UNFINISHED'),
|
||||
state = dict(required=False, choices=['present', 'absent', 'clean', 'finalize', 'query'], default='present')
|
||||
),
|
||||
add_file_common_args = True,
|
||||
supports_check_mode = True
|
||||
)
|
||||
|
||||
deploy_helper = DeployHelper(module)
|
||||
facts = deploy_helper.gather_facts()
|
||||
|
||||
result = {
|
||||
'state': deploy_helper.state
|
||||
}
|
||||
|
||||
changes = 0
|
||||
|
||||
if deploy_helper.state == 'query':
|
||||
result['ansible_facts'] = { 'deploy_helper': facts }
|
||||
|
||||
elif deploy_helper.state == 'present':
|
||||
deploy_helper.check_link(facts['current_path'])
|
||||
changes += deploy_helper.create_path(facts['project_path'])
|
||||
changes += deploy_helper.create_path(facts['releases_path'])
|
||||
if deploy_helper.shared_path:
|
||||
changes += deploy_helper.create_path(facts['shared_path'])
|
||||
|
||||
result['ansible_facts'] = { 'deploy_helper': facts }
|
||||
|
||||
elif deploy_helper.state == 'finalize':
|
||||
if not deploy_helper.release:
|
||||
module.fail_json(msg="'release' is a required parameter for state=finalize (try the 'deploy_helper.new_release' fact)")
|
||||
if deploy_helper.keep_releases <= 0:
|
||||
module.fail_json(msg="'keep_releases' should be at least 1")
|
||||
|
||||
changes += deploy_helper.remove_unfinished_file(facts['new_release_path'])
|
||||
changes += deploy_helper.create_link(facts['new_release_path'], facts['current_path'])
|
||||
if deploy_helper.clean:
|
||||
changes += deploy_helper.remove_unfinished_link(facts['project_path'])
|
||||
changes += deploy_helper.remove_unfinished_builds(facts['releases_path'])
|
||||
changes += deploy_helper.cleanup(facts['releases_path'], facts['new_release'])
|
||||
|
||||
elif deploy_helper.state == 'clean':
|
||||
changes += deploy_helper.remove_unfinished_link(facts['project_path'])
|
||||
changes += deploy_helper.remove_unfinished_builds(facts['releases_path'])
|
||||
changes += deploy_helper.cleanup(facts['releases_path'], facts['new_release'])
|
||||
|
||||
elif deploy_helper.state == 'absent':
|
||||
# destroy the facts
|
||||
result['ansible_facts'] = { 'deploy_helper': [] }
|
||||
changes += deploy_helper.delete_path(facts['project_path'])
|
||||
|
||||
if changes > 0:
|
||||
result['changed'] = True
|
||||
else:
|
||||
result['changed'] = False
|
||||
|
||||
module.exit_json(**result)
|
||||
|
||||
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
main()
|
||||
229
lib/ansible/modules/web_infrastructure/ejabberd_user.py
Normal file
229
lib/ansible/modules/web_infrastructure/ejabberd_user.py
Normal file
@@ -0,0 +1,229 @@
|
||||
#!/usr/bin/python
|
||||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# Copyright (C) 2013, Peter Sprygada <sprygada@gmail.com>
|
||||
#
|
||||
# This program is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation, either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
#
|
||||
ANSIBLE_METADATA = {'status': ['preview'],
|
||||
'supported_by': 'community',
|
||||
'version': '1.0'}
|
||||
|
||||
DOCUMENTATION = '''
|
||||
---
|
||||
module: ejabberd_user
|
||||
version_added: "1.5"
|
||||
author: "Peter Sprygada (@privateip)"
|
||||
short_description: Manages users for ejabberd servers
|
||||
requirements:
|
||||
- ejabberd with mod_admin_extra
|
||||
description:
|
||||
- This module provides user management for ejabberd servers
|
||||
options:
|
||||
username:
|
||||
description:
|
||||
- the name of the user to manage
|
||||
required: true
|
||||
host:
|
||||
description:
|
||||
- the ejabberd host associated with this username
|
||||
required: true
|
||||
password:
|
||||
description:
|
||||
- the password to assign to the username
|
||||
required: false
|
||||
logging:
|
||||
description:
|
||||
- enables or disables the local syslog facility for this module
|
||||
required: false
|
||||
default: false
|
||||
choices: [ 'true', 'false', 'yes', 'no' ]
|
||||
state:
|
||||
description:
|
||||
- describe the desired state of the user to be managed
|
||||
required: false
|
||||
default: 'present'
|
||||
choices: [ 'present', 'absent' ]
|
||||
notes:
|
||||
- Password parameter is required for state == present only
|
||||
- Passwords must be stored in clear text for this release
|
||||
- The ejabberd configuration file must include mod_admin_extra as a module.
|
||||
'''
|
||||
EXAMPLES = '''
|
||||
Example playbook entries using the ejabberd_user module to manage users state.
|
||||
|
||||
- name: create a user if it does not exists
|
||||
ejabberd_user:
|
||||
username: test
|
||||
host: server
|
||||
password: password
|
||||
|
||||
- name: delete a user if it exists
|
||||
ejabberd_user:
|
||||
username: test
|
||||
host: server
|
||||
state: absent
|
||||
'''
|
||||
|
||||
import syslog
|
||||
from ansible.module_utils.pycompat24 import get_exception
|
||||
from ansible.module_utils.basic import *
|
||||
|
||||
class EjabberdUserException(Exception):
|
||||
""" Base exeption for EjabberdUser class object """
|
||||
pass
|
||||
|
||||
class EjabberdUser(object):
|
||||
""" This object represents a user resource for an ejabberd server. The
|
||||
object manages user creation and deletion using ejabberdctl. The following
|
||||
commands are currently supported:
|
||||
* ejabberdctl register
|
||||
* ejabberdctl deregister
|
||||
"""
|
||||
|
||||
def __init__(self, module):
|
||||
self.module = module
|
||||
self.logging = module.params.get('logging')
|
||||
self.state = module.params.get('state')
|
||||
self.host = module.params.get('host')
|
||||
self.user = module.params.get('username')
|
||||
self.pwd = module.params.get('password')
|
||||
|
||||
@property
|
||||
def changed(self):
|
||||
""" This method will check the current user and see if the password has
|
||||
changed. It will return True if the user does not match the supplied
|
||||
credentials and False if it does not
|
||||
"""
|
||||
try:
|
||||
options = [self.user, self.host, self.pwd]
|
||||
(rc, out, err) = self.run_command('check_password', options)
|
||||
except EjabberdUserException:
|
||||
e = get_exception()
|
||||
(rc, out, err) = (1, None, "required attribute(s) missing")
|
||||
return rc
|
||||
|
||||
@property
|
||||
def exists(self):
|
||||
""" This method will check to see if the supplied username exists for
|
||||
host specified. If the user exists True is returned, otherwise False
|
||||
is returned
|
||||
"""
|
||||
try:
|
||||
options = [self.user, self.host]
|
||||
(rc, out, err) = self.run_command('check_account', options)
|
||||
except EjabberdUserException:
|
||||
e = get_exception()
|
||||
(rc, out, err) = (1, None, "required attribute(s) missing")
|
||||
return not bool(int(rc))
|
||||
|
||||
def log(self, entry):
|
||||
""" This method will log information to the local syslog facility """
|
||||
if self.logging:
|
||||
syslog.openlog('ansible-%s' % self.module._name)
|
||||
syslog.syslog(syslog.LOG_NOTICE, entry)
|
||||
|
||||
def run_command(self, cmd, options):
|
||||
""" This method will run the any command specified and return the
|
||||
returns using the Ansible common module
|
||||
"""
|
||||
if not all(options):
|
||||
raise EjabberdUserException
|
||||
|
||||
cmd = 'ejabberdctl %s ' % cmd
|
||||
cmd += " ".join(options)
|
||||
self.log('command: %s' % cmd)
|
||||
return self.module.run_command(cmd.split())
|
||||
|
||||
def update(self):
|
||||
""" The update method will update the credentials for the user provided
|
||||
"""
|
||||
try:
|
||||
options = [self.user, self.host, self.pwd]
|
||||
(rc, out, err) = self.run_command('change_password', options)
|
||||
except EjabberdUserException:
|
||||
e = get_exception()
|
||||
(rc, out, err) = (1, None, "required attribute(s) missing")
|
||||
return (rc, out, err)
|
||||
|
||||
def create(self):
|
||||
""" The create method will create a new user on the host with the
|
||||
password provided
|
||||
"""
|
||||
try:
|
||||
options = [self.user, self.host, self.pwd]
|
||||
(rc, out, err) = self.run_command('register', options)
|
||||
except EjabberdUserException:
|
||||
e = get_exception()
|
||||
(rc, out, err) = (1, None, "required attribute(s) missing")
|
||||
return (rc, out, err)
|
||||
|
||||
def delete(self):
|
||||
""" The delete method will delete the user from the host
|
||||
"""
|
||||
try:
|
||||
options = [self.user, self.host]
|
||||
(rc, out, err) = self.run_command('unregister', options)
|
||||
except EjabberdUserException:
|
||||
e = get_exception()
|
||||
(rc, out, err) = (1, None, "required attribute(s) missing")
|
||||
return (rc, out, err)
|
||||
|
||||
def main():
|
||||
module = AnsibleModule(
|
||||
argument_spec = dict(
|
||||
host=dict(default=None, type='str'),
|
||||
username=dict(default=None, type='str'),
|
||||
password=dict(default=None, type='str'),
|
||||
state=dict(default='present', choices=['present', 'absent']),
|
||||
logging=dict(default=False, type='bool')
|
||||
),
|
||||
supports_check_mode = True
|
||||
)
|
||||
|
||||
obj = EjabberdUser(module)
|
||||
|
||||
rc = None
|
||||
result = dict()
|
||||
|
||||
if obj.state == 'absent':
|
||||
if obj.exists:
|
||||
if module.check_mode:
|
||||
module.exit_json(changed=True)
|
||||
(rc, out, err) = obj.delete()
|
||||
if rc != 0:
|
||||
module.fail_json(msg=err, rc=rc)
|
||||
|
||||
elif obj.state == 'present':
|
||||
if not obj.exists:
|
||||
if module.check_mode:
|
||||
module.exit_json(changed=True)
|
||||
(rc, out, err) = obj.create()
|
||||
elif obj.changed:
|
||||
if module.check_mode:
|
||||
module.exit_json(changed=True)
|
||||
(rc, out, err) = obj.update()
|
||||
if rc is not None and rc != 0:
|
||||
module.fail_json(msg=err, rc=rc)
|
||||
|
||||
if rc is None:
|
||||
result['changed'] = False
|
||||
else:
|
||||
result['changed'] = True
|
||||
|
||||
module.exit_json(**result)
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
main()
|
||||
156
lib/ansible/modules/web_infrastructure/jboss.py
Normal file
156
lib/ansible/modules/web_infrastructure/jboss.py
Normal file
@@ -0,0 +1,156 @@
|
||||
#!/usr/bin/python
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
# (c) 2013, Jeroen Hoekx <jeroen.hoekx@dsquare.be>
|
||||
#
|
||||
# This file is part of Ansible
|
||||
#
|
||||
# Ansible is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation, either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# Ansible is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
ANSIBLE_METADATA = {'status': ['preview'],
|
||||
'supported_by': 'community',
|
||||
'version': '1.0'}
|
||||
|
||||
DOCUMENTATION = """
|
||||
module: jboss
|
||||
version_added: "1.4"
|
||||
short_description: deploy applications to JBoss
|
||||
description:
|
||||
- Deploy applications to JBoss standalone using the filesystem
|
||||
options:
|
||||
deployment:
|
||||
required: true
|
||||
description:
|
||||
- The name of the deployment
|
||||
src:
|
||||
required: false
|
||||
description:
|
||||
- The remote path of the application ear or war to deploy
|
||||
deploy_path:
|
||||
required: false
|
||||
default: /var/lib/jbossas/standalone/deployments
|
||||
description:
|
||||
- The location in the filesystem where the deployment scanner listens
|
||||
state:
|
||||
required: false
|
||||
choices: [ present, absent ]
|
||||
default: "present"
|
||||
description:
|
||||
- Whether the application should be deployed or undeployed
|
||||
notes:
|
||||
- "The JBoss standalone deployment-scanner has to be enabled in standalone.xml"
|
||||
- "Ensure no identically named application is deployed through the JBoss CLI"
|
||||
author: "Jeroen Hoekx (@jhoekx)"
|
||||
"""
|
||||
|
||||
EXAMPLES = """
|
||||
# Deploy a hello world application
|
||||
- jboss:
|
||||
src: /tmp/hello-1.0-SNAPSHOT.war
|
||||
deployment: hello.war
|
||||
state: present
|
||||
|
||||
# Update the hello world application
|
||||
- jboss:
|
||||
src: /tmp/hello-1.1-SNAPSHOT.war
|
||||
deployment: hello.war
|
||||
state: present
|
||||
|
||||
# Undeploy the hello world application
|
||||
- jboss:
|
||||
deployment: hello.war
|
||||
state: absent
|
||||
"""
|
||||
|
||||
import os
|
||||
import shutil
|
||||
import time
|
||||
|
||||
def is_deployed(deploy_path, deployment):
|
||||
return os.path.exists(os.path.join(deploy_path, "%s.deployed"%(deployment)))
|
||||
|
||||
def is_undeployed(deploy_path, deployment):
|
||||
return os.path.exists(os.path.join(deploy_path, "%s.undeployed"%(deployment)))
|
||||
|
||||
def is_failed(deploy_path, deployment):
|
||||
return os.path.exists(os.path.join(deploy_path, "%s.failed"%(deployment)))
|
||||
|
||||
def main():
|
||||
module = AnsibleModule(
|
||||
argument_spec = dict(
|
||||
src=dict(),
|
||||
deployment=dict(required=True),
|
||||
deploy_path=dict(default='/var/lib/jbossas/standalone/deployments'),
|
||||
state=dict(choices=['absent', 'present'], default='present'),
|
||||
),
|
||||
)
|
||||
|
||||
changed = False
|
||||
|
||||
src = module.params['src']
|
||||
deployment = module.params['deployment']
|
||||
deploy_path = module.params['deploy_path']
|
||||
state = module.params['state']
|
||||
|
||||
if state == 'present' and not src:
|
||||
module.fail_json(msg="Argument 'src' required.")
|
||||
|
||||
if not os.path.exists(deploy_path):
|
||||
module.fail_json(msg="deploy_path does not exist.")
|
||||
|
||||
deployed = is_deployed(deploy_path, deployment)
|
||||
|
||||
if state == 'present' and not deployed:
|
||||
if not os.path.exists(src):
|
||||
module.fail_json(msg='Source file %s does not exist.'%(src))
|
||||
if is_failed(deploy_path, deployment):
|
||||
### Clean up old failed deployment
|
||||
os.remove(os.path.join(deploy_path, "%s.failed"%(deployment)))
|
||||
|
||||
shutil.copyfile(src, os.path.join(deploy_path, deployment))
|
||||
while not deployed:
|
||||
deployed = is_deployed(deploy_path, deployment)
|
||||
if is_failed(deploy_path, deployment):
|
||||
module.fail_json(msg='Deploying %s failed.'%(deployment))
|
||||
time.sleep(1)
|
||||
changed = True
|
||||
|
||||
if state == 'present' and deployed:
|
||||
if module.sha1(src) != module.sha1(os.path.join(deploy_path, deployment)):
|
||||
os.remove(os.path.join(deploy_path, "%s.deployed"%(deployment)))
|
||||
shutil.copyfile(src, os.path.join(deploy_path, deployment))
|
||||
deployed = False
|
||||
while not deployed:
|
||||
deployed = is_deployed(deploy_path, deployment)
|
||||
if is_failed(deploy_path, deployment):
|
||||
module.fail_json(msg='Deploying %s failed.'%(deployment))
|
||||
time.sleep(1)
|
||||
changed = True
|
||||
|
||||
if state == 'absent' and deployed:
|
||||
os.remove(os.path.join(deploy_path, "%s.deployed"%(deployment)))
|
||||
while deployed:
|
||||
deployed = not is_undeployed(deploy_path, deployment)
|
||||
if is_failed(deploy_path, deployment):
|
||||
module.fail_json(msg='Undeploying %s failed.'%(deployment))
|
||||
time.sleep(1)
|
||||
changed = True
|
||||
|
||||
module.exit_json(changed=changed)
|
||||
|
||||
# import module snippets
|
||||
from ansible.module_utils.basic import *
|
||||
|
||||
if __name__ == '__main__':
|
||||
main()
|
||||
362
lib/ansible/modules/web_infrastructure/jenkins_job.py
Normal file
362
lib/ansible/modules/web_infrastructure/jenkins_job.py
Normal file
@@ -0,0 +1,362 @@
|
||||
#!/usr/bin/python
|
||||
#
|
||||
# This is a free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation, either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# This Ansible library is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this library. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
ANSIBLE_METADATA = {'status': ['preview'],
|
||||
'supported_by': 'community',
|
||||
'version': '1.0'}
|
||||
|
||||
DOCUMENTATION = '''
|
||||
---
|
||||
module: jenkins_job
|
||||
short_description: Manage jenkins jobs
|
||||
description:
|
||||
- Manage Jenkins jobs by using Jenkins REST API.
|
||||
requirements:
|
||||
- "python-jenkins >= 0.4.12"
|
||||
- "lxml >= 3.3.3"
|
||||
version_added: "2.2"
|
||||
author: "Sergio Millan Rodriguez (@sermilrod)"
|
||||
options:
|
||||
config:
|
||||
description:
|
||||
- config in XML format.
|
||||
- Required if job does not yet exist.
|
||||
- Mututally exclusive with C(enabled).
|
||||
- Considered if C(state=present).
|
||||
required: false
|
||||
enabled:
|
||||
description:
|
||||
- Whether the job should be enabled or disabled.
|
||||
- Mututally exclusive with C(config).
|
||||
- Considered if C(state=present).
|
||||
required: false
|
||||
name:
|
||||
description:
|
||||
- Name of the Jenkins job.
|
||||
required: true
|
||||
password:
|
||||
description:
|
||||
- Password to authenticate with the Jenkins server.
|
||||
required: false
|
||||
state:
|
||||
description:
|
||||
- Attribute that specifies if the job has to be created or deleted.
|
||||
required: false
|
||||
default: present
|
||||
choices: ['present', 'absent']
|
||||
token:
|
||||
description:
|
||||
- API token used to authenticate alternatively to password.
|
||||
required: false
|
||||
url:
|
||||
description:
|
||||
- Url where the Jenkins server is accessible.
|
||||
required: false
|
||||
default: http://localhost:8080
|
||||
user:
|
||||
description:
|
||||
- User to authenticate with the Jenkins server.
|
||||
required: false
|
||||
'''
|
||||
|
||||
EXAMPLES = '''
|
||||
# Create a jenkins job using basic authentication
|
||||
- jenkins_job:
|
||||
config: "{{ lookup('file', 'templates/test.xml') }}"
|
||||
name: test
|
||||
password: admin
|
||||
url: "http://localhost:8080"
|
||||
user: admin
|
||||
|
||||
# Create a jenkins job using the token
|
||||
- jenkins_job:
|
||||
config: "{{ lookup('template', 'templates/test.xml.j2') }}"
|
||||
name: test
|
||||
token: asdfasfasfasdfasdfadfasfasdfasdfc
|
||||
url: "http://localhost:8080"
|
||||
user: admin
|
||||
|
||||
# Delete a jenkins job using basic authentication
|
||||
- jenkins_job:
|
||||
name: test
|
||||
password: admin
|
||||
state: absent
|
||||
url: "http://localhost:8080"
|
||||
user: admin
|
||||
|
||||
# Delete a jenkins job using the token
|
||||
- jenkins_job:
|
||||
name: test
|
||||
token: asdfasfasfasdfasdfadfasfasdfasdfc
|
||||
state: absent
|
||||
url: "http://localhost:8080"
|
||||
user: admin
|
||||
|
||||
# Disable a jenkins job using basic authentication
|
||||
- jenkins_job:
|
||||
name: test
|
||||
password: admin
|
||||
enabled: false
|
||||
url: "http://localhost:8080"
|
||||
user: admin
|
||||
|
||||
# Disable a jenkins job using the token
|
||||
- jenkins_job:
|
||||
name: test
|
||||
token: asdfasfasfasdfasdfadfasfasdfasdfc
|
||||
enabled: false
|
||||
url: "http://localhost:8080"
|
||||
user: admin
|
||||
'''
|
||||
|
||||
RETURN = '''
|
||||
---
|
||||
name:
|
||||
description: Name of the jenkins job.
|
||||
returned: success
|
||||
type: string
|
||||
sample: test-job
|
||||
state:
|
||||
description: State of the jenkins job.
|
||||
returned: success
|
||||
type: string
|
||||
sample: present
|
||||
enabled:
|
||||
description: Whether the jenkins job is enabled or not.
|
||||
returned: success
|
||||
type: bool
|
||||
sample: true
|
||||
user:
|
||||
description: User used for authentication.
|
||||
returned: success
|
||||
type: string
|
||||
sample: admin
|
||||
url:
|
||||
description: Url to connect to the Jenkins server.
|
||||
returned: success
|
||||
type: string
|
||||
sample: https://jenkins.mydomain.com
|
||||
'''
|
||||
|
||||
try:
|
||||
import jenkins
|
||||
python_jenkins_installed = True
|
||||
except ImportError:
|
||||
python_jenkins_installed = False
|
||||
|
||||
try:
|
||||
from lxml import etree as ET
|
||||
python_lxml_installed = True
|
||||
except ImportError:
|
||||
python_lxml_installed = False
|
||||
|
||||
class JenkinsJob:
|
||||
def __init__(self, module):
|
||||
self.module = module
|
||||
|
||||
self.config = module.params.get('config')
|
||||
self.name = module.params.get('name')
|
||||
self.password = module.params.get('password')
|
||||
self.state = module.params.get('state')
|
||||
self.enabled = module.params.get('enabled')
|
||||
self.token = module.params.get('token')
|
||||
self.user = module.params.get('user')
|
||||
self.jenkins_url = module.params.get('url')
|
||||
self.server = self.get_jenkins_connection()
|
||||
|
||||
self.result = {
|
||||
'changed': False,
|
||||
'url': self.jenkins_url,
|
||||
'name': self.name,
|
||||
'user': self.user,
|
||||
'state': self.state,
|
||||
'diff': {
|
||||
'before': "",
|
||||
'after': ""
|
||||
}
|
||||
}
|
||||
|
||||
def get_jenkins_connection(self):
|
||||
try:
|
||||
if (self.user and self.password):
|
||||
return jenkins.Jenkins(self.jenkins_url, self.user, self.password)
|
||||
elif (self.user and self.token):
|
||||
return jenkins.Jenkins(self.jenkins_url, self.user, self.token)
|
||||
elif (self.user and not (self.password or self.token)):
|
||||
return jenkins.Jenkins(self.jenkins_url, self.user)
|
||||
else:
|
||||
return jenkins.Jenkins(self.jenkins_url)
|
||||
except Exception:
|
||||
e = get_exception()
|
||||
self.module.fail_json(msg='Unable to connect to Jenkins server, %s' % str(e))
|
||||
|
||||
def get_job_status(self):
|
||||
try:
|
||||
return self.server.get_job_info(self.name)['color'].encode('utf-8')
|
||||
except Exception:
|
||||
e = get_exception()
|
||||
self.module.fail_json(msg='Unable to fetch job information, %s' % str(e))
|
||||
|
||||
def job_exists(self):
|
||||
try:
|
||||
return bool(self.server.job_exists(self.name))
|
||||
except Exception:
|
||||
e = get_exception()
|
||||
self.module.fail_json(msg='Unable to validate if job exists, %s for %s' % (str(e), self.jenkins_url))
|
||||
|
||||
def get_config(self):
|
||||
return job_config_to_string(self.config)
|
||||
|
||||
def get_current_config(self):
|
||||
return job_config_to_string(self.server.get_job_config(self.name).encode('utf-8'))
|
||||
|
||||
def has_config_changed(self):
|
||||
# config is optional, if not provided we keep the current config as is
|
||||
if self.config is None:
|
||||
return False
|
||||
|
||||
config_file = self.get_config()
|
||||
machine_file = self.get_current_config()
|
||||
|
||||
self.result['diff']['after'] = config_file
|
||||
self.result['diff']['before'] = machine_file
|
||||
|
||||
if machine_file != config_file:
|
||||
return True
|
||||
return False
|
||||
|
||||
def present_job(self):
|
||||
if self.config is None and self.enabled is None:
|
||||
self.module.fail_json(msg='one of the following params is required on state=present: config,enabled')
|
||||
|
||||
if not self.job_exists():
|
||||
self.create_job()
|
||||
else:
|
||||
self.update_job()
|
||||
|
||||
def has_state_changed(self, status):
|
||||
# Keep in current state if enabled arg_spec is not given
|
||||
if self.enabled is None:
|
||||
return False
|
||||
|
||||
if ( (self.enabled == False and status != "disabled") or (self.enabled == True and status == "disabled") ):
|
||||
return True
|
||||
return False
|
||||
|
||||
def switch_state(self):
|
||||
if self.enabled == False:
|
||||
self.server.disable_job(self.name)
|
||||
else:
|
||||
self.server.enable_job(self.name)
|
||||
|
||||
def update_job(self):
|
||||
try:
|
||||
status = self.get_job_status()
|
||||
|
||||
# Handle job config
|
||||
if self.has_config_changed():
|
||||
self.result['changed'] = True
|
||||
if not self.module.check_mode:
|
||||
self.server.reconfig_job(self.name, self.get_config())
|
||||
|
||||
# Handle job disable/enable
|
||||
elif self.has_state_changed(status):
|
||||
self.result['changed'] = True
|
||||
if not self.module.check_mode:
|
||||
self.switch_state()
|
||||
|
||||
except Exception:
|
||||
e = get_exception()
|
||||
self.module.fail_json(msg='Unable to reconfigure job, %s for %s' % (str(e), self.jenkins_url))
|
||||
|
||||
def create_job(self):
|
||||
if self.config is None:
|
||||
self.module.fail_json(msg='missing required param: config')
|
||||
|
||||
self.result['changed'] = True
|
||||
try:
|
||||
config_file = self.get_config()
|
||||
self.result['diff']['after'] = config_file
|
||||
if not self.module.check_mode:
|
||||
self.server.create_job(self.name, config_file)
|
||||
except Exception:
|
||||
e = get_exception()
|
||||
self.module.fail_json(msg='Unable to create job, %s for %s' % (str(e), self.jenkins_url))
|
||||
|
||||
def absent_job(self):
|
||||
if self.job_exists():
|
||||
self.result['changed'] = True
|
||||
self.result['diff']['before'] = self.get_current_config()
|
||||
if not self.module.check_mode:
|
||||
try:
|
||||
self.server.delete_job(self.name)
|
||||
except Exception:
|
||||
e = get_exception()
|
||||
self.module.fail_json(msg='Unable to delete job, %s for %s' % (str(e), self.jenkins_url))
|
||||
|
||||
def get_result(self):
|
||||
result = self.result
|
||||
if self.job_exists():
|
||||
result['enabled'] = self.get_job_status() != "disabled"
|
||||
else:
|
||||
result['enabled'] = None
|
||||
return result
|
||||
|
||||
def test_dependencies(module):
|
||||
if not python_jenkins_installed:
|
||||
module.fail_json(msg="python-jenkins required for this module. "\
|
||||
"see http://python-jenkins.readthedocs.io/en/latest/install.html")
|
||||
|
||||
if not python_lxml_installed:
|
||||
module.fail_json(msg="lxml required for this module. "\
|
||||
"see http://lxml.de/installation.html")
|
||||
|
||||
def job_config_to_string(xml_str):
|
||||
return ET.tostring(ET.fromstring(xml_str))
|
||||
|
||||
def main():
|
||||
module = AnsibleModule(
|
||||
argument_spec = dict(
|
||||
config = dict(required=False),
|
||||
name = dict(required=True),
|
||||
password = dict(required=False, no_log=True),
|
||||
state = dict(required=False, choices=['present', 'absent'], default="present"),
|
||||
enabled = dict(required=False, type='bool'),
|
||||
token = dict(required=False, no_log=True),
|
||||
url = dict(required=False, default="http://localhost:8080"),
|
||||
user = dict(required=False)
|
||||
),
|
||||
mutually_exclusive = [
|
||||
['password', 'token'],
|
||||
['config', 'enabled'],
|
||||
],
|
||||
supports_check_mode=True,
|
||||
)
|
||||
|
||||
test_dependencies(module)
|
||||
jenkins_job = JenkinsJob(module)
|
||||
|
||||
if module.params.get('state') == "present":
|
||||
jenkins_job.present_job()
|
||||
else:
|
||||
jenkins_job.absent_job()
|
||||
|
||||
result = jenkins_job.get_result()
|
||||
module.exit_json(**result)
|
||||
|
||||
|
||||
from ansible.module_utils.basic import *
|
||||
if __name__ == '__main__':
|
||||
main()
|
||||
833
lib/ansible/modules/web_infrastructure/jenkins_plugin.py
Normal file
833
lib/ansible/modules/web_infrastructure/jenkins_plugin.py
Normal file
@@ -0,0 +1,833 @@
|
||||
#!/usr/bin/python
|
||||
# encoding: utf-8
|
||||
|
||||
# (c) 2016, Jiri Tyr <jiri.tyr@gmail.com>
|
||||
#
|
||||
# This file is part of Ansible
|
||||
#
|
||||
# Ansible is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation, either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# Ansible is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
|
||||
from ansible.module_utils.basic import AnsibleModule
|
||||
from ansible.module_utils.pycompat24 import get_exception
|
||||
from ansible.module_utils.urls import fetch_url
|
||||
from ansible.module_utils.urls import url_argument_spec
|
||||
import base64
|
||||
import hashlib
|
||||
import json
|
||||
import os
|
||||
import tempfile
|
||||
import time
|
||||
import urllib
|
||||
|
||||
|
||||
ANSIBLE_METADATA = {'status': ['preview'],
|
||||
'supported_by': 'community',
|
||||
'version': '1.0'}
|
||||
|
||||
DOCUMENTATION = '''
|
||||
---
|
||||
module: jenkins_plugin
|
||||
author: Jiri Tyr (@jtyr)
|
||||
version_added: '2.2'
|
||||
short_description: Add or remove Jenkins plugin
|
||||
description:
|
||||
- Ansible module which helps to manage Jenkins plugins.
|
||||
|
||||
options:
|
||||
group:
|
||||
required: false
|
||||
default: jenkins
|
||||
description:
|
||||
- Name of the Jenkins group on the OS.
|
||||
jenkins_home:
|
||||
required: false
|
||||
default: /var/lib/jenkins
|
||||
description:
|
||||
- Home directory of the Jenkins user.
|
||||
mode:
|
||||
required: false
|
||||
default: '0664'
|
||||
description:
|
||||
- File mode applied on versioned plugins.
|
||||
name:
|
||||
required: true
|
||||
description:
|
||||
- Plugin name.
|
||||
owner:
|
||||
required: false
|
||||
default: jenkins
|
||||
description:
|
||||
- Name of the Jenkins user on the OS.
|
||||
params:
|
||||
required: false
|
||||
default: null
|
||||
description:
|
||||
- Option used to allow the user to overwrite any of the other options. To
|
||||
remove an option, set the value of the option to C(null).
|
||||
state:
|
||||
required: false
|
||||
choices: [absent, present, pinned, unpinned, enabled, disabled, latest]
|
||||
default: present
|
||||
description:
|
||||
- Desired plugin state.
|
||||
- If the C(latest) is set, the check for new version will be performed
|
||||
every time. This is suitable to keep the plugin up-to-date.
|
||||
timeout:
|
||||
required: false
|
||||
default: 30
|
||||
description:
|
||||
- Server connection timeout in secs.
|
||||
updates_expiration:
|
||||
required: false
|
||||
default: 86400
|
||||
description:
|
||||
- Number of seconds after which a new copy of the I(update-center.json)
|
||||
file is downloaded. This is used to avoid the need to download the
|
||||
plugin to calculate its checksum when C(latest) is specified.
|
||||
- Set it to C(0) if no cache file should be used. In that case, the
|
||||
plugin file will always be downloaded to calculate its checksum when
|
||||
C(latest) is specified.
|
||||
updates_url:
|
||||
required: false
|
||||
default: https://updates.jenkins-ci.org
|
||||
description:
|
||||
- URL of the Update Centre.
|
||||
- Used as the base URL to download the plugins and the
|
||||
I(update-center.json) JSON file.
|
||||
url:
|
||||
required: false
|
||||
default: http://localhost:8080
|
||||
description:
|
||||
- URL of the Jenkins server.
|
||||
version:
|
||||
required: false
|
||||
default: null
|
||||
description:
|
||||
- Plugin version number.
|
||||
- If this option is specified, all plugin dependencies must be installed
|
||||
manually.
|
||||
- It might take longer to verify that the correct version is installed.
|
||||
This is especially true if a specific version number is specified.
|
||||
with_dependencies:
|
||||
required: false
|
||||
choices: ['yes', 'no']
|
||||
default: 'yes'
|
||||
description:
|
||||
- Defines whether to install plugin dependencies.
|
||||
|
||||
notes:
|
||||
- Plugin installation should be run under root or the same user which owns
|
||||
the plugin files on the disk. Only if the plugin is not installed yet and
|
||||
no version is specified, the API installation is performed which requires
|
||||
only the Web UI credentials.
|
||||
- It's necessary to notify the handler or call the I(service) module to
|
||||
restart the Jenkins service after a new plugin was installed.
|
||||
- Pinning works only if the plugin is installed and Jenkins service was
|
||||
successfully restarted after the plugin installation.
|
||||
- It is not possible to run the module remotely by changing the I(url)
|
||||
parameter to point to the Jenkins server. The module must be used on the
|
||||
host where Jenkins runs as it needs direct access to the plugin files.
|
||||
'''
|
||||
|
||||
EXAMPLES = '''
|
||||
- name: Install plugin
|
||||
jenkins_plugin:
|
||||
name: build-pipeline-plugin
|
||||
|
||||
- name: Install plugin without its dependencies
|
||||
jenkins_plugin:
|
||||
name: build-pipeline-plugin
|
||||
with_dependencies: no
|
||||
|
||||
- name: Make sure the plugin is always up-to-date
|
||||
jenkins_plugin:
|
||||
name: token-macro
|
||||
state: latest
|
||||
|
||||
- name: Install specific version of the plugin
|
||||
jenkins_plugin:
|
||||
name: token-macro
|
||||
version: 1.15
|
||||
|
||||
- name: Pin the plugin
|
||||
jenkins_plugin:
|
||||
name: token-macro
|
||||
state: pinned
|
||||
|
||||
- name: Unpin the plugin
|
||||
jenkins_plugin:
|
||||
name: token-macro
|
||||
state: unpinned
|
||||
|
||||
- name: Enable the plugin
|
||||
jenkins_plugin:
|
||||
name: token-macro
|
||||
state: enabled
|
||||
|
||||
- name: Disable the plugin
|
||||
jenkins_plugin:
|
||||
name: token-macro
|
||||
state: disabled
|
||||
|
||||
- name: Uninstall plugin
|
||||
jenkins_plugin:
|
||||
name: build-pipeline-plugin
|
||||
state: absent
|
||||
|
||||
#
|
||||
# Example of how to use the params
|
||||
#
|
||||
# Define a variable and specify all default parameters you want to use across
|
||||
# all jenkins_plugin calls:
|
||||
#
|
||||
# my_jenkins_params:
|
||||
# url_username: admin
|
||||
# url_password: p4ssw0rd
|
||||
# url: http://localhost:8888
|
||||
#
|
||||
- name: Install plugin
|
||||
jenkins_plugin:
|
||||
name: build-pipeline-plugin
|
||||
params: "{{ my_jenkins_params }}"
|
||||
|
||||
#
|
||||
# Example of a Play which handles Jenkins restarts during the state changes
|
||||
#
|
||||
- name: Jenkins Master play
|
||||
hosts: jenkins-master
|
||||
vars:
|
||||
my_jenkins_plugins:
|
||||
token-macro:
|
||||
enabled: yes
|
||||
build-pipeline-plugin:
|
||||
version: 1.4.9
|
||||
pinned: no
|
||||
enabled: yes
|
||||
tasks:
|
||||
- name: Install plugins without a specific version
|
||||
jenkins_plugin:
|
||||
name: "{{ item.key }}"
|
||||
register: my_jenkins_plugin_unversioned
|
||||
when: >
|
||||
'version' not in item.value
|
||||
with_dict: "{{ my_jenkins_plugins }}"
|
||||
|
||||
- name: Install plugins with a specific version
|
||||
jenkins_plugin:
|
||||
name: "{{ item.key }}"
|
||||
version: "{{ item.value['version'] }}"
|
||||
register: my_jenkins_plugin_versioned
|
||||
when: >
|
||||
'version' in item.value
|
||||
with_dict: "{{ my_jenkins_plugins }}"
|
||||
|
||||
- name: Initiate the fact
|
||||
set_fact:
|
||||
jenkins_restart_required: no
|
||||
|
||||
- name: Check if restart is required by any of the versioned plugins
|
||||
set_fact:
|
||||
jenkins_restart_required: yes
|
||||
when: item.changed
|
||||
with_items: "{{ my_jenkins_plugin_versioned.results }}"
|
||||
|
||||
- name: Check if restart is required by any of the unversioned plugins
|
||||
set_fact:
|
||||
jenkins_restart_required: yes
|
||||
when: item.changed
|
||||
with_items: "{{ my_jenkins_plugin_unversioned.results }}"
|
||||
|
||||
- name: Restart Jenkins if required
|
||||
service:
|
||||
name: jenkins
|
||||
state: restarted
|
||||
when: jenkins_restart_required
|
||||
|
||||
- name: Wait for Jenkins to start up
|
||||
uri:
|
||||
url: http://localhost:8080
|
||||
status_code: 200
|
||||
timeout: 5
|
||||
register: jenkins_service_status
|
||||
# Keep trying for 5 mins in 5 sec intervals
|
||||
retries: 60
|
||||
delay: 5
|
||||
until: >
|
||||
'status' in jenkins_service_status and
|
||||
jenkins_service_status['status'] == 200
|
||||
when: jenkins_restart_required
|
||||
|
||||
- name: Reset the fact
|
||||
set_fact:
|
||||
jenkins_restart_required: no
|
||||
when: jenkins_restart_required
|
||||
|
||||
- name: Plugin pinning
|
||||
jenkins_plugin:
|
||||
name: "{{ item.key }}"
|
||||
state: "{{ 'pinned' if item.value['pinned'] else 'unpinned'}}"
|
||||
when: >
|
||||
'pinned' in item.value
|
||||
with_dict: "{{ my_jenkins_plugins }}"
|
||||
|
||||
- name: Plugin enabling
|
||||
jenkins_plugin:
|
||||
name: "{{ item.key }}"
|
||||
state: "{{ 'enabled' if item.value['enabled'] else 'disabled'}}"
|
||||
when: >
|
||||
'enabled' in item.value
|
||||
with_dict: "{{ my_jenkins_plugins }}"
|
||||
'''
|
||||
|
||||
RETURN = '''
|
||||
plugin:
|
||||
description: plugin name
|
||||
returned: success
|
||||
type: string
|
||||
sample: build-pipeline-plugin
|
||||
state:
|
||||
description: state of the target, after execution
|
||||
returned: success
|
||||
type: string
|
||||
sample: "present"
|
||||
'''
|
||||
|
||||
|
||||
class JenkinsPlugin(object):
|
||||
def __init__(self, module):
    """Gather the server state needed by the plugin actions.

    :param module: AnsibleModule instance (provides params and fail_json).
    """
    # To be able to call fail_json
    self.module = module

    # Shortcuts for the params
    self.params = self.module.params
    self.url = self.params['url']
    self.timeout = self.params['timeout']

    # CSRF crumb headers; stays an empty dict when CSRF protection is off.
    self.crumb = {}

    if self._csrf_enabled():
        self.crumb = self._get_crumb()

    # Populates self.is_installed / self.is_pinned / self.is_enabled.
    self._get_installed_plugins()
def _csrf_enabled(self):
    """Ask the Jenkins root API whether CSRF crumbs are required."""
    api_url = "%s/%s" % (self.url, "api/json")
    root_info = self._get_json_data(api_url, 'CSRF')
    return root_info["useCrumbs"]
def _get_json_data(self, url, what, **kwargs):
    """Fetch *url* and decode the response body as JSON.

    *what* is a human-readable label used in error messages.
    Fails the module when the body is not valid JSON.
    """
    response = self._get_url_data(url, what, **kwargs)

    try:
        decoded = json.load(response)
    except Exception:
        err = get_exception()
        self.module.fail_json(
            msg="Cannot parse %s JSON data." % what,
            details=err.message)

    return decoded
def _get_url_data(
        self, url, what=None, msg_status=None, msg_exception=None,
        **kwargs):
    """HTTP helper around fetch_url that fails the module on any problem.

    :param url: target URL.
    :param what: human-readable label used to build default messages.
    :param msg_status: message used when the server replies non-200.
    :param msg_exception: message used when the request itself raises.
    :param kwargs: forwarded to fetch_url (e.g. ``data=`` makes it a POST).
    :returns: the open response object on success.
    """
    # Compose default messages
    if msg_status is None:
        msg_status = "Cannot get %s" % what

    if msg_exception is None:
        msg_exception = "Retrieval of %s failed." % what

    # Get the URL data
    try:
        response, info = fetch_url(
            self.module, url, timeout=self.timeout, **kwargs)

        # Any non-200 status is treated as a hard failure.
        if info['status'] != 200:
            self.module.fail_json(msg=msg_status, details=info['msg'])
    except Exception:
        e = get_exception()
        self.module.fail_json(msg=msg_exception, details=e.message)

    return response
def _get_crumb(self):
    """Fetch the CSRF protection crumb from Jenkins.

    :returns: dict mapping the crumb request field name to the crumb
        value, suitable for merging into the POST data of later requests.
    Fails the module when the crumb issuer response is malformed.
    """
    crumb_data = self._get_json_data(
        "%s/%s" % (self.url, "crumbIssuer/api/json"), 'Crumb')

    if 'crumbRequestField' in crumb_data and 'crumb' in crumb_data:
        ret = {
            crumb_data['crumbRequestField']: crumb_data['crumb']
        }
    else:
        # Fixed typo in the user-facing error message ("Crum" -> "Crumb").
        self.module.fail_json(
            msg="Required fields not found in the Crumb response.",
            details=crumb_data)

    return ret
def _get_installed_plugins(self):
    """Query the plugin manager and record on *self* whether the managed
    plugin is installed, pinned and enabled (three boolean flags)."""
    plugins_data = self._get_json_data(
        "%s/%s" % (self.url, "pluginManager/api/json?depth=1"),
        'list of plugins')

    # A response without a 'plugins' key means the query failed upstream.
    if 'plugins' not in plugins_data:
        self.module.fail_json(msg="No valid plugin data found.")

    # Default to "not present" until the plugin is found in the listing.
    self.is_installed = False
    self.is_pinned = False
    self.is_enabled = False

    wanted = self.params['name']
    for plugin in plugins_data['plugins']:
        if plugin['shortName'] != wanted:
            continue

        self.is_installed = True
        if plugin['pinned']:
            self.is_pinned = True
        if plugin['enabled']:
            self.is_enabled = True
        break
def install(self):
    """Install or update the plugin; return True when anything changed.

    Two strategies:
      * no version requested and plugin absent -> install through the
        Jenkins script console (dependencies handled server-side);
      * otherwise -> download the .hpi from the Update Centre into
        $JENKINS_HOME/plugins, using checksums to detect changes.
    """
    changed = False
    plugin_file = (
        '%s/plugins/%s.jpi' % (
            self.params['jenkins_home'],
            self.params['name']))

    if not self.is_installed and self.params['version'] is None:
        if not self.module.check_mode:
            # Install the plugin (with dependencies) via the script console.
            install_script = (
                'd = Jenkins.instance.updateCenter.getPlugin("%s")'
                '.deploy(); d.get();' % self.params['name'])

            if self.params['with_dependencies']:
                install_script = (
                    'Jenkins.instance.updateCenter.getPlugin("%s")'
                    '.getNeededDependencies().each{it.deploy()}; %s' % (
                        self.params['name'], install_script))

            script_data = {
                'script': install_script
            }
            # Include the CSRF crumb when the server requires it.
            script_data.update(self.crumb)
            data = urllib.urlencode(script_data)

            # Send the installation request
            r = self._get_url_data(
                "%s/scriptText" % self.url,
                msg_status="Cannot install plugin.",
                msg_exception="Plugin installation has failed.",
                data=data)

        changed = True
    else:
        # File-based management: requires direct access to the plugin dir.
        if not os.path.isdir(self.params['jenkins_home']):
            self.module.fail_json(
                msg="Jenkins home directory doesn't exist.")

        md5sum_old = None
        if os.path.isfile(plugin_file):
            # Make the checksum of the currently installed plugin
            md5sum_old = hashlib.md5(
                open(plugin_file, 'rb').read()).hexdigest()

        if self.params['version'] in [None, 'latest']:
            # Take latest version
            plugin_url = (
                "%s/latest/%s.hpi" % (
                    self.params['updates_url'],
                    self.params['name']))
        else:
            # Take specific version
            plugin_url = (
                "{0}/download/plugins/"
                "{1}/{2}/{1}.hpi".format(
                    self.params['updates_url'],
                    self.params['name'],
                    self.params['version']))

        # Direct download when caching is disabled, a pinned version is
        # requested, or there is no local file to compare against.
        if (
                self.params['updates_expiration'] == 0 or
                self.params['version'] not in [None, 'latest'] or
                md5sum_old is None):

            # Download the plugin file directly
            r = self._download_plugin(plugin_url)

            # Write downloaded plugin into file if checksums don't match
            if md5sum_old is None:
                # No previously installed plugin
                if not self.module.check_mode:
                    self._write_file(plugin_file, r)

                changed = True
            else:
                # Get data for the MD5
                data = r.read()

                # Make new checksum
                md5sum_new = hashlib.md5(data).hexdigest()

                # If the checksum is different from the currently installed
                # plugin, store the new plugin
                if md5sum_old != md5sum_new:
                    if not self.module.check_mode:
                        self._write_file(plugin_file, data)

                    changed = True
        else:
            # Check for update from the updates JSON file
            plugin_data = self._download_updates()

            try:
                sha1_old = hashlib.sha1(open(plugin_file, 'rb').read())
            except Exception:
                e = get_exception()
                self.module.fail_json(
                    msg="Cannot calculate SHA1 of the old plugin.",
                    details=e.message)

            # NOTE(review): compares a base64 digest against the update
            # centre's 'sha1' field; on Python 3 b64encode returns bytes,
            # which would never equal a str -- presumably targets
            # Python 2. TODO confirm.
            sha1sum_old = base64.b64encode(sha1_old.digest())

            # If the latest version changed, download it
            if sha1sum_old != plugin_data['sha1']:
                if not self.module.check_mode:
                    r = self._download_plugin(plugin_url)
                    self._write_file(plugin_file, r)

                changed = True

    # Change file attributes if needed
    if os.path.isfile(plugin_file):
        params = {
            'dest': plugin_file
        }
        params.update(self.params)
        file_args = self.module.load_file_common_arguments(params)

        if not self.module.check_mode:
            # Not sure how to run this in the check mode
            changed = self.module.set_fs_attributes_if_different(
                file_args, changed)
        else:
            # See the comment above
            changed = True

    return changed
def _download_updates(self):
    """Return the Update Centre metadata entry for the managed plugin.

    Downloads update-center.json (cached under ~/.ansible/tmp for
    ``updates_expiration`` seconds) and returns
    ``data['plugins'][name]``. Fails the module on download, parse or
    lookup problems.
    """
    updates_filename = 'jenkins-plugin-cache.json'
    updates_dir = os.path.expanduser('~/.ansible/tmp')
    updates_file = "%s/%s" % (updates_dir, updates_filename)
    download_updates = True

    # Reuse the cached copy while it is younger than updates_expiration.
    if os.path.isfile(updates_file):
        ts_file = os.stat(updates_file).st_mtime
        ts_now = time.time()

        if ts_now - ts_file < self.params['updates_expiration']:
            download_updates = False

    updates_file_orig = updates_file

    # Download the updates file if needed
    if download_updates:
        url = "%s/update-center.json" % self.params['updates_url']

        # Get the data
        r = self._get_url_data(
            url,
            msg_status="Remote updates not found.",
            msg_exception="Updates download failed.")

        # BUGFIX: tempfile.mkstemp() returns an (fd, path) tuple. The
        # previous code assigned the whole tuple to updates_file and then
        # passed it to open(), raising TypeError. Unpack it and wrap the
        # descriptor with os.fdopen() instead.
        updates_fd, updates_file = tempfile.mkstemp()

        try:
            fd = os.fdopen(updates_fd, 'wb')
        except (IOError, OSError):
            e = get_exception()
            self.module.fail_json(
                msg="Cannot open the tmp updates file %s." % updates_file,
                details=str(e))

        fd.write(r.read())

        try:
            fd.close()
        except IOError:
            e = get_exception()
            self.module.fail_json(
                msg="Cannot close the tmp updates file %s." % updates_file,
                detail=str(e))

    # Open the updates file
    try:
        f = open(updates_file)
    except IOError:
        e = get_exception()
        self.module.fail_json(
            msg="Cannot open temporal updates file.",
            details=str(e))

    # update-center.json is wrapped in a JSONP-style envelope; the actual
    # JSON document sits on the second line of the file.
    data = None
    i = 0
    for line in f:
        # Read only the second line
        if i == 1:
            try:
                data = json.loads(line)
            except Exception:
                e = get_exception()
                self.module.fail_json(
                    msg="Cannot load JSON data from the tmp updates file.",
                    details=e.message)

            break

        i += 1

    # Guard against a truncated/corrupt cache file, in which case the
    # loop above never assigned `data`.
    if data is None:
        self.module.fail_json(
            msg="Cannot load JSON data from the tmp updates file.")

    # Move the updates file to the right place if we could read it
    if download_updates:
        # Make sure the destination directory exists
        if not os.path.isdir(updates_dir):
            try:
                os.makedirs(updates_dir, int('0700', 8))
            except OSError:
                e = get_exception()
                self.module.fail_json(
                    msg="Cannot create temporal directory.",
                    details=e.message)

        self.module.atomic_move(updates_file, updates_file_orig)

    # Check if we have the plugin data available
    if 'plugins' not in data or self.params['name'] not in data['plugins']:
        self.module.fail_json(
            msg="Cannot find plugin data in the updates file.")

    return data['plugins'][self.params['name']]
def _download_plugin(self, plugin_url):
    """Download the plugin archive and return the open response object."""
    return self._get_url_data(
        plugin_url,
        msg_status="Plugin not found.",
        msg_exception="Plugin download failed.")
def _write_file(self, f, data):
    """Atomically write plugin content to path *f*.

    :param f: destination file path.
    :param data: either a byte string or a file-like response object.
    """
    # BUGFIX: tempfile.mkstemp() returns an (fd, path) tuple; the previous
    # code passed the whole tuple to open(), raising TypeError. Unpack it
    # and wrap the descriptor with os.fdopen() instead.
    tmp_fd, tmp_f = tempfile.mkstemp()

    try:
        fd = os.fdopen(tmp_fd, 'wb')
    except (IOError, OSError):
        e = get_exception()
        self.module.fail_json(
            msg='Cannot open the temporal plugin file %s.' % tmp_f,
            details=str(e))

    # Accept either raw content or an open response object.
    if isinstance(data, str):
        d = data
    else:
        d = data.read()

    fd.write(d)

    try:
        fd.close()
    except IOError:
        e = get_exception()
        self.module.fail_json(
            msg='Cannot close the temporal plugin file %s.' % tmp_f,
            details=str(e))

    # Move the file onto the right place
    self.module.atomic_move(tmp_f, f)
def uninstall(self):
    """Uninstall the plugin via the plugin manager (no-op when absent).

    :returns: True when a change was made (or would be, in check mode).
    """
    if not self.is_installed:
        return False

    if not self.module.check_mode:
        self._pm_query('doUninstall', 'Uninstallation')

    return True
def pin(self):
    """Pin the plugin; returns True when the state changed."""
    return self._pinning('pin')
def unpin(self):
    """Unpin the plugin; returns True when the state changed."""
    return self._pinning('unpin')
def _pinning(self, action):
    """Apply a pin/unpin transition when the current state differs.

    :param action: 'pin' or 'unpin'.
    :returns: True when a change was made (or would be, in check mode).
    """
    needs_change = (
        (action == 'pin' and not self.is_pinned) or
        (action == 'unpin' and self.is_pinned))

    if not needs_change:
        return False

    if not self.module.check_mode:
        # e.g. action='pin' -> progress message "Pinning"
        self._pm_query(action, "%sning" % action.capitalize())

    return True
def enable(self):
    """Enable the plugin; returns True when the state changed."""
    return self._enabling('enable')
def disable(self):
    """Disable the plugin; returns True when the state changed."""
    return self._enabling('disable')
def _enabling(self, action):
    """Apply an enable/disable transition when the current state differs.

    :param action: 'enable' or 'disable'.
    :returns: True when a change was made (or would be, in check mode).
    """
    needs_change = (
        (action == 'enable' and not self.is_enabled) or
        (action == 'disable' and self.is_enabled))

    if not needs_change:
        return False

    if not self.module.check_mode:
        # e.g. action='enable' -> endpoint 'makeEnabled', message 'Enabling'
        self._pm_query(
            "make%sd" % action.capitalize(),
            "%sing" % action[:-1].capitalize())

    return True
def _pm_query(self, action, msg):
    """POST a plugin-manager action for the managed plugin.

    :param action: plugin-manager endpoint suffix (e.g. 'doUninstall').
    :param msg: human-readable action label used in error messages.
    """
    endpoint = "%s/pluginManager/plugin/%s/%s" % (
        self.params['url'], self.params['name'], action)
    # Only the CSRF crumb goes into the POST body.
    payload = urllib.urlencode(self.crumb)

    self._get_url_data(
        endpoint,
        msg_status="Plugin not found. %s" % endpoint,
        msg_exception="%s has failed." % msg,
        data=payload)
def main():
    # Module arguments: start from the shared URL spec (url_username,
    # url_password, validate_certs, ...) and add plugin-specific options.
    argument_spec = url_argument_spec()
    argument_spec.update(
        group=dict(default='jenkins'),
        jenkins_home=dict(default='/var/lib/jenkins'),
        # NOTE(review): default here is '0644' while DOCUMENTATION states
        # '0664' -- confirm which one is intended.
        mode=dict(default='0644', type='raw'),
        name=dict(required=True),
        owner=dict(default='jenkins'),
        params=dict(type='dict'),
        state=dict(
            choices=[
                'present',
                'absent',
                'pinned',
                'unpinned',
                'enabled',
                'disabled',
                'latest'],
            default='present'),
        timeout=dict(default=30, type="int"),
        updates_expiration=dict(default=86400, type="int"),
        updates_url=dict(default='https://updates.jenkins-ci.org'),
        url=dict(default='http://localhost:8080'),
        url_password=dict(no_log=True),
        version=dict(),
        with_dependencies=dict(default=True, type='bool'),
    )
    # Module settings
    module = AnsibleModule(
        argument_spec=argument_spec,
        add_file_common_args=True,
        supports_check_mode=True,
    )

    # Update module parameters by user's parameters if defined
    if 'params' in module.params and isinstance(module.params['params'], dict):
        module.params.update(module.params['params'])
        # Remove the params
        module.params.pop('params', None)

    # Force basic authentication
    module.params['force_basic_auth'] = True

    # Convert timeout to float
    try:
        module.params['timeout'] = float(module.params['timeout'])
    except ValueError:
        e = get_exception()
        module.fail_json(
            msg='Cannot convert %s to float.' % module.params['timeout'],
            details=str(e))

    # 'latest' is implemented as 'present' with a rolling version marker.
    if module.params['state'] == 'latest':
        module.params['state'] = 'present'
        module.params['version'] = 'latest'

    # Create some shortcuts
    name = module.params['name']
    state = module.params['state']

    # Initial change state of the task
    changed = False

    # Instantiate the JenkinsPlugin object
    jp = JenkinsPlugin(module)

    # Perform action depending on the requested state
    if state == 'present':
        changed = jp.install()
    elif state == 'absent':
        changed = jp.uninstall()
    elif state == 'pinned':
        changed = jp.pin()
    elif state == 'unpinned':
        changed = jp.unpin()
    elif state == 'enabled':
        changed = jp.enable()
    elif state == 'disabled':
        changed = jp.disable()

    # Print status of the change
    module.exit_json(changed=changed, plugin=name, state=state)
if __name__ == '__main__':
|
||||
main()
|
||||
431
lib/ansible/modules/web_infrastructure/jira.py
Executable file
431
lib/ansible/modules/web_infrastructure/jira.py
Executable file
@@ -0,0 +1,431 @@
|
||||
#!/usr/bin/python
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
# (c) 2014, Steve Smith <ssmith@atlassian.com>
|
||||
# Atlassian open-source approval reference OSR-76.
|
||||
#
|
||||
# This file is part of Ansible.
|
||||
#
|
||||
# Ansible is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation, either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# Ansible is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
|
||||
#
|
||||
|
||||
ANSIBLE_METADATA = {'status': ['preview'],
|
||||
'supported_by': 'community',
|
||||
'version': '1.0'}
|
||||
|
||||
DOCUMENTATION = """
|
||||
module: jira
|
||||
version_added: "1.6"
|
||||
short_description: create and modify issues in a JIRA instance
|
||||
description:
|
||||
- Create and modify issues in a JIRA instance.
|
||||
|
||||
options:
|
||||
uri:
|
||||
required: true
|
||||
description:
|
||||
- Base URI for the JIRA instance
|
||||
|
||||
operation:
|
||||
required: true
|
||||
aliases: [ command ]
|
||||
choices: [ create, comment, edit, fetch, transition ]
|
||||
description:
|
||||
- The operation to perform.
|
||||
|
||||
username:
|
||||
required: true
|
||||
description:
|
||||
- The username to log-in with.
|
||||
|
||||
password:
|
||||
required: true
|
||||
description:
|
||||
- The password to log-in with.
|
||||
|
||||
project:
|
||||
aliases: [ prj ]
|
||||
required: false
|
||||
description:
|
||||
- The project for this operation. Required for issue creation.
|
||||
|
||||
summary:
|
||||
required: false
|
||||
description:
|
||||
- The issue summary, where appropriate.
|
||||
|
||||
description:
|
||||
required: false
|
||||
description:
|
||||
- The issue description, where appropriate.
|
||||
|
||||
issuetype:
|
||||
required: false
|
||||
description:
|
||||
- The issue type, for issue creation.
|
||||
|
||||
issue:
|
||||
required: false
|
||||
description:
|
||||
- An existing issue key to operate on.
|
||||
|
||||
comment:
|
||||
required: false
|
||||
description:
|
||||
- The comment text to add.
|
||||
|
||||
status:
|
||||
required: false
|
||||
description:
|
||||
- The desired status; only relevant for the transition operation.
|
||||
|
||||
assignee:
|
||||
required: false
|
||||
description:
|
||||
- Sets the assignee on create or transition operations. Note not all transitions will allow this.
|
||||
|
||||
linktype:
|
||||
required: false
|
||||
version_added: 2.3
|
||||
description:
|
||||
- Set type of link, when action 'link' selected
|
||||
|
||||
inwardissue:
|
||||
required: false
|
||||
version_added: 2.3
|
||||
description:
|
||||
- set issue from which link will be created
|
||||
|
||||
outwardissue:
|
||||
required: false
|
||||
version_added: 2.3
|
||||
description:
|
||||
- set issue to which link will be created
|
||||
|
||||
fields:
|
||||
required: false
|
||||
description:
|
||||
- This is a free-form data structure that can contain arbitrary data. This is passed directly to the JIRA REST API (possibly after merging with other required data, as when passed to create). See examples for more information, and the JIRA REST API for the structure required for various fields.
|
||||
|
||||
notes:
|
||||
- "Currently this only works with basic-auth."
|
||||
|
||||
author: "Steve Smith (@tarka)"
|
||||
"""
|
||||
|
||||
EXAMPLES = """
|
||||
# Create a new issue and add a comment to it:
|
||||
- name: Create an issue
|
||||
jira:
|
||||
uri: '{{ server }}'
|
||||
username: '{{ user }}'
|
||||
password: '{{ pass }}'
|
||||
project: ANS
|
||||
operation: create
|
||||
summary: Example Issue
|
||||
description: Created using Ansible
|
||||
issuetype: Task
|
||||
register: issue
|
||||
|
||||
- name: Comment on issue
|
||||
jira:
|
||||
uri: '{{ server }}'
|
||||
username: '{{ user }}'
|
||||
password: '{{ pass }}'
|
||||
issue: '{{ issue.meta.key }}'
|
||||
operation: comment
|
||||
comment: A comment added by Ansible
|
||||
|
||||
# Assign an existing issue using edit
|
||||
- name: Assign an issue using free-form fields
|
||||
jira:
|
||||
uri: '{{ server }}'
|
||||
username: '{{ user }}'
|
||||
password: '{{ pass }}'
|
||||
issue: '{{ issue.meta.key}}'
|
||||
operation: edit
|
||||
assignee: ssmith
|
||||
|
||||
# Create an issue with an existing assignee
|
||||
- name: Create an assigned issue
|
||||
jira:
|
||||
uri: '{{ server }}'
|
||||
username: '{{ user }}'
|
||||
password: '{{ pass }}'
|
||||
project: ANS
|
||||
operation: create
|
||||
summary: Assigned issue
|
||||
description: Created and assigned using Ansible
|
||||
issuetype: Task
|
||||
assignee: ssmith
|
||||
|
||||
# Edit an issue
|
||||
- name: Set the labels on an issue using free-form fields
|
||||
jira:
|
||||
uri: '{{ server }}'
|
||||
username: '{{ user }}'
|
||||
password: '{{ pass }}'
|
||||
issue: '{{ issue.meta.key }}'
|
||||
operation: edit
|
||||
args:
|
||||
fields:
|
||||
labels:
|
||||
- autocreated
|
||||
- ansible
|
||||
|
||||
# Retrieve metadata for an issue and use it to create an account
|
||||
- name: Get an issue
|
||||
jira:
|
||||
uri: '{{ server }}'
|
||||
username: '{{ user }}'
|
||||
password: '{{ pass }}'
|
||||
project: ANS
|
||||
operation: fetch
|
||||
issue: ANS-63
|
||||
register: issue
|
||||
|
||||
- name: Create a unix account for the reporter
|
||||
become: true
|
||||
user:
|
||||
name: '{{ issue.meta.fields.creator.name }}'
|
||||
comment: '{{issue.meta.fields.creator.displayName }}'
|
||||
|
||||
- name: Create link from HSP-1 to MKY-1
|
||||
jira: uri={{server}} username={{user}} password={{pass}} operation=link
|
||||
linktype=Relate inwardissue=HSP-1 outwardissue=MKY-1
|
||||
|
||||
# Transition an issue by target status
|
||||
- name: Close the issue
|
||||
jira:
|
||||
uri: '{{ server }}'
|
||||
username: '{{ user }}'
|
||||
password: '{{ pass }}'
|
||||
issue: '{{ issue.meta.key }}'
|
||||
operation: transition
|
||||
status: Done
|
||||
"""
|
||||
|
||||
try:
|
||||
import json
|
||||
except ImportError:
|
||||
try:
|
||||
import simplejson as json
|
||||
except ImportError:
|
||||
# Let snippet from module_utils/basic.py return a proper error in this case
|
||||
pass
|
||||
|
||||
import base64
|
||||
|
||||
from ansible.module_utils.basic import *
|
||||
from ansible.module_utils.urls import *
|
||||
from ansible.module_utils.pycompat24 import get_exception
|
||||
|
||||
def request(url, user, passwd, data=None, method=None):
    """Perform an authenticated request against the JIRA REST API.

    :param url: full REST endpoint URL
    :param user: JIRA username for HTTP basic auth
    :param passwd: JIRA password for HTTP basic auth
    :param data: optional dict, JSON-serialized into the request body
    :param method: HTTP method (GET when None, per fetch_url defaults)
    :return: parsed JSON response as a dict ({} when the body is empty)

    Calls module.fail_json() (which does not return) on any non-2xx
    status outside (200, 201, 204).  Relies on the module-level global
    ``module`` being set by main().
    """
    if data:
        data = json.dumps(data)

    # NOTE: fetch_url uses a password manager, which follows the
    # standard request-then-challenge basic-auth semantics. However as
    # JIRA allows some unauthorised operations it doesn't necessarily
    # send the challenge, so the request occurs as the anonymous user,
    # resulting in unexpected results. To work around this we manually
    # inject the basic-auth header up-front to ensure that JIRA treats
    # the requests as authorized for this user.
    #
    # base64.b64encode replaces the deprecated base64.encodestring
    # (removed in Python 3.9); it never inserts newlines, so the old
    # .replace('\n', '') post-processing is no longer needed.
    auth = base64.b64encode(('%s:%s' % (user, passwd)).encode('utf-8')).decode('ascii')
    response, info = fetch_url(module, url, data=data, method=method,
                               headers={'Content-Type': 'application/json',
                                        'Authorization': "Basic %s" % auth})

    if info['status'] not in (200, 201, 204):
        module.fail_json(msg=info['msg'])

    body = response.read()

    if body:
        return json.loads(body)
    else:
        return {}
|
||||
|
||||
def post(url, user, passwd, data):
    """Send an HTTP POST with a JSON body to the JIRA API."""
    return request(url, user, passwd, data, 'POST')
|
||||
|
||||
def put(url, user, passwd, data):
    """Send an HTTP PUT with a JSON body to the JIRA API."""
    return request(url, user, passwd, data, 'PUT')
|
||||
|
||||
def get(url, user, passwd):
    """Send an HTTP GET to the JIRA API and return the parsed body."""
    return request(url, user, passwd)
|
||||
|
||||
|
||||
def create(restbase, user, passwd, params):
    """Create a new JIRA issue.

    Builds the standard field set (project, summary, description,
    issuetype) and merges any free-form ``fields`` over it, so callers
    can override or extend the defaults.  Returns the REST response.
    """
    issue_fields = {
        'project': {'key': params['project']},
        'summary': params['summary'],
        'description': params['description'],
        'issuetype': {'name': params['issuetype']},
    }

    # Free-form fields take precedence over the standard ones above.
    extra = params['fields']
    if extra:
        issue_fields.update(extra)

    return post(restbase + '/issue/', user, passwd, {'fields': issue_fields})
|
||||
|
||||
|
||||
def comment(restbase, user, passwd, params):
    """Add a comment (params['comment']) to the issue params['issue']."""
    url = '%s/issue/%s/comment' % (restbase, params['issue'])
    return post(url, user, passwd, {'body': params['comment']})
|
||||
|
||||
|
||||
def edit(restbase, user, passwd, params):
    """Update the fields of the issue params['issue'] via HTTP PUT."""
    url = '%s/issue/%s' % (restbase, params['issue'])
    return put(url, user, passwd, {'fields': params['fields']})
|
||||
|
||||
|
||||
def fetch(restbase, user, passwd, params):
    """Retrieve the full JSON representation of the issue params['issue']."""
    return get('%s/issue/%s' % (restbase, params['issue']), user, passwd)
|
||||
|
||||
|
||||
def transition(restbase, user, passwd, params):
    """Move the issue params['issue'] to the target status params['status'].

    JIRA transitions are addressed by id, not by name, so the available
    transitions are fetched first and matched by name against the
    requested status.

    :raises ValueError: if no transition leads to the requested status
    :return: the REST response of the transition POST
    """
    # Find the transition id matching the requested target status.
    turl = restbase + '/issue/' + params['issue'] + "/transitions"
    tmeta = get(turl, user, passwd)

    target = params['status']
    tid = None
    for t in tmeta['transitions']:
        if t['name'] == target:
            tid = t['id']
            break

    if not tid:
        # Fixed message grammar ("Failed find" -> "Failed to find").
        raise ValueError("Failed to find valid transition for '%s'" % target)

    # Perform it.  The endpoint is the same URL the transitions were
    # listed from, so reuse it instead of rebuilding the string.
    data = {'transition': {"id": tid},
            'fields': params['fields']}

    return post(turl, user, passwd, data)
|
||||
|
||||
def link(restbase, user, passwd, params):
    """Create an issue link of type params['linktype'] between
    params['inwardissue'] and params['outwardissue']."""
    payload = {
        'type': {'name': params['linktype']},
        'inwardIssue': {'key': params['inwardissue']},
        'outwardIssue': {'key': params['outwardissue']},
    }
    return post(restbase + '/issueLink/', user, passwd, payload)
|
||||
|
||||
# Some parameters are required depending on the operation:
OP_REQUIRED = dict(create=['project', 'issuetype', 'summary', 'description'],
                   comment=['issue', 'comment'],
                   # edit() and transition() both address a specific issue
                   # (they index params['issue']), so 'issue' is required
                   # for them too; previously a missing issue surfaced as a
                   # confusing downstream error instead of a clean
                   # validation failure.
                   edit=['issue'],
                   fetch=['issue'],
                   transition=['issue', 'status'],
                   link=['linktype', 'inwardissue', 'outwardissue'])
|
||||
|
||||
def main():
    """Module entry point.

    Parses the module arguments, validates the per-operation required
    parameters (see OP_REQUIRED), then dispatches to the module-level
    function named after the operation (create/comment/edit/fetch/
    transition/link).
    """

    # request() and the operation helpers reach the module object
    # through this global (for fetch_url / fail_json).
    global module
    module = AnsibleModule(
        argument_spec=dict(
            uri=dict(required=True),
            operation=dict(choices=['create', 'comment', 'edit', 'fetch', 'transition', 'link'],
                           aliases=['command'], required=True),
            username=dict(required=True),
            password=dict(required=True),
            project=dict(),
            summary=dict(),
            description=dict(),
            issuetype=dict(),
            issue=dict(aliases=['ticket']),
            comment=dict(),
            status=dict(),
            assignee=dict(),
            fields=dict(default={}, type='dict'),
            linktype=dict(),
            inwardissue=dict(),
            outwardissue=dict(),
        ),
        supports_check_mode=False
    )

    op = module.params['operation']

    # Check we have the necessary per-operation parameters
    missing = []
    for parm in OP_REQUIRED[op]:
        if not module.params[parm]:
            missing.append(parm)
    if missing:
        module.fail_json(msg="Operation %s require the following missing parameters: %s" % (op, ",".join(missing)))

    # Handle rest of parameters
    uri = module.params['uri']
    user = module.params['username']
    passwd = module.params['password']
    # 'assignee' is shorthand for the corresponding entry in 'fields';
    # fold it in so the operation functions only look at 'fields'.
    if module.params['assignee']:
        module.params['fields']['assignee'] = { 'name': module.params['assignee'] }

    if not uri.endswith('/'):
        uri = uri+'/'
    restbase = uri + 'rest/api/2'

    # Dispatch
    try:

        # Lookup the corresponding method for this operation. This is
        # safe as the AnsibleModule should remove any unknown operations.
        thismod = sys.modules[__name__]
        method = getattr(thismod, op)

        ret = method(restbase, user, passwd, module.params)

    except Exception:
        e = get_exception()
        return module.fail_json(msg=e.message)

    # NOTE(review): every operation reports changed=True, including the
    # read-only 'fetch' — confirm whether that is intended.
    module.exit_json(changed=True, meta=ret)


if __name__ == '__main__':
    main()
|
||||
805
lib/ansible/modules/web_infrastructure/letsencrypt.py
Normal file
805
lib/ansible/modules/web_infrastructure/letsencrypt.py
Normal file
@@ -0,0 +1,805 @@
|
||||
#!/usr/bin/python
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
# (c) 2016 Michael Gruener <michael.gruener@chaosmoon.net>
|
||||
#
|
||||
# This file is part of Ansible
|
||||
#
|
||||
# Ansible is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation, either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# Ansible is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
import binascii
|
||||
import copy
|
||||
import locale
|
||||
import textwrap
|
||||
from datetime import datetime
|
||||
|
||||
ANSIBLE_METADATA = {'status': ['preview'],
|
||||
'supported_by': 'community',
|
||||
'version': '1.0'}
|
||||
|
||||
DOCUMENTATION = '''
|
||||
---
|
||||
module: letsencrypt
|
||||
author: "Michael Gruener (@mgruener)"
|
||||
version_added: "2.2"
|
||||
short_description: Create SSL certificates with Let's Encrypt
|
||||
description:
|
||||
- "Create and renew SSL certificates with Let's Encrypt. Let’s Encrypt is a
|
||||
free, automated, and open certificate authority (CA), run for the
|
||||
public’s benefit. For details see U(https://letsencrypt.org). The current
|
||||
implementation supports the http-01, tls-sni-02 and dns-01 challenges."
|
||||
- "To use this module, it has to be executed at least twice. Either as two
|
||||
different tasks in the same run or during multiple runs."
|
||||
- "Between these two tasks you have to fulfill the required steps for the
|
||||
chosen challenge by whatever means necessary. For http-01 that means
|
||||
creating the necessary challenge file on the destination webserver. For
|
||||
dns-01 the necessary dns record has to be created. tls-sni-02 requires
|
||||
you to create a SSL certificate with the appropriate subjectAlternativeNames.
|
||||
It is I(not) the responsibility of this module to perform these steps."
|
||||
- "For details on how to fulfill these challenges, you might have to read through
|
||||
U(https://tools.ietf.org/html/draft-ietf-acme-acme-02#section-7)"
|
||||
- "Although the defaults are chosen so that the module can be used with
|
||||
the Let's Encrypt CA, the module can be used with any service using the ACME
|
||||
protocol."
|
||||
requirements:
|
||||
- "python >= 2.6"
|
||||
- openssl
|
||||
options:
|
||||
account_key:
|
||||
description:
|
||||
- "File containing the the Let's Encrypt account RSA key."
|
||||
- "Can be created with C(openssl rsa ...)."
|
||||
required: true
|
||||
account_email:
|
||||
description:
|
||||
- "The email address associated with this account."
|
||||
- "It will be used for certificate expiration warnings."
|
||||
required: false
|
||||
default: null
|
||||
acme_directory:
|
||||
description:
|
||||
- "The ACME directory to use. This is the entry point URL to access
|
||||
CA server API."
|
||||
- "For safety reasons the default is set to the Let's Encrypt staging server.
|
||||
This will create technically correct, but untrusted certificates."
|
||||
required: false
|
||||
default: https://acme-staging.api.letsencrypt.org/directory
|
||||
agreement:
|
||||
description:
|
||||
- "URI to a terms of service document you agree to when using the
|
||||
ACME service at C(acme_directory)."
|
||||
required: false
|
||||
default: 'https://letsencrypt.org/documents/LE-SA-v1.1.1-August-1-2016.pdf'
|
||||
challenge:
|
||||
description: The challenge to be performed.
|
||||
required: false
|
||||
choices: [ 'http-01', 'dns-01', 'tls-sni-02']
|
||||
default: 'http-01'
|
||||
csr:
|
||||
description:
|
||||
- "File containing the CSR for the new certificate."
|
||||
- "Can be created with C(openssl csr ...)."
|
||||
- "The CSR may contain multiple Subject Alternate Names, but each one
|
||||
will lead to an individual challenge that must be fulfilled for the
|
||||
CSR to be signed."
|
||||
required: true
|
||||
alias: ['src']
|
||||
data:
|
||||
description:
|
||||
- "The data to validate ongoing challenges."
|
||||
- "The value that must be used here will be provided by a previous use
|
||||
of this module."
|
||||
required: false
|
||||
default: null
|
||||
dest:
|
||||
description: The destination file for the certificate.
|
||||
required: true
|
||||
alias: ['cert']
|
||||
remaining_days:
|
||||
description:
|
||||
- "The number of days the certificate must have left being valid.
|
||||
If C(remaining_days < cert_days), then it will be renewed.
|
||||
If the certificate is not renewed, module return values will not
|
||||
include C(challenge_data)."
|
||||
required: false
|
||||
default: 10
|
||||
'''
|
||||
|
||||
EXAMPLES = '''
|
||||
- letsencrypt:
|
||||
account_key: /etc/pki/cert/private/account.key
|
||||
csr: /etc/pki/cert/csr/sample.com.csr
|
||||
dest: /etc/httpd/ssl/sample.com.crt
|
||||
register: sample_com_challenge
|
||||
|
||||
# perform the necessary steps to fulfill the challenge
|
||||
# for example:
|
||||
#
|
||||
# - copy:
|
||||
# dest: /var/www/html/{{ sample_com_challenge['challenge_data']['sample.com']['http-01']['resource'] }}
|
||||
# content: "{{ sample_com_challenge['challenge_data']['sample.com']['http-01']['resource_value'] }}"
|
||||
# when: sample_com_challenge|changed
|
||||
|
||||
- letsencrypt:
|
||||
account_key: /etc/pki/cert/private/account.key
|
||||
csr: /etc/pki/cert/csr/sample.com.csr
|
||||
dest: /etc/httpd/ssl/sample.com.crt
|
||||
data: "{{ sample_com_challenge }}"
|
||||
'''
|
||||
|
||||
RETURN = '''
|
||||
cert_days:
|
||||
description: the number of days the certificate remains valid.
|
||||
returned: success
|
||||
challenge_data:
|
||||
description: per domain / challenge type challenge data
|
||||
returned: changed
|
||||
type: dictionary
|
||||
contains:
|
||||
resource:
|
||||
description: the challenge resource that must be created for validation
|
||||
returned: changed
|
||||
type: string
|
||||
sample: .well-known/acme-challenge/evaGxfADs6pSRb2LAv9IZf17Dt3juxGJ-PCt92wr-oA
|
||||
resource_value:
|
||||
description: the value the resource has to produce for the validation
|
||||
returned: changed
|
||||
type: string
|
||||
sample: IlirfxKKXA...17Dt3juxGJ-PCt92wr-oA
|
||||
authorizations:
|
||||
description: ACME authorization data.
|
||||
returned: changed
|
||||
type: list
|
||||
contains:
|
||||
authorization:
|
||||
description: ACME authorization object. See https://tools.ietf.org/html/draft-ietf-acme-acme-02#section-6.1.2
|
||||
returned: success
|
||||
type: dict
|
||||
'''
|
||||
|
||||
def nopad_b64(data):
    """Return the URL-safe base64 encoding of *data* without '=' padding,
    as required for JWS fields (RFC 7515 base64url)."""
    encoded = base64.urlsafe_b64encode(data).decode('utf8')
    # '=' only ever appears as trailing padding in base64 output.
    return encoded.replace("=", "")
|
||||
|
||||
def simple_get(module, url):
    """HTTP GET *url* and return the response body.

    The body is JSON-decoded into a dict when the server advertises an
    ``application/json`` content type, otherwise returned raw.  Fails
    the module on JSON parse errors or any HTTP status >= 400.
    """
    resp, info = fetch_url(module, url, method='GET')

    result = None
    # BUGFIX: initialize content.  When fetch_url fails, resp is None
    # (AttributeError on .read()) and info['body'] may be absent/falsy;
    # previously 'content' was then unbound and the function raised
    # NameError instead of reporting the real error below.
    content = None
    try:
        content = resp.read()
    except AttributeError:
        if info['body']:
            content = info['body']

    if content:
        if info['content-type'].startswith('application/json'):
            try:
                result = module.from_json(content.decode('utf8'))
            except ValueError:
                module.fail_json(msg="Failed to parse the ACME response: {0} {1}".format(url,content))
        else:
            result = content

    if info['status'] >= 400:
        module.fail_json(msg="ACME request failed: CODE: {0} RESULT:{1}".format(info['status'],result))
    return result
|
||||
|
||||
def get_cert_days(module, cert_file):
    '''
    Return the days the certificate in cert_file remains valid and -1
    if the file was not found.
    '''
    if not os.path.exists(cert_file):
        return -1

    openssl_bin = module.get_bin_path('openssl', True)
    openssl_cert_cmd = [openssl_bin, "x509", "-in", cert_file, "-noout", "-text"]
    _, out, _ = module.run_command(openssl_cert_cmd, check_rc=True)
    try:
        not_after_str = re.search(r"\s+Not After\s*:\s+(.*)", out.decode('utf8')).group(1)
        # BUGFIX: this file imports 'from datetime import datetime', so
        # the class (not the module) is in scope; the previous
        # 'datetime.datetime.fromtimestamp' raised AttributeError.
        not_after = datetime.fromtimestamp(time.mktime(time.strptime(not_after_str, '%b %d %H:%M:%S %Y %Z')))
    except AttributeError:
        module.fail_json(msg="No 'Not after' date found in {0}".format(cert_file))
    except ValueError:
        module.fail_json(msg="Failed to parse 'Not after' date of {0}".format(cert_file))
    # NOTE(review): assumes the certificate's Not-After is in UTC
    # (openssl prints GMT) — naive datetimes on both sides keep the
    # subtraction consistent.
    now = datetime.utcnow()
    return (not_after - now).days
|
||||
|
||||
# function source: network/basics/uri.py
def write_file(module, dest, content):
    '''
    Write content to destination file dest, only if the content
    has changed.

    Returns True when dest was (re)written and False when the existing
    file already had identical content.  Fails the module (after
    removing the temporary file) on any permission or copy error.
    '''
    changed = False
    # Write the content to a temporary file first so it can be
    # checksummed and compared against the existing destination.
    _, tmpsrc = tempfile.mkstemp()
    f = open(tmpsrc, 'wb')
    try:
        f.write(content)
    except Exception as err:
        os.remove(tmpsrc)
        module.fail_json(msg="failed to create temporary content file: %s" % str(err))
    f.close()
    checksum_src = None
    checksum_dest = None
    # raise an error if there is no tmpsrc file
    if not os.path.exists(tmpsrc):
        os.remove(tmpsrc)
        module.fail_json(msg="Source %s does not exist" % (tmpsrc))
    if not os.access(tmpsrc, os.R_OK):
        os.remove(tmpsrc)
        module.fail_json( msg="Source %s not readable" % (tmpsrc))
    checksum_src = module.sha1(tmpsrc)
    # check if there is no dest file
    if os.path.exists(dest):
        # raise an error if copy has no permission on dest
        if not os.access(dest, os.W_OK):
            os.remove(tmpsrc)
            module.fail_json(msg="Destination %s not writable" % (dest))
        if not os.access(dest, os.R_OK):
            os.remove(tmpsrc)
            module.fail_json(msg="Destination %s not readable" % (dest))
        checksum_dest = module.sha1(dest)
    else:
        if not os.access(os.path.dirname(dest), os.W_OK):
            os.remove(tmpsrc)
            module.fail_json(msg="Destination dir %s not writable" % (os.path.dirname(dest)))
    # Only copy when the checksums differ, so 'changed' accurately
    # reflects whether the file content was modified.
    if checksum_src != checksum_dest:
        try:
            shutil.copyfile(tmpsrc, dest)
            changed = True
        except Exception as err:
            os.remove(tmpsrc)
            module.fail_json(msg="failed to copy %s to %s: %s" % (tmpsrc, dest, str(err)))
    # The temporary file is removed on every path (success and failure).
    os.remove(tmpsrc)
    return changed
|
||||
|
||||
class ACMEDirectory(object):
    '''
    Wrapper around the ACME server directory resource.  Exposes the
    directory entries via item access and provides a Replay-Nonce for a
    given URI.  Nonce retrieval only works for URIs that permit GET
    requests (so normally not the ones that require authentication).
    https://tools.ietf.org/html/draft-ietf-acme-acme-02#section-6.2
    '''
    def __init__(self, module):
        self.module = module
        self.directory_root = module.params['acme_directory']
        # Fetch the directory once up front; entries are then served
        # from this cached dict.
        self.directory = simple_get(self.module, self.directory_root)

    def __getitem__(self, key):
        return self.directory[key]

    def get_nonce(self, resource=None):
        # A HEAD request against any GET-able resource yields a fresh
        # Replay-Nonce header; default to the directory root.
        target = self.directory_root if resource is None else resource
        _, info = fetch_url(self.module, target, method='HEAD')
        if info['status'] != 200:
            self.module.fail_json(msg="Failed to get replay-nonce, got status {0}".format(info['status']))
        return info['replay-nonce']
|
||||
|
||||
class ACMEAccount(object):
    '''
    ACME account object. Handles the authorized communication with the
    ACME server. Provides access to account-bound information like
    the currently active authorizations and valid certificates.
    '''
    def __init__(self, module):
        self.module = module
        self.agreement = module.params['agreement']
        self.key = module.params['account_key']
        self.email = module.params['account_email']
        self.data = module.params['data']
        self.directory = ACMEDirectory(module)
        self.uri = None
        self.changed = False

        # Populated from the registration object when the account
        # already exists (see init_account).
        self._authz_list_uri = None
        self._certs_list_uri = None

        if not os.path.exists(self.key):
            module.fail_json(msg="Account key %s not found" % (self.key))

        self._openssl_bin = module.get_bin_path('openssl', True)

        # Build the fixed JWS header from the account key's public
        # modulus (n) and exponent (e).
        pub_hex, pub_exp = self._parse_account_key(self.key)
        self.jws_header = {
            "alg": "RS256",
            "jwk": {
                "e": nopad_b64(binascii.unhexlify(pub_exp.encode("utf-8"))),
                "kty": "RSA",
                "n": nopad_b64(binascii.unhexlify(re.sub(r"(\s|:)", "", pub_hex).encode("utf-8"))),
            },
        }
        self.init_account()

    def get_keyauthorization(self, token):
        '''
        Returns the key authorization for the given token
        https://tools.ietf.org/html/draft-ietf-acme-acme-02#section-7.1
        '''
        accountkey_json = json.dumps(self.jws_header['jwk'], sort_keys=True, separators=(',', ':'))
        thumbprint = nopad_b64(hashlib.sha256(accountkey_json.encode('utf8')).digest())
        return "{0}.{1}".format(token, thumbprint)

    def _parse_account_key(self, key):
        '''
        Parses an RSA key file in PEM format and returns the modulus
        and public exponent of the key
        '''
        openssl_keydump_cmd = [self._openssl_bin, "rsa", "-in", key, "-noout", "-text"]
        _, out, _ = self.module.run_command(openssl_keydump_cmd, check_rc=True)

        pub_hex, pub_exp = re.search(
            r"modulus:\n\s+00:([a-f0-9\:\s]+?)\npublicExponent: ([0-9]+)",
            out.decode('utf8'), re.MULTILINE | re.DOTALL).groups()
        # Convert the decimal exponent to hex, zero-padded to an even
        # number of digits so it can be unhexlified.
        pub_exp = "{0:x}".format(int(pub_exp))
        if len(pub_exp) % 2:
            pub_exp = "0{0}".format(pub_exp)

        return pub_hex, pub_exp

    def send_signed_request(self, url, payload):
        '''
        Sends a JWS signed HTTP POST request to the ACME server and returns
        the response as dictionary
        https://tools.ietf.org/html/draft-ietf-acme-acme-02#section-5.2
        '''
        protected = copy.deepcopy(self.jws_header)
        protected["nonce"] = self.directory.get_nonce()

        try:
            payload64 = nopad_b64(self.module.jsonify(payload).encode('utf8'))
            protected64 = nopad_b64(self.module.jsonify(protected).encode('utf8'))
        except Exception as e:
            self.module.fail_json(msg="Failed to encode payload / headers as JSON: {0}".format(e))

        openssl_sign_cmd = [self._openssl_bin, "dgst", "-sha256", "-sign", self.key]
        sign_payload = "{0}.{1}".format(protected64, payload64).encode('utf8')
        _, out, _ = self.module.run_command(openssl_sign_cmd, data=sign_payload, check_rc=True, binary_data=True)

        data = self.module.jsonify({
            "header": self.jws_header,
            "protected": protected64,
            "payload": payload64,
            "signature": nopad_b64(out),
        })

        resp, info = fetch_url(self.module, url, data=data, method='POST')
        result = None
        # BUGFIX: initialize content so it is defined even when resp is
        # None (fetch_url failure) and info carries no body; previously
        # this raised NameError instead of returning (None, info).
        content = None
        try:
            content = resp.read()
        except AttributeError:
            if info['body']:
                content = info['body']

        if content:
            if info['content-type'].startswith('application/json'):
                try:
                    result = self.module.from_json(content.decode('utf8'))
                except ValueError:
                    self.module.fail_json(msg="Failed to parse the ACME response: {0} {1}".format(url,content))
            else:
                result = content

        return result, info

    def _new_reg(self, contact=None):
        '''
        Registers a new ACME account. Returns True if the account was
        created and False if it already existed (e.g. it was not newly
        created)
        https://tools.ietf.org/html/draft-ietf-acme-acme-02#section-6.3
        '''
        # BUGFIX: avoid a mutable default argument ([]), which is shared
        # across calls in Python.
        if contact is None:
            contact = []

        if self.uri is not None:
            return True

        new_reg = {
            'resource': 'new-reg',
            'agreement': self.agreement,
            'contact': contact
        }

        result, info = self.send_signed_request(self.directory['new-reg'], new_reg)
        if 'location' in info:
            self.uri = info['location']

        if info['status'] in [200, 201]:
            # Account did not exist
            self.changed = True
            return True
        elif info['status'] == 409:
            # Account did exist
            return False
        else:
            self.module.fail_json(msg="Error registering: {0} {1}".format(info['status'], result))

    def init_account(self):
        '''
        Create or update an account on the ACME server. As the only way
        (without knowing an account URI) to test if an account exists
        is to try and create one with the provided account key, this
        method will always result in an account being present (except
        on error situations). If the account already exists, it will
        update the contact information.
        https://tools.ietf.org/html/draft-ietf-acme-acme-02#section-6.3
        '''

        contact = []
        if self.email:
            contact.append('mailto:' + self.email)

        # if this is not a new registration (e.g. existing account)
        if not self._new_reg(contact):
            # pre-existing account, get account data...
            result, _ = self.send_signed_request(self.uri, {'resource': 'reg'})

            # XXX: letsencrypt/boulder#1435
            if 'authorizations' in result:
                self._authz_list_uri = result['authorizations']
            if 'certificates' in result:
                self._certs_list_uri = result['certificates']

            # ...and check if update is necessary
            do_update = False
            if 'contact' in result:
                # BUGFIX: use an equality test instead of the Python2-only
                # cmp() builtin (removed in Python 3); `cmp(a, b) != 0`
                # is equivalent to `a != b`.
                if contact != result['contact']:
                    do_update = True
            elif len(contact) > 0:
                do_update = True

            if do_update:
                upd_reg = result
                upd_reg['contact'] = contact
                result, _ = self.send_signed_request(self.uri, upd_reg)
                self.changed = True

    def get_authorizations(self):
        '''
        Return a list of currently active authorizations
        https://tools.ietf.org/html/draft-ietf-acme-acme-02#section-6.4
        '''
        authz_list = {'authorizations': []}
        if self._authz_list_uri is None:
            # XXX: letsencrypt/boulder#1435
            # Workaround, retrieve the known authorization urls
            # from the data attribute
            # It is also a way to limit the queried authorizations, which
            # might become relevant at some point
            if (self.data is not None) and ('authorizations' in self.data):
                for auth in self.data['authorizations']:
                    authz_list['authorizations'].append(auth['uri'])
            else:
                return []
        else:
            # TODO: need to handle pagination
            authz_list = simple_get(self.module, self._authz_list_uri)

        authz = []
        for auth_uri in authz_list['authorizations']:
            auth = simple_get(self.module, auth_uri)
            auth['uri'] = auth_uri
            authz.append(auth)

        return authz
|
||||
|
||||
class ACMEClient(object):
|
||||
'''
|
||||
ACME client class. Uses an ACME account object and a CSR to
|
||||
start and validate ACME challenges and download the respective
|
||||
certificates.
|
||||
'''
|
||||
def __init__(self,module):
|
||||
self.module = module
|
||||
self.challenge = module.params['challenge']
|
||||
self.csr = module.params['csr']
|
||||
self.dest = module.params['dest']
|
||||
self.account = ACMEAccount(module)
|
||||
self.directory = self.account.directory
|
||||
self.authorizations = self.account.get_authorizations()
|
||||
self.cert_days = -1
|
||||
self.changed = self.account.changed
|
||||
|
||||
if not os.path.exists(self.csr):
|
||||
module.fail_json(msg="CSR %s not found" % (self.csr))
|
||||
|
||||
self._openssl_bin = module.get_bin_path('openssl', True)
|
||||
self.domains = self._get_csr_domains()
|
||||
|
||||
def _get_csr_domains(self):
|
||||
'''
|
||||
Parse the CSR and return the list of requested domains
|
||||
'''
|
||||
openssl_csr_cmd = [self._openssl_bin, "req", "-in", self.csr, "-noout", "-text"]
|
||||
_, out, _ = self.module.run_command(openssl_csr_cmd,check_rc=True)
|
||||
|
||||
domains = set([])
|
||||
common_name = re.search(r"Subject:.*? CN=([^\s,;/]+)", out.decode('utf8'))
|
||||
if common_name is not None:
|
||||
domains.add(common_name.group(1))
|
||||
subject_alt_names = re.search(r"X509v3 Subject Alternative Name: \n +([^\n]+)\n", out.decode('utf8'), re.MULTILINE|re.DOTALL)
|
||||
if subject_alt_names is not None:
|
||||
for san in subject_alt_names.group(1).split(", "):
|
||||
if san.startswith("DNS:"):
|
||||
domains.add(san[4:])
|
||||
return domains
|
||||
|
||||
|
||||
def _get_domain_auth(self,domain):
|
||||
'''
|
||||
Get the status string of the first authorization for the given domain.
|
||||
Return None if no active authorization for the given domain was found.
|
||||
'''
|
||||
if self.authorizations is None:
|
||||
return None
|
||||
|
||||
for auth in self.authorizations:
|
||||
if (auth['identifier']['type'] == 'dns') and (auth['identifier']['value'] == domain):
|
||||
return auth
|
||||
return None
|
||||
|
||||
def _add_or_update_auth(self,auth):
|
||||
'''
|
||||
Add or update the given authroization in the global authorizations list.
|
||||
Return True if the auth was updated/added and False if no change was
|
||||
necessary.
|
||||
'''
|
||||
for index,cur_auth in enumerate(self.authorizations):
|
||||
if (cur_auth['uri'] == auth['uri']):
|
||||
# does the auth parameter contain updated data?
|
||||
if cmp(cur_auth,auth) != 0:
|
||||
# yes, update our current authorization list
|
||||
self.authorizations[index] = auth
|
||||
return True
|
||||
else:
|
||||
return False
|
||||
# this is a new authorization, add it to the list of current
|
||||
# authorizations
|
||||
self.authorizations.append(auth)
|
||||
return True
|
||||
|
||||
def _new_authz(self,domain):
|
||||
'''
|
||||
Create a new authorization for the given domain.
|
||||
Return the authorization object of the new authorization
|
||||
https://tools.ietf.org/html/draft-ietf-acme-acme-02#section-6.4
|
||||
'''
|
||||
if self.account.uri is None:
|
||||
return
|
||||
|
||||
new_authz = {
|
||||
"resource": "new-authz",
|
||||
"identifier": {"type": "dns", "value": domain},
|
||||
}
|
||||
|
||||
result, info = self.account.send_signed_request(self.directory['new-authz'], new_authz)
|
||||
if info['status'] not in [200,201]:
|
||||
self.module.fail_json(msg="Error requesting challenges: CODE: {0} RESULT: {1}".format(info['status'], result))
|
||||
else:
|
||||
result['uri'] = info['location']
|
||||
return result
|
||||
|
||||
def _get_challenge_data(self,auth):
|
||||
'''
|
||||
Returns a dict with the data for all proposed (and supported) challenges
|
||||
of the given authorization.
|
||||
'''
|
||||
|
||||
data = {}
|
||||
# no need to choose a specific challenge here as this module
|
||||
# is not responsible for fulfilling the challenges. Calculate
|
||||
# and return the required information for each challenge.
|
||||
for challenge in auth['challenges']:
|
||||
type = challenge['type']
|
||||
token = re.sub(r"[^A-Za-z0-9_\-]", "_", challenge['token'])
|
||||
keyauthorization = self.account.get_keyauthorization(token)
|
||||
|
||||
# NOTE: tls-sni-01 is not supported by choice
|
||||
# too complex to be usefull and tls-sni-02 is an alternative
|
||||
# as soon as it is implemented server side
|
||||
if type == 'http-01':
|
||||
# https://tools.ietf.org/html/draft-ietf-acme-acme-02#section-7.2
|
||||
resource = '.well-known/acme-challenge/' + token
|
||||
value = keyauthorization
|
||||
elif type == 'tls-sni-02':
|
||||
# https://tools.ietf.org/html/draft-ietf-acme-acme-02#section-7.3
|
||||
token_digest = hashlib.sha256(token.encode('utf8')).hexdigest()
|
||||
ka_digest = hashlib.sha256(keyauthorization.encode('utf8')).hexdigest()
|
||||
len_token_digest = len(token_digest)
|
||||
len_ka_digest = len(ka_digest)
|
||||
resource = 'subjectAlternativeNames'
|
||||
value = [
|
||||
"{0}.{1}.token.acme.invalid".format(token_digest[:len_token_digest/2],token_digest[len_token_digest/2:]),
|
||||
"{0}.{1}.ka.acme.invalid".format(ka_digest[:len_ka_digest/2],ka_digest[len_ka_digest/2:]),
|
||||
]
|
||||
elif type == 'dns-01':
|
||||
# https://tools.ietf.org/html/draft-ietf-acme-acme-02#section-7.4
|
||||
resource = '_acme-challenge'
|
||||
value = nopad_b64(hashlib.sha256(keyauthorization).digest()).encode('utf8')
|
||||
else:
|
||||
continue
|
||||
|
||||
data[type] = { 'resource': resource, 'resource_value': value }
|
||||
return data
|
||||
|
||||
def _validate_challenges(self, auth):
    '''
    Validate the authorization provided in the auth dict. Returns True
    when the validation was successful and False when it was not.
    '''
    # Trigger validation only for the challenge type selected by the user
    # (self.challenge); all other proposed challenges are ignored.
    for challenge in auth['challenges']:
        if self.challenge != challenge['type']:
            continue

        uri = challenge['uri']
        # Sanitize the token the same way as in _get_challenge_data before
        # computing the key authorization.
        token = re.sub(r"[^A-Za-z0-9_\-]", "_", challenge['token'])
        keyauthorization = self.account.get_keyauthorization(token)

        challenge_response = {
            "resource": "challenge",
            "keyAuthorization": keyauthorization,
        }
        result, info = self.account.send_signed_request(uri, challenge_response)
        if info['status'] not in [200, 202]:
            self.module.fail_json(msg="Error validating challenge: CODE: {0} RESULT: {1}".format(info['status'], result))

    status = ''

    # Poll the authorization (every 2 seconds) until it reaches a final
    # state. NOTE(review): there is no upper bound on this loop — a server
    # that never leaves 'pending' would block forever; confirm acceptable.
    while status not in ['valid', 'invalid', 'revoked']:
        result = simple_get(self.module, auth['uri'])
        result['uri'] = auth['uri']
        # Keep the cached authorization in sync; a change here means the
        # module made an observable change.
        if self._add_or_update_auth(result):
            self.changed = True
        # draft-ietf-acme-acme-02
        # "status (required, string): ...
        # If this field is missing, then the default value is "pending"."
        if 'status' not in result:
            status = 'pending'
        else:
            status = result['status']
        time.sleep(2)

    if status == 'invalid':
        error_details = ''
        # multiple challenges could have failed at this point, gather error
        # details for all of them before failing
        for challenge in result['challenges']:
            if challenge['status'] == 'invalid':
                error_details += ' CHALLENGE: {0}'.format(challenge['type'])
                if 'error' in challenge:
                    error_details += ' DETAILS: {0};'.format(challenge['error']['detail'])
                else:
                    error_details += ';'
        self.module.fail_json(msg="Authorization for {0} returned invalid: {1}".format(result['identifier']['value'], error_details))

    return status == 'valid'
|
||||
|
||||
def _new_cert(self):
    '''
    Create a new certificate based on the CSR (ACME new-cert resource).

    Returns a dict with the DER-encoded certificate under 'cert' and the
    certificate URI under 'uri'. Fails the module on any HTTP error.
    https://tools.ietf.org/html/draft-ietf-acme-acme-02#section-6.5
    '''
    # Convert the PEM CSR to DER, as required by the ACME protocol.
    csr_to_der_cmd = [self._openssl_bin, "req", "-in", self.csr, "-outform", "DER"]
    dummy_rc, csr_der, dummy_err = self.module.run_command(csr_to_der_cmd, check_rc=True)

    request = {
        "resource": "new-cert",
        "csr": nopad_b64(csr_der),
    }
    result, info = self.account.send_signed_request(self.directory['new-cert'], request)
    # Anything other than 200/201 is a protocol-level failure.
    if info['status'] in [200, 201]:
        return {'cert': result, 'uri': info['location']}
    self.module.fail_json(msg="Error new cert: CODE: {0} RESULT: {1}".format(info['status'], result))
|
||||
|
||||
def _der_to_pem(self, der_cert):
    '''
    Convert the DER format certificate in der_cert to a PEM format
    certificate and return it.
    '''
    # Base64-encode the DER blob and wrap it at 64 characters per line,
    # as required for PEM bodies, then add the certificate armor.
    encoded = base64.b64encode(der_cert).decode('utf8')
    body = "\n".join(textwrap.wrap(encoded, 64))
    return "-----BEGIN CERTIFICATE-----\n{0}\n-----END CERTIFICATE-----\n".format(body)
|
||||
|
||||
def do_challenges(self):
    '''
    Create new authorizations for all domains of the CSR and return
    the challenge details for the chosen challenge type.
    '''
    data = {}
    for domain in self.domains:
        auth = self._get_domain_auth(domain)
        if auth is None:
            # No authorization known for this domain yet: request one and
            # cache it, then collect its challenge data.
            new_auth = self._new_authz(domain)
            self._add_or_update_auth(new_auth)
            data[domain] = self._get_challenge_data(new_auth)
            self.changed = True
        elif ('status' not in auth) or (auth['status'] == 'pending'):
            # draft-ietf-acme-acme-02
            # "status (required, string): ...
            # If this field is missing, then the default value is "pending"."
            # BUGFIX: the membership test must come first — the original
            # order (auth['status'] == 'pending' or 'status' not in auth)
            # raised KeyError whenever 'status' was missing.
            self._validate_challenges(auth)
            # _validate_challenges updates the global authorization dict,
            # so get the current version of the authorization we are working
            # on to retrieve the challenge data
            data[domain] = self._get_challenge_data(self._get_domain_auth(domain))

    return data
|
||||
|
||||
def get_certificate(self):
    '''
    Request a new certificate and write it to the destination file.
    Only do this if a destination file was provided and if all authorizations
    for the domains of the csr are valid. No return value.
    '''
    # Nothing to do without a destination file.
    if self.dest is None:
        return

    # Abort silently unless every domain of the CSR has a valid authorization.
    for domain in self.domains:
        auth = self._get_domain_auth(domain)
        if auth is None or ('status' not in auth) or (auth['status'] != 'valid'):
            return

    cert = self._new_cert()
    if cert['cert'] is None:
        return

    # Convert to PEM and persist; only report a change (and refresh the
    # remaining-days counter) if the file content actually changed.
    pem_cert = self._der_to_pem(cert['cert'])
    if write_file(self.module, self.dest, pem_cert):
        self.cert_days = get_cert_days(self.module, self.dest)
        self.changed = True
|
||||
|
||||
def main():
    '''Module entry point: renew the certificate when it expires soon.'''
    module = AnsibleModule(
        argument_spec=dict(
            account_key=dict(required=True, type='path'),
            account_email=dict(required=False, default=None, type='str'),
            acme_directory=dict(required=False, default='https://acme-staging.api.letsencrypt.org/directory', type='str'),
            agreement=dict(required=False, default='https://letsencrypt.org/documents/LE-SA-v1.1.1-August-1-2016.pdf', type='str'),
            challenge=dict(required=False, default='http-01', choices=['http-01', 'dns-01', 'tls-sni-02'], type='str'),
            csr=dict(required=True, aliases=['src'], type='path'),
            data=dict(required=False, no_log=True, default=None, type='dict'),
            dest=dict(required=True, aliases=['cert'], type='path'),
            remaining_days=dict(required=False, default=10, type='int'),
        ),
        supports_check_mode=True,
    )

    # AnsibleModule() changes the locale, so change it back to C because we
    # rely on time.strptime() when parsing certificate dates.
    locale.setlocale(locale.LC_ALL, "C")

    cert_days = get_cert_days(module, module.params['dest'])
    if cert_days >= module.params['remaining_days']:
        # Certificate is still valid long enough: nothing to do.
        module.exit_json(changed=False, cert_days=cert_days)

    # If checkmode is active, base the changed state solely on the status
    # of the certificate file as all other actions (accessing an account,
    # checking the authorization status...) would lead to potential changes
    # of the current state.
    if module.check_mode:
        module.exit_json(changed=True, authorizations={},
                         challenge_data={}, cert_days=cert_days)

    client = ACMEClient(module)
    client.cert_days = cert_days
    data = client.do_challenges()
    client.get_certificate()
    module.exit_json(changed=client.changed, authorizations=client.authorizations,
                     challenge_data=data, cert_days=client.cert_days)


# import module snippets
from ansible.module_utils.basic import *
from ansible.module_utils.urls import *

if __name__ == '__main__':
    main()
|
||||
164
lib/ansible/modules/web_infrastructure/nginx_status_facts.py
Normal file
164
lib/ansible/modules/web_infrastructure/nginx_status_facts.py
Normal file
@@ -0,0 +1,164 @@
|
||||
#!/usr/bin/python
|
||||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# (c) 2016, René Moser <mail@renemoser.net>
|
||||
#
|
||||
# This file is part of Ansible
|
||||
#
|
||||
# Ansible is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation, either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# Ansible is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
ANSIBLE_METADATA = {'status': ['preview'],
|
||||
'supported_by': 'community',
|
||||
'version': '1.0'}
|
||||
|
||||
DOCUMENTATION = '''
|
||||
---
|
||||
module: nginx_status_facts
|
||||
short_description: Retrieve nginx status facts.
|
||||
description:
|
||||
- Gathers facts from nginx from an URL having C(stub_status) enabled.
|
||||
version_added: "2.3"
|
||||
author: "René Moser (@resmo)"
|
||||
options:
|
||||
url:
|
||||
description:
|
||||
- URL of the nginx status.
|
||||
required: true
|
||||
timeout:
|
||||
description:
|
||||
- HTTP connection timeout in seconds.
|
||||
required: false
|
||||
default: 10
|
||||
|
||||
notes:
|
||||
- See http://nginx.org/en/docs/http/ngx_http_stub_status_module.html for more information.
|
||||
'''
|
||||
|
||||
EXAMPLES = '''
|
||||
# Gather status facts from nginx on localhost
|
||||
- name: get current http stats
|
||||
nginx_status_facts:
|
||||
url: http://localhost/nginx_status
|
||||
|
||||
# Gather status facts from nginx on localhost with a custom timeout of 20 seconds
|
||||
- name: get current http stats
|
||||
nginx_status_facts:
|
||||
url: http://localhost/nginx_status
|
||||
timeout: 20
|
||||
'''
|
||||
|
||||
RETURN = '''
|
||||
---
|
||||
nginx_status_facts.active_connections:
|
||||
description: Active connections.
|
||||
returned: success
|
||||
type: int
|
||||
sample: 2340
|
||||
nginx_status_facts.accepts:
|
||||
description: The total number of accepted client connections.
|
||||
returned: success
|
||||
type: int
|
||||
sample: 81769947
|
||||
nginx_status_facts.handled:
|
||||
description: The total number of handled connections. Generally, the parameter value is the same as accepts unless some resource limits have been reached.
|
||||
returned: success
|
||||
type: int
|
||||
sample: 81769947
|
||||
nginx_status_facts.requests:
|
||||
description: The total number of client requests.
|
||||
returned: success
|
||||
type: int
|
||||
sample: 144332345
|
||||
nginx_status_facts.reading:
|
||||
description: The current number of connections where nginx is reading the request header.
|
||||
returned: success
|
||||
type: int
|
||||
sample: 0
|
||||
nginx_status_facts.writing:
|
||||
description: The current number of connections where nginx is writing the response back to the client.
|
||||
returned: success
|
||||
type: int
|
||||
sample: 241
|
||||
nginx_status_facts.waiting:
|
||||
description: The current number of idle client connections waiting for a request.
|
||||
returned: success
|
||||
type: int
|
||||
sample: 2092
|
||||
nginx_status_facts.data:
|
||||
description: HTTP response as is.
|
||||
returned: success
|
||||
type: string
|
||||
sample: "Active connections: 2340 \nserver accepts handled requests\n 81769947 81769947 144332345 \nReading: 0 Writing: 241 Waiting: 2092 \n"
|
||||
'''
|
||||
|
||||
import re
|
||||
from ansible.module_utils.basic import AnsibleModule
|
||||
from ansible.module_utils.urls import fetch_url
|
||||
|
||||
|
||||
class NginxStatusFacts(object):
    """Fetch an nginx C(stub_status) page and parse it into facts.

    Relies on the module-level ``module`` global created in main().
    """

    def __init__(self):
        # URL of the nginx stub_status page and HTTP timeout in seconds.
        self.url = module.params.get('url')
        self.timeout = module.params.get('timeout')

    def run(self):
        """Return a dict of nginx status facts.

        All counters default to None when the response body is empty or does
        not match the expected stub_status format; 'data' always carries the
        raw response text when a non-empty body was received.
        """
        result = {
            'nginx_status_facts': {
                'active_connections': None,
                'accepts': None,
                'handled': None,
                'requests': None,
                'reading': None,
                'writing': None,
                'waiting': None,
                'data': None,
            }
        }
        (response, info) = fetch_url(module=module, url=self.url, force=True, timeout=self.timeout)
        if not response:
            module.fail_json(msg="No valid or no response from url %s within %s seconds (timeout)" % (self.url, self.timeout))

        data = response.read()
        if not data:
            return result

        # BUGFIX: on Python 3, response.read() returns bytes; decode so the
        # text regex below can be applied without a TypeError.
        if isinstance(data, bytes):
            data = data.decode('utf-8', 'replace')

        result['nginx_status_facts']['data'] = data
        match = re.match(r'Active connections: ([0-9]+) \nserver accepts handled requests\n ([0-9]+) ([0-9]+) ([0-9]+) \n'
                         r'Reading: ([0-9]+) Writing: ([0-9]+) Waiting: ([0-9]+)', data, re.S)
        if match:
            result['nginx_status_facts']['active_connections'] = int(match.group(1))
            result['nginx_status_facts']['accepts'] = int(match.group(2))
            result['nginx_status_facts']['handled'] = int(match.group(3))
            result['nginx_status_facts']['requests'] = int(match.group(4))
            result['nginx_status_facts']['reading'] = int(match.group(5))
            result['nginx_status_facts']['writing'] = int(match.group(6))
            result['nginx_status_facts']['waiting'] = int(match.group(7))
        return result
|
||||
|
||||
def main():
    """Entry point: gather nginx status facts and exit the module."""
    global module
    module = AnsibleModule(
        argument_spec=dict(
            url=dict(required=True),
            timeout=dict(type='int', default=10),
        ),
        supports_check_mode=True,
    )

    # Gathering facts never changes the target system.
    facts = NginxStatusFacts().run()
    module.exit_json(changed=False, ansible_facts=facts)


if __name__ == '__main__':
    main()
|
||||
317
lib/ansible/modules/web_infrastructure/taiga_issue.py
Normal file
317
lib/ansible/modules/web_infrastructure/taiga_issue.py
Normal file
@@ -0,0 +1,317 @@
|
||||
#!/usr/bin/python
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
# (c) 2015, Alejandro Guirao <lekumberri@gmail.com>
|
||||
#
|
||||
# This file is part of Ansible.
|
||||
#
|
||||
# Ansible is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation, either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# Ansible is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
ANSIBLE_METADATA = {'status': ['preview'],
|
||||
'supported_by': 'community',
|
||||
'version': '1.0'}
|
||||
|
||||
DOCUMENTATION = '''
|
||||
---
|
||||
module: taiga_issue
|
||||
short_description: Creates/deletes an issue in a Taiga Project Management Platform
|
||||
description:
|
||||
- Creates/deletes an issue in a Taiga Project Management Platform (U(https://taiga.io)).
|
||||
- An issue is identified by the combination of project, issue subject and issue type.
|
||||
- This module implements the creation or deletion of issues (not the update).
|
||||
version_added: "2.0"
|
||||
options:
|
||||
taiga_host:
|
||||
description:
|
||||
- The hostname of the Taiga instance.
|
||||
required: False
|
||||
default: https://api.taiga.io
|
||||
project:
|
||||
description:
|
||||
- Name of the project containing the issue. Must exist previously.
|
||||
required: True
|
||||
subject:
|
||||
description:
|
||||
- The issue subject.
|
||||
required: True
|
||||
issue_type:
|
||||
description:
|
||||
- The issue type. Must exist previously.
|
||||
required: True
|
||||
priority:
|
||||
description:
|
||||
- The issue priority. Must exist previously.
|
||||
required: False
|
||||
default: Normal
|
||||
status:
|
||||
description:
|
||||
- The issue status. Must exist previously.
|
||||
required: False
|
||||
default: New
|
||||
severity:
|
||||
description:
|
||||
- The issue severity. Must exist previously.
|
||||
required: False
|
||||
default: Normal
|
||||
description:
|
||||
description:
|
||||
- The issue description.
|
||||
required: False
|
||||
default: ""
|
||||
attachment:
|
||||
description:
|
||||
- Path to a file to be attached to the issue.
|
||||
required: False
|
||||
default: None
|
||||
attachment_description:
|
||||
description:
|
||||
- A string describing the file to be attached to the issue.
|
||||
required: False
|
||||
default: ""
|
||||
tags:
|
||||
description:
|
||||
- A list of tags to be assigned to the issue.
|
||||
required: False
|
||||
default: []
|
||||
state:
|
||||
description:
|
||||
- Whether the issue should be present or not.
|
||||
required: False
|
||||
choices: ["present", "absent"]
|
||||
default: present
|
||||
author: Alejandro Guirao (@lekum)
|
||||
requirements: [python-taiga]
|
||||
notes:
|
||||
- The authentication is achieved either by the environment variable TAIGA_TOKEN or by the pair of environment variables TAIGA_USERNAME and TAIGA_PASSWORD
|
||||
'''
|
||||
|
||||
EXAMPLES = '''
|
||||
# Create an issue in the my hosted Taiga environment and attach an error log
|
||||
- taiga_issue:
|
||||
taiga_host: https://mytaigahost.example.com
|
||||
project: myproject
|
||||
subject: An error has been found
|
||||
issue_type: Bug
|
||||
priority: High
|
||||
status: New
|
||||
severity: Important
|
||||
description: An error has been found. Please check the attached error log for details.
|
||||
attachment: /path/to/error.log
|
||||
attachment_description: Error log file
|
||||
tags:
|
||||
- Error
|
||||
- Needs manual check
|
||||
state: present
|
||||
|
||||
# Deletes the previously created issue
|
||||
- taiga_issue:
|
||||
taiga_host: https://mytaigahost.example.com
|
||||
project: myproject
|
||||
subject: An error has been found
|
||||
issue_type: Bug
|
||||
state: absent
|
||||
'''
|
||||
|
||||
RETURN = '''# '''
|
||||
from os import getenv
|
||||
from os.path import isfile
|
||||
|
||||
try:
|
||||
from taiga import TaigaAPI
|
||||
from taiga.exceptions import TaigaException
|
||||
TAIGA_MODULE_IMPORTED=True
|
||||
except ImportError:
|
||||
TAIGA_MODULE_IMPORTED=False
|
||||
|
||||
def manage_issue(module, taiga_host, project_name, issue_subject, issue_priority,
                 issue_status, issue_type, issue_severity, issue_description,
                 issue_attachment, issue_attachment_description,
                 issue_tags, state, check_mode=False):
    """
    Method that creates/deletes issues depending whether they exist and the state desired

    The credentials should be passed via environment variables:
        - TAIGA_TOKEN
        - TAIGA_USERNAME and TAIGA_PASSWORD

    Returns a tuple with these elements:
        - A boolean representing the success of the operation
        - A boolean representing whether a change happened (or would happen)
        - A descriptive message
        - A dict with the issue attributes, in case of issue creation, otherwise empty dict
    """
    changed = False

    try:
        token = getenv('TAIGA_TOKEN')
        if token:
            api = TaigaAPI(host=taiga_host, token=token)
        else:
            api = TaigaAPI(host=taiga_host)
            username = getenv('TAIGA_USERNAME')
            password = getenv('TAIGA_PASSWORD')
            if not any([username, password]):
                return (False, changed, "Missing credentials", {})
            api.auth(username=username, password=password)

        user_id = api.me().id
        # BUGFIX: wrap filter() in list() — on Python 3 filter() returns a
        # lazy iterator, which supports neither len() nor indexing.
        project_list = list(filter(lambda x: x.name == project_name, api.projects.list(member=user_id)))
        if len(project_list) != 1:
            return (False, changed, "Unable to find project %s" % project_name, {})
        project = project_list[0]
        project_id = project.id

        priority_list = list(filter(lambda x: x.name == issue_priority, api.priorities.list(project=project_id)))
        if len(priority_list) != 1:
            return (False, changed, "Unable to find issue priority %s for project %s" % (issue_priority, project_name), {})
        priority_id = priority_list[0].id

        status_list = list(filter(lambda x: x.name == issue_status, api.issue_statuses.list(project=project_id)))
        if len(status_list) != 1:
            return (False, changed, "Unable to find issue status %s for project %s" % (issue_status, project_name), {})
        status_id = status_list[0].id

        type_list = list(filter(lambda x: x.name == issue_type, project.list_issue_types()))
        if len(type_list) != 1:
            return (False, changed, "Unable to find issue type %s for project %s" % (issue_type, project_name), {})
        type_id = type_list[0].id

        severity_list = list(filter(lambda x: x.name == issue_severity, project.list_severities()))
        if len(severity_list) != 1:
            return (False, changed, "Unable to find severity %s for project %s" % (issue_severity, project_name), {})
        severity_id = severity_list[0].id

        issue = {
            "project": project_name,
            "subject": issue_subject,
            "priority": issue_priority,
            "status": issue_status,
            "type": issue_type,
            "severity": issue_severity,
            "description": issue_description,
            "tags": issue_tags,
        }

        # An issue is identified by the project_name, the issue_subject and the issue_type
        matching_issue_list = list(filter(lambda x: x.subject == issue_subject and x.type == type_id, project.list_issues()))
        matching_issue_list_len = len(matching_issue_list)

        if matching_issue_list_len == 0:
            # The issue does not exist in the project
            if state == "present":
                # This implies a change
                changed = True
                if not check_mode:
                    # Create the issue
                    new_issue = project.add_issue(issue_subject, priority_id, status_id, type_id, severity_id, tags=issue_tags,
                                                  description=issue_description)
                    if issue_attachment:
                        new_issue.attach(issue_attachment, description=issue_attachment_description)
                        issue["attachment"] = issue_attachment
                        issue["attachment_description"] = issue_attachment_description
                return (True, changed, "Issue created", issue)

            else:
                # If does not exist, do nothing
                return (True, changed, "Issue does not exist", {})

        elif matching_issue_list_len == 1:
            # The issue exists in the project
            if state == "absent":
                # This implies a change
                changed = True
                if not check_mode:
                    # Delete the issue
                    matching_issue_list[0].delete()
                return (True, changed, "Issue deleted", {})

            else:
                # Do nothing
                return (True, changed, "Issue already exists", {})

        else:
            # More than 1 matching issue
            return (False, changed, "More than one issue with subject %s in project %s" % (issue_subject, project_name), {})

    except TaigaException:
        msg = "An exception happened: %s" % sys.exc_info()[1]
        return (False, changed, msg, {})
|
||||
|
||||
def main():
    """Entry point for the taiga_issue module."""
    module = AnsibleModule(
        argument_spec=dict(
            taiga_host=dict(required=False, default="https://api.taiga.io"),
            project=dict(required=True),
            subject=dict(required=True),
            issue_type=dict(required=True),
            priority=dict(required=False, default="Normal"),
            status=dict(required=False, default="New"),
            severity=dict(required=False, default="Normal"),
            description=dict(required=False, default=""),
            attachment=dict(required=False, default=None),
            attachment_description=dict(required=False, default=""),
            tags=dict(required=False, default=[], type='list'),
            state=dict(required=False, choices=['present', 'absent'], default='present'),
        ),
        supports_check_mode=True
    )

    # python-taiga is an optional dependency; bail out early if missing.
    if not TAIGA_MODULE_IMPORTED:
        msg = "This module needs python-taiga module"
        module.fail_json(msg=msg)

    params = module.params
    issue_attachment = params['attachment']
    # Validate the attachment path before talking to the API.
    if issue_attachment and not isfile(issue_attachment):
        msg = "%s is not a file" % issue_attachment
        module.fail_json(msg=msg)

    return_status, changed, msg, issue_attr_dict = manage_issue(
        module,
        params['taiga_host'],
        params['project'],
        params['subject'],
        params['priority'],
        params['status'],
        params['issue_type'],
        params['severity'],
        params['description'],
        issue_attachment,
        params['attachment_description'],
        params['tags'],
        params['state'],
        check_mode=module.check_mode
    )
    if not return_status:
        module.fail_json(msg=msg)
    # Only include the 'issue' key when attributes were actually produced.
    if len(issue_attr_dict) > 0:
        module.exit_json(changed=changed, msg=msg, issue=issue_attr_dict)
    else:
        module.exit_json(changed=changed, msg=msg)


from ansible.module_utils.basic import *
if __name__ == '__main__':
    main()
|
||||
Reference in New Issue
Block a user