mirror of
https://github.com/ansible-collections/community.general.git
synced 2026-05-06 13:22:48 +00:00
Initial commit
This commit is contained in:
125
plugins/modules/monitoring/airbrake_deployment.py
Normal file
125
plugins/modules/monitoring/airbrake_deployment.py
Normal file
@@ -0,0 +1,125 @@
|
||||
#!/usr/bin/python
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
# Copyright 2013 Bruce Pennypacker <bruce@pennypacker.org>
|
||||
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||
|
||||
from __future__ import absolute_import, division, print_function
|
||||
__metaclass__ = type
|
||||
|
||||
|
||||
ANSIBLE_METADATA = {'metadata_version': '1.1',
|
||||
'status': ['preview'],
|
||||
'supported_by': 'community'}
|
||||
|
||||
|
||||
DOCUMENTATION = '''
|
||||
---
|
||||
module: airbrake_deployment
|
||||
author: "Bruce Pennypacker (@bpennypacker)"
|
||||
short_description: Notify airbrake about app deployments
|
||||
description:
|
||||
- Notify airbrake about app deployments (see http://help.airbrake.io/kb/api-2/deploy-tracking)
|
||||
options:
|
||||
token:
|
||||
description:
|
||||
- API token.
|
||||
required: true
|
||||
environment:
|
||||
description:
|
||||
- The airbrake environment name, typically 'production', 'staging', etc.
|
||||
required: true
|
||||
user:
|
||||
description:
|
||||
- The username of the person doing the deployment
|
||||
required: false
|
||||
repo:
|
||||
description:
|
||||
- URL of the project repository
|
||||
required: false
|
||||
revision:
|
||||
description:
|
||||
- A hash, number, tag, or other identifier showing what revision was deployed
|
||||
required: false
|
||||
url:
|
||||
description:
|
||||
- Optional URL to submit the notification to. Use to send notifications to Airbrake-compliant tools like Errbit.
|
||||
required: false
|
||||
default: "https://api.airbrake.io/deploys.txt"
|
||||
validate_certs:
|
||||
description:
|
||||
- If C(no), SSL certificates for the target url will not be validated. This should only be used
|
||||
on personally controlled sites using self-signed certificates.
|
||||
required: false
|
||||
default: 'yes'
|
||||
type: bool
|
||||
|
||||
requirements: []
|
||||
'''
|
||||
|
||||
EXAMPLES = '''
|
||||
- airbrake_deployment:
|
||||
token: AAAAAA
|
||||
environment: staging
|
||||
user: ansible
|
||||
revision: '4.2'
|
||||
'''
|
||||
|
||||
from ansible.module_utils.basic import AnsibleModule
|
||||
from ansible.module_utils.urls import fetch_url
|
||||
from ansible.module_utils.six.moves.urllib.parse import urlencode
|
||||
|
||||
|
||||
# ===========================================
|
||||
# Module execution.
|
||||
#
|
||||
|
||||
def main():
    """Notify an Airbrake-compatible deploy-tracking endpoint about a deployment."""

    module = AnsibleModule(
        argument_spec=dict(
            token=dict(required=True, no_log=True),
            environment=dict(required=True),
            user=dict(required=False),
            repo=dict(required=False),
            revision=dict(required=False),
            url=dict(required=False, default='https://api.airbrake.io/deploys.txt'),
            validate_certs=dict(default='yes', type='bool'),
        ),
        supports_check_mode=True
    )

    # Map module options onto the form fields expected by the deploy API,
    # skipping any option the user left empty.
    field_map = {
        'environment': 'deploy[rails_env]',
        'user': 'deploy[local_username]',
        'repo': 'deploy[scm_repository]',
        'revision': 'deploy[scm_revision]',
    }
    params = {}
    for option, field in field_map.items():
        value = module.params[option]
        if value:
            params[field] = value
    params['api_key'] = module.params['token']

    url = module.params.get('url')

    # Check mode: report success without contacting the service.
    if module.check_mode:
        module.exit_json(changed=True)

    # POST the form-encoded payload to the deploy-tracking endpoint.
    response, info = fetch_url(module, url, data=urlencode(params))
    if info['status'] == 200:
        module.exit_json(changed=True)
    module.fail_json(msg="HTTP result code: %d connecting to %s" % (info['status'], url))


if __name__ == '__main__':
    main()
|
||||
203
plugins/modules/monitoring/bigpanda.py
Normal file
203
plugins/modules/monitoring/bigpanda.py
Normal file
@@ -0,0 +1,203 @@
|
||||
#!/usr/bin/python
|
||||
# -*- coding: utf-8 -*-
|
||||
# Copyright: Ansible Project
|
||||
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||
|
||||
from __future__ import absolute_import, division, print_function
|
||||
__metaclass__ = type
|
||||
|
||||
|
||||
ANSIBLE_METADATA = {'metadata_version': '1.1',
|
||||
'status': ['preview'],
|
||||
'supported_by': 'community'}
|
||||
|
||||
|
||||
DOCUMENTATION = '''
|
||||
---
|
||||
module: bigpanda
|
||||
author: "Hagai Kariti (@hkariti)"
|
||||
short_description: Notify BigPanda about deployments
|
||||
description:
|
||||
- Notify BigPanda when deployments start and end (successfully or not). Returns a deployment object containing all the parameters for future module calls.
|
||||
options:
|
||||
component:
|
||||
description:
|
||||
- "The name of the component being deployed. Ex: billing"
|
||||
required: true
|
||||
aliases: ['name']
|
||||
version:
|
||||
description:
|
||||
- The deployment version.
|
||||
required: true
|
||||
token:
|
||||
description:
|
||||
- API token.
|
||||
required: true
|
||||
state:
|
||||
description:
|
||||
- State of the deployment.
|
||||
required: true
|
||||
choices: ['started', 'finished', 'failed']
|
||||
hosts:
|
||||
description:
|
||||
- Name of affected host name. Can be a list.
|
||||
required: false
|
||||
default: machine's hostname
|
||||
aliases: ['host']
|
||||
env:
|
||||
description:
|
||||
- The environment name, typically 'production', 'staging', etc.
|
||||
required: false
|
||||
owner:
|
||||
description:
|
||||
- The person responsible for the deployment.
|
||||
required: false
|
||||
description:
|
||||
description:
|
||||
- Free text description of the deployment.
|
||||
required: false
|
||||
url:
|
||||
description:
|
||||
- Base URL of the API server.
|
||||
required: False
|
||||
default: https://api.bigpanda.io
|
||||
validate_certs:
|
||||
description:
|
||||
- If C(no), SSL certificates for the target url will not be validated. This should only be used
|
||||
on personally controlled sites using self-signed certificates.
|
||||
required: false
|
||||
default: 'yes'
|
||||
type: bool
|
||||
deployment_message:
|
||||
description:
|
||||
- Message about the deployment.
|
||||
- C(message) alias is deprecated in Ansible 2.10, since it is used internally by Ansible Core Engine.
|
||||
aliases: ['message']
|
||||
|
||||
# informational: requirements for nodes
|
||||
requirements: [ ]
|
||||
'''
|
||||
|
||||
EXAMPLES = '''
|
||||
- bigpanda:
|
||||
component: myapp
|
||||
version: '1.3'
|
||||
token: '{{ bigpanda_token }}'
|
||||
state: started
|
||||
|
||||
- bigpanda:
|
||||
component: myapp
|
||||
version: '1.3'
|
||||
token: '{{ bigpanda_token }}'
|
||||
state: finished
|
||||
|
||||
# If outside servers aren't reachable from your machine, use delegate_to and override hosts:
|
||||
- bigpanda:
|
||||
component: myapp
|
||||
version: '1.3'
|
||||
token: '{{ bigpanda_token }}'
|
||||
hosts: '{{ ansible_hostname }}'
|
||||
state: started
|
||||
delegate_to: localhost
|
||||
register: deployment
|
||||
|
||||
- bigpanda:
|
||||
component: '{{ deployment.component }}'
|
||||
version: '{{ deployment.version }}'
|
||||
token: '{{ deployment.token }}'
|
||||
state: finished
|
||||
delegate_to: localhost
|
||||
'''
|
||||
|
||||
# ===========================================
|
||||
# Module execution.
|
||||
#
|
||||
import json
|
||||
import socket
|
||||
import traceback
|
||||
|
||||
from ansible.module_utils.basic import AnsibleModule
|
||||
from ansible.module_utils._text import to_native
|
||||
from ansible.module_utils.urls import fetch_url
|
||||
|
||||
|
||||
def main():
    """Notify BigPanda about a deployment start/end and return a deployment object.

    Builds a request body from the module parameters and POSTs it to the
    BigPanda deployments API ('.../start' for state=started, '.../end' for
    finished/failed). Exits via module.exit_json with a deployment dict that
    carries every parameter needed for a follow-up call.
    """

    module = AnsibleModule(
        argument_spec=dict(
            component=dict(required=True, aliases=['name']),
            version=dict(required=True),
            token=dict(required=True, no_log=True),
            state=dict(required=True, choices=['started', 'finished', 'failed']),
            hosts=dict(required=False, default=[socket.gethostname()], aliases=['host']),
            env=dict(required=False),
            owner=dict(required=False),
            description=dict(required=False),
            deployment_message=dict(required=False, aliases=['message'], deprecated_aliases=[dict(name='message', version='2.14')]),
            source_system=dict(required=False, default='ansible'),
            validate_certs=dict(default='yes', type='bool'),
            url=dict(required=False, default='https://api.bigpanda.io'),
        ),
        supports_check_mode=True,
    )

    token = module.params['token']
    state = module.params['state']
    url = module.params['url']

    # Build the common request body
    body = dict()
    for k in ('component', 'version', 'hosts'):
        v = module.params[k]
        if v is not None:
            body[k] = v

    # 'hosts' may be passed as a scalar; the API expects a list.
    if not isinstance(body['hosts'], list):
        body['hosts'] = [body['hosts']]

    # Insert state-specific attributes to body
    if state == 'started':
        for k in ('source_system', 'env', 'owner', 'description'):
            v = module.params[k]
            if v is not None:
                body[k] = v

        request_url = url + '/data/events/deployments/start'
    else:
        # BUG FIX: the option is named 'deployment_message' ('message' is only
        # a deprecated alias), so it must be read under its canonical name --
        # module.params['message'] raises KeyError when the alias is not
        # supplied by the user.
        message = module.params['deployment_message']
        if message is not None:
            body['errorMessage'] = message

        if state == 'finished':
            body['status'] = 'success'
        else:
            body['status'] = 'failure'

        request_url = url + '/data/events/deployments/end'

    # Build the deployment object we return
    deployment = dict(token=token, url=url)
    deployment.update(body)
    if 'errorMessage' in deployment:
        # Expose the message to callers under its public name.
        deployment['message'] = deployment.pop('errorMessage')

    # If we're in check mode, just exit pretending like we succeeded
    if module.check_mode:
        module.exit_json(changed=True, **deployment)

    # Send the data to bigpanda
    data = json.dumps(body)
    headers = {'Authorization': 'Bearer %s' % token, 'Content-Type': 'application/json'}
    try:
        response, info = fetch_url(module, request_url, data=data, headers=headers)
        if info['status'] == 200:
            module.exit_json(changed=True, **deployment)
        else:
            module.fail_json(msg=json.dumps(info))
    except Exception as e:
        module.fail_json(msg=to_native(e), exception=traceback.format_exc())


if __name__ == '__main__':
    main()
|
||||
230
plugins/modules/monitoring/circonus_annotation.py
Normal file
230
plugins/modules/monitoring/circonus_annotation.py
Normal file
@@ -0,0 +1,230 @@
|
||||
#!/usr/bin/python
|
||||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# (c) 2014-2015, Epic Games, Inc.
|
||||
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||
|
||||
from __future__ import absolute_import, division, print_function
|
||||
__metaclass__ = type
|
||||
|
||||
|
||||
ANSIBLE_METADATA = {'metadata_version': '1.1',
|
||||
'status': ['preview'],
|
||||
'supported_by': 'community'}
|
||||
|
||||
|
||||
DOCUMENTATION = '''
|
||||
---
|
||||
module: circonus_annotation
|
||||
short_description: create an annotation in circonus
|
||||
description:
|
||||
- Create an annotation event with a given category, title and description. Optionally start, end or durations can be provided
|
||||
author: "Nick Harring (@NickatEpic)"
|
||||
requirements:
|
||||
- requests (either >= 2.0.0 for Python 3, or >= 1.0.0 for Python 2)
|
||||
notes:
|
||||
- Check mode isn't supported.
|
||||
options:
|
||||
api_key:
|
||||
description:
|
||||
- Circonus API key
|
||||
required: true
|
||||
category:
|
||||
description:
|
||||
- Annotation Category
|
||||
required: true
|
||||
description:
|
||||
description:
|
||||
- Description of annotation
|
||||
required: true
|
||||
title:
|
||||
description:
|
||||
- Title of annotation
|
||||
required: true
|
||||
start:
|
||||
description:
|
||||
- Unix timestamp of event start
|
||||
default: I(now)
|
||||
stop:
|
||||
description:
|
||||
- Unix timestamp of event end
|
||||
default: I(now) + I(duration)
|
||||
duration:
|
||||
description:
|
||||
- Duration in seconds of annotation
|
||||
default: 0
|
||||
'''
|
||||
EXAMPLES = '''
|
||||
# Create a simple annotation event with a source, defaults to start and end time of now
|
||||
- circonus_annotation:
|
||||
api_key: XXXXXXXXXXXXXXXXX
|
||||
title: App Config Change
|
||||
description: This is a detailed description of the config change
|
||||
category: This category groups like annotations
|
||||
# Create an annotation with a duration of 5 minutes and a default start time of now
|
||||
- circonus_annotation:
|
||||
api_key: XXXXXXXXXXXXXXXXX
|
||||
title: App Config Change
|
||||
description: This is a detailed description of the config change
|
||||
category: This category groups like annotations
|
||||
duration: 300
|
||||
# Create an annotation with a start_time and end_time
|
||||
- circonus_annotation:
|
||||
api_key: XXXXXXXXXXXXXXXXX
|
||||
title: App Config Change
|
||||
description: This is a detailed description of the config change
|
||||
category: This category groups like annotations
|
||||
start_time: 1395940006
|
||||
end_time: 1395954407
|
||||
'''
|
||||
|
||||
RETURN = '''
|
||||
annotation:
|
||||
description: details about the created annotation
|
||||
returned: success
|
||||
type: complex
|
||||
contains:
|
||||
_cid:
|
||||
description: annotation identifier
|
||||
returned: success
|
||||
type: str
|
||||
sample: /annotation/100000
|
||||
_created:
|
||||
description: creation timestamp
|
||||
returned: success
|
||||
type: int
|
||||
sample: 1502236928
|
||||
_last_modified:
|
||||
description: last modification timestamp
|
||||
returned: success
|
||||
type: int
|
||||
sample: 1502236928
|
||||
_last_modified_by:
|
||||
description: last modified by
|
||||
returned: success
|
||||
type: str
|
||||
sample: /user/1000
|
||||
category:
|
||||
description: category of the created annotation
|
||||
returned: success
|
||||
type: str
|
||||
sample: alerts
|
||||
title:
|
||||
description: title of the created annotation
|
||||
returned: success
|
||||
type: str
|
||||
sample: WARNING
|
||||
description:
|
||||
description: description of the created annotation
|
||||
returned: success
|
||||
type: str
|
||||
sample: Host is down.
|
||||
start:
|
||||
description: timestamp, since annotation applies
|
||||
returned: success
|
||||
type: int
|
||||
sample: 1395940006
|
||||
stop:
|
||||
description: timestamp, since annotation ends
|
||||
returned: success
|
||||
type: str
|
||||
sample: 1395954407
|
||||
rel_metrics:
|
||||
description: Array of metrics related to this annotation, each metrics is a string.
|
||||
returned: success
|
||||
type: list
|
||||
sample:
|
||||
- 54321_kbps
|
||||
'''
|
||||
import json
|
||||
import time
|
||||
import traceback
|
||||
from distutils.version import LooseVersion
|
||||
|
||||
REQUESTS_IMP_ERR = None
|
||||
try:
|
||||
import requests
|
||||
HAS_REQUESTS = True
|
||||
except ImportError:
|
||||
REQUESTS_IMP_ERR = traceback.format_exc()
|
||||
HAS_REQUESTS = False
|
||||
|
||||
from ansible.module_utils.basic import AnsibleModule, missing_required_lib
|
||||
from ansible.module_utils.six import PY3
|
||||
from ansible.module_utils._text import to_native
|
||||
|
||||
|
||||
def check_requests_dep(module):
    """Fail the module unless a sufficiently recent 'requests' library is importable."""
    if not HAS_REQUESTS:
        module.fail_json(msg=missing_required_lib('requests'), exception=REQUESTS_IMP_ERR)
    else:
        # requests >= 2.0.0 is needed on Python 3; >= 1.0.0 suffices on Python 2.
        minimum = '2.0.0' if PY3 else '1.0.0'
        if LooseVersion(requests.__version__) < LooseVersion(minimum):
            module.fail_json(msg="'requests' library version should be >= %s, found: %s." % (minimum, requests.__version__))
|
||||
|
||||
|
||||
def post_annotation(annotation, api_key):
    """Send one annotation dict to the Circonus v2 API and return the response.

    Raises an HTTPError (via raise_for_status) on a non-2xx reply.
    """
    endpoint = 'https://api.circonus.com/v2' + '/annotation'
    response = requests.post(endpoint,
                             headers=build_headers(api_key),
                             data=json.dumps(annotation))
    response.raise_for_status()
    return response
|
||||
|
||||
|
||||
def create_annotation(module):
    """Build the annotation payload dict from the module parameters.

    'start' defaults to now; 'stop' defaults to now plus 'duration' seconds.
    """
    params = module.params
    duration = params['duration']
    start = params['start'] if params['start'] is not None else int(time.time())
    stop = params['stop'] if params['stop'] is not None else int(time.time()) + duration
    return {
        'start': start,
        'stop': stop,
        'category': params['category'],
        'description': params['description'],
        'title': params['title'],
    }
|
||||
|
||||
|
||||
def build_headers(api_token):
    """Return the HTTP headers required by the Circonus v2 API, with the auth token set."""
    return {
        'X-Circonus-App-Name': 'ansible',
        'Host': 'api.circonus.com',
        'X-Circonus-Auth-Token': api_token,
        'Accept': 'application/json',
    }
|
||||
|
||||
|
||||
def main():
    """Entry point: validate the requests dependency, build the annotation, submit it."""
    argument_spec = dict(
        start=dict(type='int'),
        stop=dict(type='int'),
        category=dict(required=True),
        title=dict(required=True),
        description=dict(required=True),
        duration=dict(default=0, type='int'),
        api_key=dict(required=True, no_log=True)
    )
    module = AnsibleModule(argument_spec=argument_spec)

    check_requests_dep(module)

    payload = create_annotation(module)
    try:
        response = post_annotation(payload, module.params['api_key'])
    except requests.exceptions.RequestException as exc:
        module.fail_json(msg='Request Failed', reason=to_native(exc), exception=traceback.format_exc())
    module.exit_json(changed=True, annotation=response.json())


if __name__ == '__main__':
    main()
|
||||
161
plugins/modules/monitoring/datadog/datadog_event.py
Normal file
161
plugins/modules/monitoring/datadog/datadog_event.py
Normal file
@@ -0,0 +1,161 @@
|
||||
#!/usr/bin/python
|
||||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# Author: Artūras 'arturaz' Šlajus <x11@arturaz.net>
|
||||
# Author: Naoya Nakazawa <naoya.n@gmail.com>
|
||||
#
|
||||
# This module is proudly sponsored by iGeolise (www.igeolise.com) and
|
||||
# Tiny Lab Productions (www.tinylabproductions.com).
|
||||
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||
|
||||
from __future__ import absolute_import, division, print_function
|
||||
__metaclass__ = type
|
||||
|
||||
|
||||
ANSIBLE_METADATA = {'metadata_version': '1.1',
|
||||
'status': ['preview'],
|
||||
'supported_by': 'community'}
|
||||
|
||||
|
||||
DOCUMENTATION = '''
|
||||
---
|
||||
module: datadog_event
|
||||
short_description: Posts events to Datadog service
|
||||
description:
|
||||
- "Allows to post events to Datadog (www.datadoghq.com) service."
|
||||
- "Uses http://docs.datadoghq.com/api/#events API."
|
||||
author:
|
||||
- "Artūras `arturaz` Šlajus (@arturaz)"
|
||||
- "Naoya Nakazawa (@n0ts)"
|
||||
options:
|
||||
api_key:
|
||||
description: ["Your DataDog API key."]
|
||||
required: true
|
||||
app_key:
|
||||
description: ["Your DataDog app key."]
|
||||
required: true
|
||||
title:
|
||||
description: ["The event title."]
|
||||
required: true
|
||||
text:
|
||||
description: ["The body of the event."]
|
||||
required: true
|
||||
date_happened:
|
||||
description:
|
||||
- POSIX timestamp of the event.
|
||||
- Default value is now.
|
||||
default: now
|
||||
priority:
|
||||
description: ["The priority of the event."]
|
||||
default: normal
|
||||
choices: [normal, low]
|
||||
host:
|
||||
description: ["Host name to associate with the event."]
|
||||
default: "{{ ansible_hostname }}"
|
||||
tags:
|
||||
description: ["Comma separated list of tags to apply to the event."]
|
||||
alert_type:
|
||||
description: ["Type of alert."]
|
||||
default: info
|
||||
choices: ['error', 'warning', 'info', 'success']
|
||||
aggregation_key:
|
||||
description: ["An arbitrary string to use for aggregation."]
|
||||
validate_certs:
|
||||
description:
|
||||
- If C(no), SSL certificates will not be validated. This should only be used
|
||||
on personally controlled sites using self-signed certificates.
|
||||
type: bool
|
||||
default: 'yes'
|
||||
'''
|
||||
|
||||
EXAMPLES = '''
|
||||
# Post an event with low priority
|
||||
- datadog_event:
|
||||
title: Testing from ansible
|
||||
text: Test
|
||||
priority: low
|
||||
api_key: 9775a026f1ca7d1c6c5af9d94d9595a4
|
||||
app_key: j4JyCYfefWHhgFgiZUqRm63AXHNZQyPGBfJtAzmN
|
||||
# Post an event with several tags
|
||||
- datadog_event:
|
||||
title: Testing from ansible
|
||||
text: Test
|
||||
api_key: 9775a026f1ca7d1c6c5af9d94d9595a4
|
||||
app_key: j4JyCYfefWHhgFgiZUqRm63AXHNZQyPGBfJtAzmN
|
||||
tags: 'aa,bb,#host:{{ inventory_hostname }}'
|
||||
'''
|
||||
|
||||
import platform
|
||||
import traceback
|
||||
|
||||
# Import Datadog
|
||||
DATADOG_IMP_ERR = None
|
||||
try:
|
||||
from datadog import initialize, api
|
||||
HAS_DATADOG = True
|
||||
except Exception:
|
||||
DATADOG_IMP_ERR = traceback.format_exc()
|
||||
HAS_DATADOG = False
|
||||
|
||||
from ansible.module_utils.basic import AnsibleModule, missing_required_lib
|
||||
from ansible.module_utils._text import to_native
|
||||
|
||||
|
||||
def main():
    """Parse module arguments, initialize the Datadog client, then post the event."""
    module = AnsibleModule(
        argument_spec=dict(
            api_key=dict(required=True, no_log=True),
            app_key=dict(required=True, no_log=True),
            title=dict(required=True),
            text=dict(required=True),
            date_happened=dict(required=False, default=None, type='int'),
            priority=dict(
                required=False, default='normal', choices=['normal', 'low']
            ),
            host=dict(required=False, default=None),
            tags=dict(required=False, default=None, type='list'),
            alert_type=dict(
                required=False, default='info',
                choices=['error', 'warning', 'info', 'success']
            ),
            aggregation_key=dict(required=False, default=None),
            validate_certs=dict(default='yes', type='bool'),
        )
    )

    # Without the datadog client library there is nothing we can do.
    if not HAS_DATADOG:
        module.fail_json(msg=missing_required_lib('datadogpy'), exception=DATADOG_IMP_ERR)

    initialize(api_key=module.params['api_key'],
               app_key=module.params['app_key'])

    _post_event(module)
|
||||
|
||||
|
||||
def _post_event(module):
    """Create the Datadog event and exit the module with the API result."""
    try:
        host = module.params['host']
        if host is None:
            # Fall back to the machine's short hostname, as documented.
            host = platform.node().split('.')[0]
            module.params['host'] = host
        result = api.Event.create(
            title=module.params['title'],
            text=module.params['text'],
            host=host,
            tags=module.params['tags'],
            priority=module.params['priority'],
            alert_type=module.params['alert_type'],
            aggregation_key=module.params['aggregation_key'],
            source_type_name='ansible',
        )
        if result['status'] != 'ok':
            module.fail_json(msg=result)
        module.exit_json(changed=True, msg=result)
    except Exception as e:
        module.fail_json(msg=to_native(e), exception=traceback.format_exc())


if __name__ == '__main__':
    main()
|
||||
396
plugins/modules/monitoring/datadog/datadog_monitor.py
Normal file
396
plugins/modules/monitoring/datadog/datadog_monitor.py
Normal file
@@ -0,0 +1,396 @@
|
||||
#!/usr/bin/python
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
# Copyright: (c) 2015, Sebastian Kornehl <sebastian.kornehl@asideas.de>
|
||||
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||
|
||||
from __future__ import absolute_import, division, print_function
|
||||
__metaclass__ = type
|
||||
|
||||
|
||||
ANSIBLE_METADATA = {'metadata_version': '1.1',
|
||||
'status': ['preview'],
|
||||
'supported_by': 'community'}
|
||||
|
||||
|
||||
DOCUMENTATION = '''
|
||||
---
|
||||
module: datadog_monitor
|
||||
short_description: Manages Datadog monitors
|
||||
description:
|
||||
- Manages monitors within Datadog.
|
||||
- Options as described on https://docs.datadoghq.com/api/.
|
||||
author: Sebastian Kornehl (@skornehl)
|
||||
requirements: [datadog]
|
||||
options:
|
||||
api_key:
|
||||
description:
|
||||
- Your Datadog API key.
|
||||
required: true
|
||||
type: str
|
||||
api_host:
|
||||
description:
|
||||
- The URL to the Datadog API. Default value is C(https://api.datadoghq.com).
|
||||
- This value can also be set with the C(DATADOG_HOST) environment variable.
|
||||
required: false
|
||||
type: str
|
||||
app_key:
|
||||
description:
|
||||
- Your Datadog app key.
|
||||
required: true
|
||||
type: str
|
||||
state:
|
||||
description:
|
||||
- The designated state of the monitor.
|
||||
required: true
|
||||
choices: ['present', 'absent', 'mute', 'unmute']
|
||||
type: str
|
||||
tags:
|
||||
description:
|
||||
- A list of tags to associate with your monitor when creating or updating.
|
||||
- This can help you categorize and filter monitors.
|
||||
type: list
|
||||
type:
|
||||
description:
|
||||
- The type of the monitor.
|
||||
choices: ['metric alert', 'service check', 'event alert', 'process alert']
|
||||
type: str
|
||||
query:
|
||||
description:
|
||||
- The monitor query to notify on.
|
||||
- Syntax varies depending on what type of monitor you are creating.
|
||||
type: str
|
||||
name:
|
||||
description:
|
||||
- The name of the alert.
|
||||
required: true
|
||||
type: str
|
||||
notification_message:
|
||||
description:
|
||||
- A message to include with notifications for this monitor.
|
||||
- Email notifications can be sent to specific users by using the same '@username' notation as events.
|
||||
- Monitor message template variables can be accessed by using double square brackets, i.e '[[' and ']]'.
|
||||
- C(message) alias is deprecated in Ansible 2.10, since it is used internally by Ansible Core Engine.
|
||||
type: str
|
||||
aliases: [ 'message' ]
|
||||
silenced:
|
||||
description:
|
||||
- Dictionary of scopes to silence, with timestamps or None.
|
||||
- Each scope will be muted until the given POSIX timestamp or forever if the value is None.
|
||||
default: ""
|
||||
notify_no_data:
|
||||
description:
|
||||
- Whether this monitor will notify when data stops reporting.
|
||||
type: bool
|
||||
default: 'no'
|
||||
no_data_timeframe:
|
||||
description:
|
||||
- The number of minutes before a monitor will notify when data stops reporting.
|
||||
- Must be at least 2x the monitor timeframe for metric alerts or 2 minutes for service checks.
|
||||
default: 2x timeframe for metric, 2 minutes for service
|
||||
type: str
|
||||
timeout_h:
|
||||
description:
|
||||
- The number of hours of the monitor not reporting data before it will automatically resolve from a triggered state.
|
||||
type: str
|
||||
renotify_interval:
|
||||
description:
|
||||
- The number of minutes after the last notification before a monitor will re-notify on the current status.
|
||||
- It will only re-notify if it is not resolved.
|
||||
type: str
|
||||
escalation_message:
|
||||
description:
|
||||
- A message to include with a re-notification. Supports the '@username' notification we allow elsewhere.
|
||||
- Not applicable if I(renotify_interval=None).
|
||||
type: str
|
||||
notify_audit:
|
||||
description:
|
||||
- Whether tagged users will be notified on changes to this monitor.
|
||||
type: bool
|
||||
default: 'no'
|
||||
thresholds:
|
||||
description:
|
||||
- A dictionary of thresholds by status.
|
||||
- Only available for service checks and metric alerts.
|
||||
- Because each of them can have multiple thresholds, we do not define them directly in the query.
|
||||
default: {'ok': 1, 'critical': 1, 'warning': 1}
|
||||
locked:
|
||||
description:
|
||||
- Whether changes to this monitor should be restricted to the creator or admins.
|
||||
type: bool
|
||||
default: 'no'
|
||||
require_full_window:
|
||||
description:
|
||||
- Whether this monitor needs a full window of data before it gets evaluated.
|
||||
- We highly recommend you set this to False for sparse metrics, otherwise some evaluations will be skipped.
|
||||
type: bool
|
||||
new_host_delay:
|
||||
description:
|
||||
- A positive integer representing the number of seconds to wait before evaluating the monitor for new hosts.
|
||||
- This gives the host time to fully initialize.
|
||||
type: str
|
||||
evaluation_delay:
|
||||
description:
|
||||
- Time to delay evaluation (in seconds).
|
||||
- Effective for sparse values.
|
||||
type: str
|
||||
id:
|
||||
description:
|
||||
- The ID of the alert.
|
||||
- If set, will be used instead of the name to locate the alert.
|
||||
type: str
|
||||
'''
|
||||
|
||||
EXAMPLES = '''
|
||||
# Create a metric monitor
|
||||
- datadog_monitor:
|
||||
type: "metric alert"
|
||||
name: "Test monitor"
|
||||
state: "present"
|
||||
query: "datadog.agent.up.over('host:host1').last(2).count_by_status()"
|
||||
notification_message: "Host [[host.name]] with IP [[host.ip]] is failing to report to datadog."
|
||||
api_key: "9775a026f1ca7d1c6c5af9d94d9595a4"
|
||||
app_key: "87ce4a24b5553d2e482ea8a8500e71b8ad4554ff"
|
||||
|
||||
# Deletes a monitor
|
||||
- datadog_monitor:
|
||||
name: "Test monitor"
|
||||
state: "absent"
|
||||
api_key: "9775a026f1ca7d1c6c5af9d94d9595a4"
|
||||
app_key: "87ce4a24b5553d2e482ea8a8500e71b8ad4554ff"
|
||||
|
||||
# Mutes a monitor
|
||||
- datadog_monitor:
|
||||
name: "Test monitor"
|
||||
state: "mute"
|
||||
silenced: '{"*":None}'
|
||||
api_key: "9775a026f1ca7d1c6c5af9d94d9595a4"
|
||||
app_key: "87ce4a24b5553d2e482ea8a8500e71b8ad4554ff"
|
||||
|
||||
# Unmutes a monitor
|
||||
- datadog_monitor:
|
||||
name: "Test monitor"
|
||||
state: "unmute"
|
||||
api_key: "9775a026f1ca7d1c6c5af9d94d9595a4"
|
||||
app_key: "87ce4a24b5553d2e482ea8a8500e71b8ad4554ff"
|
||||
|
||||
# Use datadoghq.eu platform instead of datadoghq.com
|
||||
- datadog_monitor:
|
||||
name: "Test monitor"
|
||||
state: "absent"
|
||||
api_host: https://api.datadoghq.eu
|
||||
api_key: "9775a026f1ca7d1c6c5af9d94d9595a4"
|
||||
app_key: "87ce4a24b5553d2e482ea8a8500e71b8ad4554ff"
|
||||
'''
|
||||
import traceback
|
||||
|
||||
# Import Datadog
|
||||
DATADOG_IMP_ERR = None
|
||||
try:
|
||||
from datadog import initialize, api
|
||||
HAS_DATADOG = True
|
||||
except Exception:
|
||||
DATADOG_IMP_ERR = traceback.format_exc()
|
||||
HAS_DATADOG = False
|
||||
|
||||
from ansible.module_utils.basic import AnsibleModule, missing_required_lib
|
||||
from ansible.module_utils._text import to_native
|
||||
|
||||
|
||||
def main():
    """Entry point: parse options, validate the Datadog credentials, then
    dispatch to the create/delete/mute/unmute handler for the requested state."""
    module = AnsibleModule(
        argument_spec=dict(
            api_key=dict(required=True, no_log=True),
            api_host=dict(required=False),
            app_key=dict(required=True, no_log=True),
            state=dict(required=True, choices=['present', 'absent', 'mute', 'unmute']),
            type=dict(required=False, choices=['metric alert', 'service check', 'event alert', 'process alert']),
            name=dict(required=True),
            query=dict(required=False),
            # Renamed from 'message' (kept as a deprecated alias) because
            # 'message' clashes with a reserved module keyword -- see the
            # explicit rejection further down.
            notification_message=dict(required=False, default=None, aliases=['message'], deprecated_aliases=[dict(name='message', version='2.14')]),
            silenced=dict(required=False, default=None, type='dict'),
            notify_no_data=dict(required=False, default=False, type='bool'),
            no_data_timeframe=dict(required=False, default=None),
            timeout_h=dict(required=False, default=None),
            renotify_interval=dict(required=False, default=None),
            escalation_message=dict(required=False, default=None),
            notify_audit=dict(required=False, default=False, type='bool'),
            thresholds=dict(required=False, type='dict', default=None),
            tags=dict(required=False, type='list', default=None),
            locked=dict(required=False, default=False, type='bool'),
            require_full_window=dict(required=False, default=None, type='bool'),
            new_host_delay=dict(required=False, default=None),
            evaluation_delay=dict(required=False, default=None),
            id=dict(required=False)
        )
    )

    # Prepare Datadog
    if not HAS_DATADOG:
        module.fail_json(msg=missing_required_lib('datadogpy'), exception=DATADOG_IMP_ERR)

    # Refuse the deprecated 'message' spelling outright.
    # NOTE(review): this tests key *presence* in module.params; confirm alias
    # handling only adds a 'message' key when the user actually supplied it,
    # otherwise this branch would trigger unconditionally.
    if 'message' in module.params:
        module.fail_json(msg="'message' is reserved keyword, please change this parameter to 'notification_message'")

    # api_host may be None; the datadog client then falls back to its default
    # endpoint -- TODO confirm against the datadogpy initialize() docs.
    options = {
        'api_key': module.params['api_key'],
        'api_host': module.params['api_host'],
        'app_key': module.params['app_key']
    }

    initialize(**options)

    # Check if api_key and app_key is correct or not
    # if not, then fail here.
    response = api.Monitor.get_all()
    if isinstance(response, dict):
        msg = response.get('errors', None)
        if msg:
            module.fail_json(msg="Failed to connect Datadog server using given app_key and api_key : {0}".format(msg[0]))

    # Dispatch on the requested state; each handler exits the module itself.
    if module.params['state'] == 'present':
        install_monitor(module)
    elif module.params['state'] == 'absent':
        delete_monitor(module)
    elif module.params['state'] == 'mute':
        mute_monitor(module)
    elif module.params['state'] == 'unmute':
        unmute_monitor(module)
|
||||
|
||||
|
||||
def _fix_template_vars(message):
|
||||
if message:
|
||||
return message.replace('[[', '{{').replace(']]', '}}')
|
||||
return message
|
||||
|
||||
|
||||
def _get_monitor(module):
    """Return the monitor selected by ``id`` (preferred) or by ``name``.

    Fails the module when an explicit id cannot be retrieved; returns an
    empty dict when no monitor matches by name.
    """
    monitor_id = module.params['id']
    if monitor_id is None:
        wanted_name = _fix_template_vars(module.params['name'])
        for candidate in api.Monitor.get_all():
            if candidate['name'] == wanted_name:
                return candidate
        return {}

    monitor = api.Monitor.get(monitor_id)
    if 'errors' in monitor:
        module.fail_json(msg="Failed to retrieve monitor with id %s, errors are %s" % (monitor_id, str(monitor['errors'])))
    return monitor
|
||||
|
||||
|
||||
def _post_monitor(module, options):
    """Create a new monitor from the module parameters and exit the module.

    Exits with ``changed=True`` on success, fails on API errors or exceptions.
    """
    try:
        create_args = {
            'type': module.params['type'],
            'query': module.params['query'],
            'name': _fix_template_vars(module.params['name']),
            'message': _fix_template_vars(module.params['notification_message']),
            'escalation_message': _fix_template_vars(module.params['escalation_message']),
            'options': options,
        }
        # Only forward tags when the user actually supplied them.
        tags = module.params['tags']
        if tags is not None:
            create_args['tags'] = tags
        msg = api.Monitor.create(**create_args)
        if 'errors' in msg:
            module.fail_json(msg=str(msg['errors']))
        else:
            module.exit_json(changed=True, msg=msg)
    except Exception as e:
        module.fail_json(msg=to_native(e), exception=traceback.format_exc())
|
||||
|
||||
|
||||
def _equal_dicts(a, b, ignore_keys):
|
||||
ka = set(a).difference(ignore_keys)
|
||||
kb = set(b).difference(ignore_keys)
|
||||
return ka == kb and all(a[k] == b[k] for k in ka)
|
||||
|
||||
|
||||
def _update_monitor(module, monitor, options):
    """Update an existing monitor in place and exit the module.

    Exits with ``changed=False`` when the API response equals the monitor we
    started from (ignoring server-maintained bookkeeping fields), otherwise
    ``changed=True``. Fails on API errors or exceptions.
    """
    try:
        kwargs = dict(id=monitor['id'], query=module.params['query'],
                      name=_fix_template_vars(module.params['name']),
                      # Bug fix: read the canonical 'notification_message' option
                      # (as _post_monitor does). The old module.params['message']
                      # raised KeyError whenever the user supplied
                      # notification_message instead of the deprecated alias.
                      message=_fix_template_vars(module.params['notification_message']),
                      escalation_message=_fix_template_vars(module.params['escalation_message']),
                      options=options)
        if module.params['tags'] is not None:
            kwargs['tags'] = module.params['tags']
        msg = api.Monitor.update(**kwargs)

        if 'errors' in msg:
            module.fail_json(msg=str(msg['errors']))
        elif _equal_dicts(msg, monitor, ['creator', 'overall_state', 'modified', 'matching_downtimes', 'overall_state_modified']):
            module.exit_json(changed=False, msg=msg)
        else:
            module.exit_json(changed=True, msg=msg)
    except Exception as e:
        module.fail_json(msg=to_native(e), exception=traceback.format_exc())
|
||||
|
||||
|
||||
def install_monitor(module):
    """Converge the monitor to the requested definition.

    Builds the Datadog options payload from the module parameters, then
    creates the monitor when it does not exist yet or updates it otherwise.
    """
    params = module.params
    options = {
        "silenced": params['silenced'],
        "notify_no_data": module.boolean(params['notify_no_data']),
        "no_data_timeframe": params['no_data_timeframe'],
        "timeout_h": params['timeout_h'],
        "renotify_interval": params['renotify_interval'],
        "escalation_message": params['escalation_message'],
        "notify_audit": module.boolean(params['notify_audit']),
        "locked": module.boolean(params['locked']),
        "require_full_window": params['require_full_window'],
        "new_host_delay": params['new_host_delay'],
        "evaluation_delay": params['evaluation_delay']
    }

    # Service checks always need thresholds; fall back to sane defaults.
    monitor_type = params['type']
    if monitor_type == "service check":
        options["thresholds"] = params['thresholds'] or {'ok': 1, 'critical': 1, 'warning': 1}
    # Metric/log alerts only take thresholds when explicitly provided.
    if monitor_type in ["metric alert", "log alert"] and params['thresholds'] is not None:
        options["thresholds"] = params['thresholds']

    existing = _get_monitor(module)
    if existing:
        _update_monitor(module, existing, options)
    else:
        _post_monitor(module, options)
|
||||
|
||||
|
||||
def delete_monitor(module):
    """Delete the selected monitor; no-op (changed=False) when it is absent."""
    monitor = _get_monitor(module)
    if not monitor:
        module.exit_json(changed=False)
    try:
        result = api.Monitor.delete(monitor['id'])
    except Exception as e:
        module.fail_json(msg=to_native(e), exception=traceback.format_exc())
    else:
        module.exit_json(changed=True, msg=result)
|
||||
|
||||
|
||||
def mute_monitor(module):
    """Mute the selected monitor, optionally scoped via the silenced mapping.

    Fails when the monitor is missing or already muted; exits unchanged when
    the requested silenced scopes already match the monitor's.
    """
    monitor = _get_monitor(module)
    silenced = module.params['silenced']
    if not monitor:
        module.fail_json(msg="Monitor %s not found!" % module.params['name'])
    elif monitor['options']['silenced']:
        module.fail_json(msg="Monitor is already muted. Datadog does not allow to modify muted alerts, consider unmuting it first.")
    elif (silenced is not None and len(set(monitor['options']['silenced']) ^ set(silenced)) == 0):
        # Symmetric difference of the scope keys is empty: nothing to change.
        module.exit_json(changed=False)
    try:
        if silenced is None or silenced == "":
            result = api.Monitor.mute(id=monitor['id'])
        else:
            result = api.Monitor.mute(id=monitor['id'], silenced=silenced)
        module.exit_json(changed=True, msg=result)
    except Exception as e:
        module.fail_json(msg=to_native(e), exception=traceback.format_exc())
|
||||
|
||||
|
||||
def unmute_monitor(module):
    """Unmute the selected monitor; no-op (changed=False) when not muted."""
    monitor = _get_monitor(module)
    if not monitor:
        module.fail_json(msg="Monitor %s not found!" % module.params['name'])
    elif not monitor['options']['silenced']:
        module.exit_json(changed=False)
    try:
        result = api.Monitor.unmute(monitor['id'])
    except Exception as e:
        module.fail_json(msg=to_native(e), exception=traceback.format_exc())
    else:
        module.exit_json(changed=True, msg=result)
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
main()
|
||||
126
plugins/modules/monitoring/honeybadger_deployment.py
Normal file
126
plugins/modules/monitoring/honeybadger_deployment.py
Normal file
@@ -0,0 +1,126 @@
|
||||
#!/usr/bin/python
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
# Copyright 2014 Benjamin Curtis <benjamin.curtis@gmail.com>
|
||||
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||
|
||||
from __future__ import absolute_import, division, print_function
|
||||
__metaclass__ = type
|
||||
|
||||
ANSIBLE_METADATA = {'metadata_version': '1.1',
|
||||
'status': ['preview'],
|
||||
'supported_by': 'community'}
|
||||
|
||||
|
||||
DOCUMENTATION = '''
|
||||
---
|
||||
module: honeybadger_deployment
|
||||
author: "Benjamin Curtis (@stympy)"
|
||||
short_description: Notify Honeybadger.io about app deployments
|
||||
description:
|
||||
- Notify Honeybadger.io about app deployments (see http://docs.honeybadger.io/article/188-deployment-tracking)
|
||||
options:
|
||||
token:
|
||||
description:
|
||||
- API token.
|
||||
required: true
|
||||
environment:
|
||||
description:
|
||||
- The environment name, typically 'production', 'staging', etc.
|
||||
required: true
|
||||
user:
|
||||
description:
|
||||
- The username of the person doing the deployment
|
||||
repo:
|
||||
description:
|
||||
- URL of the project repository
|
||||
revision:
|
||||
description:
|
||||
- A hash, number, tag, or other identifier showing what revision was deployed
|
||||
url:
|
||||
description:
|
||||
- Optional URL to submit the notification to.
|
||||
default: "https://api.honeybadger.io/v1/deploys"
|
||||
validate_certs:
|
||||
description:
|
||||
- If C(no), SSL certificates for the target url will not be validated. This should only be used
|
||||
on personally controlled sites using self-signed certificates.
|
||||
type: bool
|
||||
default: 'yes'
|
||||
|
||||
'''
|
||||
|
||||
EXAMPLES = '''
|
||||
- honeybadger_deployment:
|
||||
token: AAAAAA
|
||||
environment: staging
|
||||
user: ansible
|
||||
revision: b6826b8
|
||||
repo: 'git@github.com:user/repo.git'
|
||||
'''
|
||||
|
||||
RETURN = '''# '''
|
||||
|
||||
import traceback
|
||||
|
||||
from ansible.module_utils.basic import AnsibleModule
|
||||
from ansible.module_utils.six.moves.urllib.parse import urlencode
|
||||
from ansible.module_utils._text import to_native
|
||||
from ansible.module_utils.urls import fetch_url
|
||||
|
||||
|
||||
# ===========================================
|
||||
# Module execution.
|
||||
#
|
||||
|
||||
def main():
    """Post a deployment notification to the Honeybadger deploys endpoint.

    Builds the form payload from the module parameters and submits it; check
    mode short-circuits with changed=True. Success is an HTTP 201 response.
    """
    module = AnsibleModule(
        argument_spec=dict(
            token=dict(required=True, no_log=True),
            environment=dict(required=True),
            user=dict(required=False),
            repo=dict(required=False),
            revision=dict(required=False),
            url=dict(required=False, default='https://api.honeybadger.io/v1/deploys'),
            validate_certs=dict(default='yes', type='bool'),
        ),
        supports_check_mode=True
    )

    # Map module options to the deploy[...] form fields Honeybadger expects,
    # skipping any option the user left empty.
    field_map = (
        ("environment", "deploy[environment]"),
        ("user", "deploy[local_username]"),
        ("repo", "deploy[repository]"),
        ("revision", "deploy[revision]"),
    )
    params = {}
    for option, form_field in field_map:
        if module.params[option]:
            params[form_field] = module.params[option]

    params["api_key"] = module.params["token"]

    url = module.params.get('url')

    # If we're in check mode, just exit pretending like we succeeded
    if module.check_mode:
        module.exit_json(changed=True)

    try:
        response, info = fetch_url(module, url, data=urlencode(params))
    except Exception as e:
        module.fail_json(msg='Unable to notify Honeybadger: %s' % to_native(e), exception=traceback.format_exc())
    else:
        if info['status'] == 201:
            module.exit_json(changed=True)
        else:
            module.fail_json(msg="HTTP result code: %d connecting to %s" % (info['status'], url))
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
main()
|
||||
131
plugins/modules/monitoring/icinga2_feature.py
Normal file
131
plugins/modules/monitoring/icinga2_feature.py
Normal file
@@ -0,0 +1,131 @@
|
||||
#!/usr/bin/python
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
# Copyright (c) 2016, Loic Blot <loic.blot@unix-experience.fr>
|
||||
# Copyright (c) 2018, Ansible Project
|
||||
# Sponsored by Infopro Digital. http://www.infopro-digital.com/
|
||||
# Sponsored by E.T.A.I. http://www.etai.fr/
|
||||
#
|
||||
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||
|
||||
from __future__ import absolute_import, division, print_function
|
||||
__metaclass__ = type
|
||||
|
||||
|
||||
ANSIBLE_METADATA = {
|
||||
'metadata_version': '1.1',
|
||||
'status': ['preview'],
|
||||
'supported_by': 'community'
|
||||
}
|
||||
|
||||
|
||||
DOCUMENTATION = '''
|
||||
---
|
||||
module: icinga2_feature
|
||||
|
||||
short_description: Manage Icinga2 feature
|
||||
description:
|
||||
- This module can be used to enable or disable an Icinga2 feature.
|
||||
author: "Loic Blot (@nerzhul)"
|
||||
options:
|
||||
name:
|
||||
description:
|
||||
- This is the feature name to enable or disable.
|
||||
required: True
|
||||
state:
|
||||
description:
|
||||
- If set to C(present) and feature is disabled, then feature is enabled.
|
||||
- If set to C(present) and feature is already enabled, then nothing is changed.
|
||||
- If set to C(absent) and feature is enabled, then feature is disabled.
|
||||
- If set to C(absent) and feature is already disabled, then nothing is changed.
|
||||
choices: [ "present", "absent" ]
|
||||
default: present
|
||||
'''
|
||||
|
||||
EXAMPLES = '''
|
||||
- name: Enable ido-pgsql feature
|
||||
icinga2_feature:
|
||||
name: ido-pgsql
|
||||
state: present
|
||||
|
||||
- name: Disable api feature
|
||||
icinga2_feature:
|
||||
name: api
|
||||
state: absent
|
||||
'''
|
||||
|
||||
RETURN = '''
|
||||
#
|
||||
'''
|
||||
|
||||
import re
|
||||
from ansible.module_utils.basic import AnsibleModule
|
||||
|
||||
|
||||
class Icinga2FeatureHelper:
    """Wrap the ``icinga2 feature`` CLI to enable or disable one feature."""

    def __init__(self, module):
        # AnsibleModule handle for params, run_command and exit/fail helpers.
        self.module = module
        # get_bin_path(..., True) fails the module if icinga2 is not on PATH.
        self._icinga2 = module.get_bin_path('icinga2', True)
        self.feature_name = self.module.params['name']
        self.state = self.module.params['state']

    def _exec(self, args):
        """Run ``icinga2 feature <args>`` and return (rc, stdout)."""
        cmd = [self._icinga2, 'feature']
        # check_rc=True makes run_command fail the whole module on a non-zero
        # exit code, so callers normally only ever observe rc == 0.
        rc, out, err = self.module.run_command(cmd + args, check_rc=True)
        return rc, out

    def manage(self):
        """Converge the feature to the requested state and exit the module."""
        rc, out = self._exec(["list"])
        if rc != 0:
            # NOTE(review): with check_rc=True in _exec this branch looks
            # unreachable; kept as a defensive check.
            self.module.fail_json(msg="Unable to list icinga2 features. "
                                      "Ensure icinga2 is installed and present in binary path.")

        # If feature is already in good state, just exit
        if (re.search("Disabled features:.* %s[ \n]" % self.feature_name, out) and self.state == "absent") or \
           (re.search("Enabled features:.* %s[ \n]" % self.feature_name, out) and self.state == "present"):
            self.module.exit_json(changed=False)

        # A change would be made; in check mode report it without acting.
        if self.module.check_mode:
            self.module.exit_json(changed=True)

        feature_enable_str = "enable" if self.state == "present" else "disable"

        rc, out = self._exec([feature_enable_str, self.feature_name])

        change_applied = False
        if self.state == "present":
            if rc != 0:
                self.module.fail_json(msg="Failed to %s feature %s."
                                          " icinga2 command returned %s" % (feature_enable_str,
                                                                            self.feature_name,
                                                                            out))

            # icinga2 reports "already enabled" when nothing was changed.
            if re.search("already enabled", out) is None:
                change_applied = True
        else:
            if rc == 0:
                change_applied = True
            # RC is not 0 for this already disabled feature, handle it as no change applied
            elif re.search("Cannot disable feature '%s'. Target file .* does not exist" % self.feature_name, out):
                change_applied = False
            else:
                self.module.fail_json(msg="Failed to disable feature. Command returns %s" % out)

        self.module.exit_json(changed=change_applied)
|
||||
|
||||
|
||||
def main():
    """Module entry point: parse arguments and delegate to the helper."""
    arg_spec = dict(
        name=dict(type='str', required=True),
        state=dict(type='str', choices=["present", "absent"], default="present")
    )
    module = AnsibleModule(
        argument_spec=arg_spec,
        supports_check_mode=True
    )

    # Force a C locale so the icinga2 output we parse is not localized.
    module.run_command_environ_update = dict(LANG='C', LC_ALL='C', LC_MESSAGES='C', LC_CTYPE='C')
    helper = Icinga2FeatureHelper(module)
    helper.manage()
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
main()
|
||||
322
plugins/modules/monitoring/icinga2_host.py
Normal file
322
plugins/modules/monitoring/icinga2_host.py
Normal file
@@ -0,0 +1,322 @@
|
||||
#!/usr/bin/python
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
# This module is proudly sponsored by CGI (www.cgi.com) and
|
||||
# KPN (www.kpn.com).
|
||||
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||
|
||||
from __future__ import absolute_import, division, print_function
|
||||
__metaclass__ = type
|
||||
|
||||
|
||||
ANSIBLE_METADATA = {'metadata_version': '1.1',
|
||||
'status': ['preview'],
|
||||
'supported_by': 'community'}
|
||||
|
||||
DOCUMENTATION = '''
|
||||
---
|
||||
module: icinga2_host
|
||||
short_description: Manage a host in Icinga2
|
||||
description:
|
||||
- "Add or remove a host to Icinga2 through the API."
|
||||
- "See U(https://www.icinga.com/docs/icinga2/latest/doc/12-icinga2-api/)"
|
||||
author: "Jurgen Brand (@t794104)"
|
||||
options:
|
||||
url:
|
||||
description:
|
||||
- HTTP, HTTPS, or FTP URL in the form (http|https|ftp)://[user[:pass]]@host.domain[:port]/path
|
||||
required: true
|
||||
use_proxy:
|
||||
description:
|
||||
- If C(no), it will not use a proxy, even if one is defined in
|
||||
an environment variable on the target hosts.
|
||||
type: bool
|
||||
default: 'yes'
|
||||
validate_certs:
|
||||
description:
|
||||
- If C(no), SSL certificates will not be validated. This should only be used
|
||||
on personally controlled sites using self-signed certificates.
|
||||
type: bool
|
||||
default: 'yes'
|
||||
url_username:
|
||||
description:
|
||||
- The username for use in HTTP basic authentication.
|
||||
- This parameter can be used without C(url_password) for sites that allow empty passwords.
|
||||
url_password:
|
||||
description:
|
||||
- The password for use in HTTP basic authentication.
|
||||
- If the C(url_username) parameter is not specified, the C(url_password) parameter will not be used.
|
||||
force_basic_auth:
|
||||
description:
|
||||
- httplib2, the library used by the uri module only sends authentication information when a webservice
|
||||
responds to an initial request with a 401 status. Since some basic auth services do not properly
|
||||
send a 401, logins will fail. This option forces the sending of the Basic authentication header
|
||||
upon initial request.
|
||||
type: bool
|
||||
default: 'no'
|
||||
client_cert:
|
||||
description:
|
||||
- PEM formatted certificate chain file to be used for SSL client
|
||||
authentication. This file can also include the key as well, and if
|
||||
the key is included, C(client_key) is not required.
|
||||
client_key:
|
||||
description:
|
||||
- PEM formatted file that contains your private key to be used for SSL
|
||||
client authentication. If C(client_cert) contains both the certificate
|
||||
and key, this option is not required.
|
||||
state:
|
||||
description:
|
||||
- Apply feature state.
|
||||
choices: [ "present", "absent" ]
|
||||
default: present
|
||||
name:
|
||||
description:
|
||||
- Name used to create / delete the host. This does not need to be the FQDN, but does needs to be unique.
|
||||
required: true
|
||||
zone:
|
||||
description:
|
||||
- The zone from where this host should be polled.
|
||||
template:
|
||||
description:
|
||||
- The template used to define the host.
|
||||
- Template cannot be modified after object creation.
|
||||
check_command:
|
||||
description:
|
||||
- The command used to check if the host is alive.
|
||||
default: "hostalive"
|
||||
display_name:
|
||||
description:
|
||||
- The name used to display the host.
|
||||
default: if none is give it is the value of the <name> parameter
|
||||
ip:
|
||||
description:
|
||||
- The IP address of the host.
|
||||
required: true
|
||||
variables:
|
||||
description:
|
||||
- List of variables.
|
||||
'''
|
||||
|
||||
EXAMPLES = '''
|
||||
- name: Add host to icinga
|
||||
icinga2_host:
|
||||
url: "https://icinga2.example.com"
|
||||
url_username: "ansible"
|
||||
url_password: "a_secret"
|
||||
state: present
|
||||
name: "{{ ansible_fqdn }}"
|
||||
ip: "{{ ansible_default_ipv4.address }}"
|
||||
delegate_to: 127.0.0.1
|
||||
'''
|
||||
|
||||
RETURN = '''
|
||||
name:
|
||||
description: The name used to create, modify or delete the host
|
||||
type: str
|
||||
returned: always
|
||||
data:
|
||||
description: The data structure used for create, modify or delete of the host
|
||||
type: dict
|
||||
returned: always
|
||||
'''
|
||||
|
||||
import json
|
||||
import os
|
||||
|
||||
from ansible.module_utils.basic import AnsibleModule
|
||||
from ansible.module_utils.urls import fetch_url, url_argument_spec
|
||||
|
||||
|
||||
# ===========================================
|
||||
# Icinga2 API class
|
||||
#
|
||||
class icinga2_api:
    """Thin wrapper around the Icinga2 REST API host object endpoints."""

    # AnsibleModule handle; assigned in __init__.
    module = None

    def __init__(self, module):
        self.module = module

    def call_url(self, path, data='', method='GET'):
        """Call *path* on the configured base URL.

        Returns a dict {'code': http_status, 'data': parsed_body}; on HTTP
        errors (>= 400) 'data' is the raw error body from fetch_url instead.
        """
        headers = {
            'Accept': 'application/json',
            # Icinga2 honours this override, which allows sending a request
            # body with the semantics of the given method.
            'X-HTTP-Method-Override': method,
        }
        url = self.module.params.get("url") + "/" + path
        rsp, info = fetch_url(module=self.module, url=url, data=data, headers=headers, method=method, use_proxy=self.module.params['use_proxy'])
        body = ''
        if rsp:
            body = json.loads(rsp.read())
        if info['status'] >= 400:
            body = info['body']
        return {'code': info['status'], 'data': body}

    def check_connection(self):
        """Return True when the API answers v1/status with HTTP 200."""
        ret = self.call_url('v1/status')
        if ret['code'] == 200:
            return True
        return False

    def exists(self, hostname):
        """Return True when exactly one host object matches *hostname*."""
        data = {
            "filter": "match(\"" + hostname + "\", host.name)",
        }
        ret = self.call_url(
            path="v1/objects/hosts",
            data=self.module.jsonify(data)
        )
        if ret['code'] == 200:
            if len(ret['data']['results']) == 1:
                return True
        return False

    def create(self, hostname, data):
        """PUT a new host object; returns the call_url result dict."""
        ret = self.call_url(
            path="v1/objects/hosts/" + hostname,
            data=self.module.jsonify(data),
            method="PUT"
        )
        return ret

    def delete(self, hostname):
        """DELETE the host object; cascade removes dependent objects too."""
        data = {"cascade": 1}
        ret = self.call_url(
            path="v1/objects/hosts/" + hostname,
            data=self.module.jsonify(data),
            method="DELETE"
        )
        return ret

    def modify(self, hostname, data):
        """POST updated attributes onto an existing host object."""
        ret = self.call_url(
            path="v1/objects/hosts/" + hostname,
            data=self.module.jsonify(data),
            method="POST"
        )
        return ret

    def diff(self, hostname, data):
        """Return True when any attr in *data* is missing or differs remotely.

        NOTE(review): assumes the host exists (callers check exists() first);
        otherwise results[0] would raise IndexError.
        """
        ret = self.call_url(
            path="v1/objects/hosts/" + hostname,
            method="GET"
        )
        changed = False
        ic_data = ret['data']['results'][0]
        for key in data['attrs']:
            if key not in ic_data['attrs'].keys():
                changed = True
            elif data['attrs'][key] != ic_data['attrs'][key]:
                changed = True
        return changed
|
||||
|
||||
|
||||
# ===========================================
|
||||
# Module execution.
|
||||
#
|
||||
def main():
    """Create, update or delete a host object through the Icinga2 REST API."""
    # use the predefined argument spec for url
    argument_spec = url_argument_spec()
    # remove unnecessary argument 'force'
    del argument_spec['force']
    # add our own arguments
    argument_spec.update(
        state=dict(default="present", choices=["absent", "present"]),
        name=dict(required=True, aliases=['host']),
        zone=dict(),
        template=dict(default=None),
        check_command=dict(default="hostalive"),
        display_name=dict(default=None),
        ip=dict(required=True),
        variables=dict(type='dict', default=None),
    )

    # Define the main module
    module = AnsibleModule(
        argument_spec=argument_spec,
        supports_check_mode=True
    )

    state = module.params["state"]
    name = module.params["name"]
    zone = module.params["zone"]
    # The host's own name is always the first template; an optional
    # user-provided template is appended after it.
    template = []
    template.append(name)
    if module.params["template"]:
        template.append(module.params["template"])
    check_command = module.params["check_command"]
    ip = module.params["ip"]
    display_name = module.params["display_name"]
    if not display_name:
        display_name = name
    variables = module.params["variables"]

    try:
        icinga = icinga2_api(module=module)
        icinga.check_connection()
    except Exception as e:
        module.fail_json(msg="unable to connect to Icinga. Exception message: %s" % (e))

    data = {
        'attrs': {
            'address': ip,
            'display_name': display_name,
            'check_command': check_command,
            'zone': zone,
            'vars': {
                'made_by': "ansible",
            },
            'templates': template,
        }
    }

    if variables:
        data['attrs']['vars'].update(variables)

    changed = False
    if icinga.exists(name):
        if state == "absent":
            if module.check_mode:
                module.exit_json(changed=True, name=name, data=data)
            else:
                try:
                    ret = icinga.delete(name)
                    if ret['code'] == 200:
                        changed = True
                    else:
                        module.fail_json(msg="bad return code deleting host: %s" % (ret['data']))
                except Exception as e:
                    module.fail_json(msg="exception deleting host: " + str(e))

        elif icinga.diff(name, data):
            if module.check_mode:
                # Bug fix: an update would be applied here, so check mode must
                # report changed=True (this previously reported changed=False,
                # inconsistent with the create/delete check-mode branches).
                module.exit_json(changed=True, name=name, data=data)

            # Template attribute is not allowed in modification
            del data['attrs']['templates']

            ret = icinga.modify(name, data)

            if ret['code'] == 200:
                changed = True
            else:
                module.fail_json(msg="bad return code modifying host: %s" % (ret['data']))

    else:
        if state == "present":
            if module.check_mode:
                changed = True
            else:
                try:
                    ret = icinga.create(name, data)
                    if ret['code'] == 200:
                        changed = True
                    else:
                        module.fail_json(msg="bad return code creating host: %s" % (ret['data']))
                except Exception as e:
                    module.fail_json(msg="exception creating host: " + str(e))

    module.exit_json(changed=changed, name=name, data=data)
|
||||
|
||||
|
||||
# import module snippets
|
||||
if __name__ == '__main__':
|
||||
main()
|
||||
163
plugins/modules/monitoring/librato_annotation.py
Normal file
163
plugins/modules/monitoring/librato_annotation.py
Normal file
@@ -0,0 +1,163 @@
|
||||
#!/usr/bin/python
|
||||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# (C) Seth Edwards, 2014
|
||||
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||
|
||||
from __future__ import absolute_import, division, print_function
|
||||
__metaclass__ = type
|
||||
|
||||
|
||||
ANSIBLE_METADATA = {'metadata_version': '1.1',
|
||||
'status': ['preview'],
|
||||
'supported_by': 'community'}
|
||||
|
||||
|
||||
DOCUMENTATION = '''
|
||||
---
|
||||
module: librato_annotation
|
||||
short_description: create an annotation in librato
|
||||
description:
|
||||
- Create an annotation event on the given annotation stream :name. If the annotation stream does not exist, it will be created automatically
|
||||
author: "Seth Edwards (@Sedward)"
|
||||
requirements: []
|
||||
options:
|
||||
user:
|
||||
description:
|
||||
- Librato account username
|
||||
required: true
|
||||
api_key:
|
||||
description:
|
||||
- Librato account api key
|
||||
required: true
|
||||
name:
|
||||
description:
|
||||
- The annotation stream name
|
||||
- If the annotation stream does not exist, it will be created automatically
|
||||
required: false
|
||||
title:
|
||||
description:
|
||||
- The title of an annotation is a string and may contain spaces
|
||||
- The title should be a short, high-level summary of the annotation e.g. v45 Deployment
|
||||
required: true
|
||||
source:
|
||||
description:
|
||||
- A string which describes the originating source of an annotation when that annotation is tracked across multiple members of a population
|
||||
required: false
|
||||
description:
|
||||
description:
|
||||
- The description contains extra metadata about a particular annotation
|
||||
- The description should contain specifics on the individual annotation e.g. Deployed 9b562b2 shipped new feature foo!
|
||||
required: false
|
||||
start_time:
|
||||
description:
|
||||
- The unix timestamp indicating the time at which the event referenced by this annotation started
|
||||
required: false
|
||||
end_time:
|
||||
description:
|
||||
- The unix timestamp indicating the time at which the event referenced by this annotation ended
|
||||
- For events that have a duration, this is a useful way to annotate the duration of the event
|
||||
required: false
|
||||
links:
|
||||
description:
|
||||
- See examples
|
||||
required: true
|
||||
'''
|
||||
|
||||
EXAMPLES = '''
|
||||
# Create a simple annotation event with a source
|
||||
- librato_annotation:
|
||||
user: user@example.com
|
||||
api_key: XXXXXXXXXXXXXXXXX
|
||||
title: App Config Change
|
||||
source: foo.bar
|
||||
description: This is a detailed description of the config change
|
||||
|
||||
# Create an annotation that includes a link
|
||||
- librato_annotation:
|
||||
user: user@example.com
|
||||
api_key: XXXXXXXXXXXXXXXXXX
|
||||
name: code.deploy
|
||||
title: app code deploy
|
||||
description: this is a detailed description of a deployment
|
||||
links:
|
||||
- rel: example
|
||||
href: http://www.example.com/deploy
|
||||
|
||||
# Create an annotation with a start_time and end_time
|
||||
- librato_annotation:
|
||||
user: user@example.com
|
||||
api_key: XXXXXXXXXXXXXXXXXX
|
||||
name: maintenance
|
||||
title: Maintenance window
|
||||
description: This is a detailed description of maintenance
|
||||
start_time: 1395940006
|
||||
end_time: 1395954406
|
||||
'''
|
||||
|
||||
from ansible.module_utils.basic import AnsibleModule
|
||||
from ansible.module_utils.urls import fetch_url
|
||||
|
||||
|
||||
def post_annotation(module):
    """Create a Librato annotation event via the REST API and exit the module.

    Builds the annotation payload from the module parameters, POSTs it to
    https://metrics-api.librato.com/v1/annotations/<name> using HTTP basic
    auth (user/api_key), and calls exit_json on success (HTTP 201) or
    fail_json on any other outcome.
    """
    user = module.params['user']
    api_key = module.params['api_key']
    name = module.params['name']
    title = module.params['title']

    url = 'https://metrics-api.librato.com/v1/annotations/%s' % name
    params = {}
    params['title'] = title

    # Forward only the optional fields that were actually supplied.
    for key in ('source', 'description', 'start_time', 'end_time', 'links'):
        if module.params[key] is not None:
            params[key] = module.params[key]

    json_body = module.jsonify(params)

    headers = {}
    headers['Content-Type'] = 'application/json'

    # Hack: fetch_url reads basic-auth credentials from these parameters.
    module.params['url_username'] = user
    module.params['url_password'] = api_key
    response, info = fetch_url(module, url, data=json_body, headers=headers)
    response_code = str(info['status'])
    # 'body' is absent from info when the connection itself failed
    # (status -1), so use .get() to avoid a KeyError masking the real error.
    response_body = info.get('body', '')
    if info['status'] != 201:
        if info['status'] >= 400:
            module.fail_json(msg="Request Failed. Response code: " + response_code + " Response body: " + response_body)
        else:
            module.fail_json(msg="Request Failed. Response code: " + response_code)
    response = response.read()
    module.exit_json(changed=True, annotation=response)
|
||||
|
||||
|
||||
def main():
    """Entry point: declare the module interface and post the annotation."""
    argument_spec = dict(
        user=dict(required=True),
        api_key=dict(required=True),
        name=dict(required=False),
        title=dict(required=True),
        source=dict(required=False),
        description=dict(required=False),
        start_time=dict(required=False, default=None, type='int'),
        end_time=dict(required=False, default=None, type='int'),
        links=dict(type='list'),
    )

    module = AnsibleModule(argument_spec=argument_spec)

    post_annotation(module)
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
main()
|
||||
156
plugins/modules/monitoring/logentries.py
Normal file
156
plugins/modules/monitoring/logentries.py
Normal file
@@ -0,0 +1,156 @@
|
||||
#!/usr/bin/python
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
# (c) 2013, Ivan Vanderbyl <ivan@app.io>
|
||||
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||
|
||||
from __future__ import absolute_import, division, print_function
|
||||
__metaclass__ = type
|
||||
|
||||
|
||||
ANSIBLE_METADATA = {'metadata_version': '1.1',
|
||||
'status': ['preview'],
|
||||
'supported_by': 'community'}
|
||||
|
||||
|
||||
DOCUMENTATION = '''
|
||||
---
|
||||
module: logentries
|
||||
author: "Ivan Vanderbyl (@ivanvanderbyl)"
|
||||
short_description: Module for tracking logs via logentries.com
|
||||
description:
|
||||
- Sends logs to LogEntries in realtime
|
||||
options:
|
||||
path:
|
||||
description:
|
||||
- path to a log file
|
||||
required: true
|
||||
state:
|
||||
description:
|
||||
- following state of the log
|
||||
choices: [ 'present', 'absent' ]
|
||||
required: false
|
||||
default: present
|
||||
name:
|
||||
description:
|
||||
- name of the log
|
||||
required: false
|
||||
logtype:
|
||||
description:
|
||||
- type of the log
|
||||
required: false
|
||||
|
||||
notes:
|
||||
- Requires the LogEntries agent which can be installed following the instructions at logentries.com
|
||||
'''
|
||||
EXAMPLES = '''
|
||||
# Track nginx logs
|
||||
- logentries:
|
||||
path: /var/log/nginx/access.log
|
||||
state: present
|
||||
name: nginx-access-log
|
||||
|
||||
# Stop tracking nginx logs
|
||||
- logentries:
|
||||
path: /var/log/nginx/error.log
|
||||
state: absent
|
||||
'''
|
||||
|
||||
from ansible.module_utils.basic import AnsibleModule
|
||||
|
||||
|
||||
def query_log_status(module, le_path, path, state="present"):
    """Return True if the LogEntries agent reports *path* as followed."""
    if state != "present":
        # Only the "present" query is supported; anything else is not followed.
        return False
    rc, dummy_out, dummy_err = module.run_command("%s followed %s" % (le_path, path))
    return rc == 0
|
||||
|
||||
|
||||
def follow_log(module, le_path, logs, name=None, logtype=None):
    """Start following every log in *logs* the agent does not already track."""
    changed_logs = []

    for log in logs:
        if query_log_status(module, le_path, log):
            # Already followed; nothing to do for this one.
            continue

        if module.check_mode:
            module.exit_json(changed=True)

        cmd = [le_path, 'follow', log]
        if name:
            cmd += ['--name', name]
        if logtype:
            cmd += ['--type', logtype]
        dummy_rc, dummy_out, err = module.run_command(' '.join(cmd))

        # Verify the follow actually took effect before counting it.
        if not query_log_status(module, le_path, log):
            module.fail_json(msg="failed to follow '%s': %s" % (log, err.strip()))

        changed_logs.append(log)

    if changed_logs:
        module.exit_json(changed=True, msg="followed %d log(s)" % (len(changed_logs),))

    module.exit_json(changed=False, msg="logs(s) already followed")
|
||||
|
||||
|
||||
def unfollow_log(module, le_path, logs):
    """Stop following each log in *logs* that is currently followed.

    Exits the module with changed=True and a count of removed logs, or
    changed=False when nothing was followed in the first place.
    """
    removed_count = 0

    # Loop log-by-log so a failure can name the specific log that broke.
    for log in logs:
        # Query the log first, to see if we even need to remove.
        if not query_log_status(module, le_path, log):
            continue

        if module.check_mode:
            module.exit_json(changed=True)
        rc, out, err = module.run_command([le_path, 'rm', log])

        # The agent must no longer report the log as followed.
        if query_log_status(module, le_path, log):
            module.fail_json(msg="failed to remove '%s': %s" % (log, err.strip()))

        removed_count += 1

    if removed_count > 0:
        # Fixed copy-paste from a package module: report logs, not packages.
        module.exit_json(changed=True, msg="removed %d log(s)" % removed_count)

    module.exit_json(changed=False, msg="logs(s) already unfollowed")
|
||||
|
||||
|
||||
def main():
    """Entry point: parse arguments and follow/unfollow the requested logs."""
    module = AnsibleModule(
        argument_spec=dict(
            path=dict(required=True),
            state=dict(default="present", choices=["present", "followed", "absent", "unfollowed"]),
            name=dict(required=False, default=None, type='str'),
            logtype=dict(required=False, default=None, type='str', aliases=['type'])
        ),
        supports_check_mode=True
    )

    le_path = module.get_bin_path('le', True, ['/usr/local/bin'])

    params = module.params

    # The path option may hold several comma-separated log files.
    logs = [log for log in params["path"].split(",") if log]

    state = params["state"]
    if state in ("present", "followed"):
        follow_log(module, le_path, logs, name=params['name'], logtype=params['logtype'])
    elif state in ("absent", "unfollowed"):
        unfollow_log(module, le_path, logs)
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
main()
|
||||
2135
plugins/modules/monitoring/logicmonitor.py
Normal file
2135
plugins/modules/monitoring/logicmonitor.py
Normal file
File diff suppressed because it is too large
Load Diff
563
plugins/modules/monitoring/logicmonitor_facts.py
Normal file
563
plugins/modules/monitoring/logicmonitor_facts.py
Normal file
@@ -0,0 +1,563 @@
|
||||
#!/usr/bin/python
|
||||
|
||||
# Copyright (C) 2015 LogicMonitor
|
||||
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||
|
||||
from __future__ import absolute_import, division, print_function
|
||||
__metaclass__ = type
|
||||
|
||||
|
||||
ANSIBLE_METADATA = {'metadata_version': '1.1',
|
||||
'status': ['preview'],
|
||||
'supported_by': 'community'}
|
||||
|
||||
|
||||
DOCUMENTATION = '''
|
||||
---
|
||||
module: logicmonitor_facts
|
||||
short_description: Collect facts about LogicMonitor objects
|
||||
description:
|
||||
- LogicMonitor is a hosted, full-stack, infrastructure monitoring platform.
|
||||
- This module collects facts about hosts and host groups within your LogicMonitor account.
|
||||
author: [Ethan Culler-Mayeno (@ethanculler), Jeff Wozniak (@woz5999)]
|
||||
notes:
|
||||
- You must have an existing LogicMonitor account for this module to function.
|
||||
requirements: ["An existing LogicMonitor account", "Linux"]
|
||||
options:
|
||||
target:
|
||||
description:
|
||||
- The LogicMonitor object you wish to manage.
|
||||
required: true
|
||||
choices: ['host', 'hostgroup']
|
||||
company:
|
||||
description:
|
||||
- The LogicMonitor account company name. If you would log in to your account at "superheroes.logicmonitor.com" you would use "superheroes".
|
||||
required: true
|
||||
user:
|
||||
description:
|
||||
- A LogicMonitor user name. The module will authenticate and perform actions on behalf of this user.
|
||||
required: true
|
||||
password:
|
||||
description:
|
||||
- The password for the chosen LogicMonitor User.
|
||||
- If an md5 hash is used, the digest flag must be set to true.
|
||||
required: true
|
||||
collector:
|
||||
description:
|
||||
- The fully qualified domain name of a collector in your LogicMonitor account.
|
||||
- This is optional for querying a LogicMonitor host when a displayname is specified.
|
||||
- This is required for querying a LogicMonitor host when a displayname is not specified.
|
||||
hostname:
|
||||
description:
|
||||
- The hostname of a host in your LogicMonitor account, or the desired hostname of a device to add into monitoring.
|
||||
- Required for managing hosts (target=host).
|
||||
default: 'hostname -f'
|
||||
displayname:
|
||||
description:
|
||||
- The display name of a host in your LogicMonitor account or the desired display name of a device to add into monitoring.
|
||||
default: 'hostname -f'
|
||||
fullpath:
|
||||
description:
|
||||
- The fullpath of the hostgroup object you would like to manage.
|
||||
- Recommend running on a single ansible host.
|
||||
- Required for management of LogicMonitor host groups (target=hostgroup).
|
||||
...
|
||||
'''
|
||||
|
||||
EXAMPLES = '''
|
||||
# Always run those modules on localhost using delegate_to:localhost, or localaction
|
||||
|
||||
- name: query a list of hosts
|
||||
logicmonitor_facts:
|
||||
target: host
|
||||
company: yourcompany
|
||||
user: Luigi
|
||||
password: ImaLuigi,number1!
|
||||
delegate_to: localhost
|
||||
|
||||
- name: query a host group
|
||||
logicmonitor_facts:
|
||||
target: hostgroup
|
||||
fullpath: /servers/production
|
||||
company: yourcompany
|
||||
user: mario
|
||||
password: itsame.Mario!
|
||||
delegate_to: localhost
|
||||
'''
|
||||
|
||||
|
||||
RETURN = '''
|
||||
---
|
||||
ansible_facts:
|
||||
description: LogicMonitor properties set for the specified object
|
||||
returned: success
|
||||
type: list
|
||||
example: >
|
||||
{
|
||||
"name": "dc",
|
||||
"value": "1"
|
||||
},
|
||||
{
|
||||
"name": "type",
|
||||
"value": "prod"
|
||||
},
|
||||
{
|
||||
"name": "system.categories",
|
||||
"value": ""
|
||||
},
|
||||
{
|
||||
"name": "snmp.community",
|
||||
"value": "********"
|
||||
}
|
||||
...
|
||||
'''
|
||||
|
||||
import json
|
||||
import socket
|
||||
import types
|
||||
|
||||
from ansible.module_utils.basic import AnsibleModule
|
||||
from ansible.module_utils.six.moves.urllib.parse import urlencode
|
||||
from ansible.module_utils._text import to_native
|
||||
from ansible.module_utils.urls import open_url
|
||||
|
||||
|
||||
class LogicMonitor(object):
    """Thin client for the LogicMonitor RPC API.

    Holds the account credentials and provides the RPC helpers shared by
    the Host and Hostgroup classes defined later in this module.
    """

    def __init__(self, module, **params):
        self.__version__ = "1.0-python"
        self.module = module
        self.module.debug("Instantiating LogicMonitor object")

        self.check_mode = False
        self.company = params["company"]
        self.user = params["user"]
        self.password = params["password"]
        # Fallback identity used when no hostname/displayname is supplied.
        self.fqdn = socket.getfqdn()
        self.lm_url = "logicmonitor.com/santaba"
        self.__version__ = self.__version__ + "-ansible-module"

    def rpc(self, action, params):
        """Make a call to the LogicMonitor RPC library
        and return the raw JSON response string."""
        self.module.debug("Running LogicMonitor.rpc")

        param_str = urlencode(params)
        creds = urlencode(
            {"c": self.company,
             "u": self.user,
             "p": self.password})

        if param_str:
            param_str = param_str + "&"

        param_str = param_str + creds

        try:
            url = ("https://" + self.company + "." + self.lm_url +
                   "/rpc/" + action + "?" + param_str)

            # Set custom LogicMonitor header with version
            headers = {"X-LM-User-Agent": self.__version__}

            # Set headers
            f = open_url(url, headers=headers)

            raw = f.read()
            resp = json.loads(raw)
            if resp["status"] == 403:
                # API returns HTTP 200 with an in-band 403 status on bad creds.
                self.module.debug("Authentication failed.")
                self.fail(msg="Error: " + resp["errmsg"])
            else:
                return raw
        except IOError as ioe:
            self.fail(msg="Error: Exception making RPC call to " +
                      "https://" + self.company + "." + self.lm_url +
                      "/rpc/" + action + "\nException" + to_native(ioe))

    def get_collectors(self):
        """Returns a JSON object containing a list of
        LogicMonitor collectors"""
        self.module.debug("Running LogicMonitor.get_collectors...")

        self.module.debug("Making RPC call to 'getAgents'")
        resp = self.rpc("getAgents", {})
        resp_json = json.loads(resp)

        if resp_json["status"] == 200:
            self.module.debug("RPC call succeeded")
            return resp_json["data"]
        else:
            self.fail(msg=resp)

    def get_host_by_hostname(self, hostname, collector):
        """Returns a host object for the host matching the
        specified hostname on the specified collector, or None."""
        self.module.debug("Running LogicMonitor.get_host_by_hostname...")

        self.module.debug("Looking for hostname " + hostname)
        self.module.debug("Making RPC call to 'getHosts'")
        hostlist_json = json.loads(self.rpc("getHosts", {"hostGroupId": 1}))

        if collector:
            if hostlist_json["status"] == 200:
                self.module.debug("RPC call succeeded")

                hosts = hostlist_json["data"]["hosts"]

                self.module.debug(
                    "Looking for host matching: hostname " + hostname +
                    " and collector " + str(collector["id"]))

                # A host is only a match when both its name and its
                # collector agree with what was requested.
                for host in hosts:
                    if (host["hostName"] == hostname and
                            host["agentId"] == collector["id"]):

                        self.module.debug("Host match found")
                        return host
                self.module.debug("No host match found")
                return None
            else:
                self.module.debug("RPC call failed")
                self.module.debug(hostlist_json)
        else:
            self.module.debug("No collector specified")
            return None

    def get_host_by_displayname(self, displayname):
        """Returns a host object for the host matching the
        specified display name, or None"""
        self.module.debug("Running LogicMonitor.get_host_by_displayname...")

        self.module.debug("Looking for displayname " + displayname)
        self.module.debug("Making RPC call to 'getHost'")
        host_json = (json.loads(self.rpc("getHost",
                                         {"displayName": displayname})))

        if host_json["status"] == 200:
            self.module.debug("RPC call succeeded")
            return host_json["data"]
        else:
            self.module.debug("RPC call failed")
            self.module.debug(host_json)
            return None

    def get_collector_by_description(self, description):
        """Returns a JSON collector object for the collector
        matching the specified FQDN (description), or None"""
        self.module.debug(
            "Running LogicMonitor.get_collector_by_description..."
        )

        collector_list = self.get_collectors()
        if collector_list is not None:
            self.module.debug("Looking for collector with description " +
                              description)
            for collector in collector_list:
                if collector["description"] == description:
                    self.module.debug("Collector match found")
                    return collector
        self.module.debug("No collector match found")
        return None

    def get_group(self, fullpath):
        """Returns a JSON group object for the group matching the
        specified path, or None"""
        self.module.debug("Running LogicMonitor.get_group...")

        self.module.debug("Making RPC call to getHostGroups")
        resp = json.loads(self.rpc("getHostGroups", {}))

        if resp["status"] == 200:
            self.module.debug("RPC called succeeded")
            groups = resp["data"]

            self.module.debug("Looking for group matching " + fullpath)
            # The API reports paths without a leading slash.
            for group in groups:
                if group["fullPath"] == fullpath.lstrip('/'):
                    self.module.debug("Group match found")
                    return group

            self.module.debug("No group match found")
            return None
        else:
            self.module.debug("RPC call failed")
            self.module.debug(resp)

            return None

    def create_group(self, fullpath):
        """Recursively create a path of host groups.
        Returns the id of the newly created hostgroup"""
        self.module.debug("Running LogicMonitor.create_group...")

        res = self.get_group(fullpath)
        if res:
            self.module.debug("Group " + fullpath + " exists.")
            return res["id"]

        if fullpath == "/":
            self.module.debug("Specified group is root. Doing nothing.")
            return 1
        else:
            self.module.debug("Creating group named " + fullpath)
            self.module.debug("System changed")
            self.change = True

            if self.check_mode:
                self.exit(changed=True)

            parentpath, name = fullpath.rsplit('/', 1)
            parentgroup = self.get_group(parentpath)

            # Default to the root group (id 1); recurse to create missing
            # ancestors when the parent does not yet exist.
            parentid = 1

            if parentpath == "":
                parentid = 1
            elif parentgroup:
                parentid = parentgroup["id"]
            else:
                parentid = self.create_group(parentpath)

            h = None

            # Determine if we're creating a group from host or hostgroup class
            if hasattr(self, '_build_host_group_hash'):
                h = self._build_host_group_hash(
                    fullpath,
                    self.description,
                    self.properties,
                    self.alertenable)
                h["name"] = name
                h["parentId"] = parentid
            else:
                h = {"name": name,
                     "parentId": parentid,
                     "alertEnable": True,
                     "description": ""}

            self.module.debug("Making RPC call to 'addHostGroup'")
            resp = json.loads(
                self.rpc("addHostGroup", h))

            if resp["status"] == 200:
                self.module.debug("RPC call succeeded")
                return resp["data"]["id"]
            elif resp["errmsg"] == "The record already exists":
                # Lost a race with a concurrent creation; look it up instead.
                self.module.debug("The hostgroup already exists")
                group = self.get_group(fullpath)
                return group["id"]
            else:
                self.module.debug("RPC call failed")
                self.fail(
                    msg="Error: unable to create new hostgroup \"" + name +
                        "\".\n" + resp["errmsg"])

    def fail(self, msg):
        """Fail the module, reporting whether any change was made."""
        self.module.fail_json(msg=msg, changed=self.change)

    def exit(self, changed):
        """Exit the module with the given changed status."""
        # str() guard: *changed* is a bool and "str" + bool raises TypeError.
        self.module.debug("Changed: " + str(changed))
        self.module.exit_json(changed=changed)

    def output_info(self, info):
        """Exit successfully, registering *info* as Ansible facts."""
        self.module.debug("Registering properties as Ansible facts")
        self.module.exit_json(changed=False, ansible_facts=info)
|
||||
|
||||
|
||||
class Host(LogicMonitor):
    """Represents a single monitored host in the LogicMonitor account."""

    def __init__(self, params, module=None):
        """Initializer for the LogicMonitor host object"""
        self.change = False
        self.params = params
        self.collector = None

        LogicMonitor.__init__(self, module, **self.params)
        self.module.debug("Instantiating Host object")

        # Fall back to this machine's FQDN when hostname/displayname
        # parameters are not supplied.
        if self.params["hostname"]:
            self.module.debug("Hostname is " + self.params["hostname"])
            self.hostname = self.params['hostname']
        else:
            self.module.debug("No hostname specified. Using " + self.fqdn)
            self.hostname = self.fqdn

        if self.params["displayname"]:
            self.module.debug("Display name is " + self.params["displayname"])
            self.displayname = self.params['displayname']
        else:
            self.module.debug("No display name specified. Using " + self.fqdn)
            self.displayname = self.fqdn

        # Attempt to host information via display name of host name
        self.module.debug("Attempting to find host by displayname " +
                          self.displayname)
        info = self.get_host_by_displayname(self.displayname)

        if info is not None:
            self.module.debug("Host found by displayname")
            # Used the host information to grab the collector description
            # if not provided
            # NOTE(review): hasattr() on a dict is always False, so this
            # branch always overwrites the collector from the API response;
            # likely meant self.params["collector"] is None — confirm
            # before changing, as the current behavior may be relied upon.
            if (not hasattr(self.params, "collector") and
                    "agentDescription" in info):
                self.module.debug("Setting collector from host response. " +
                                  "Collector " + info["agentDescription"])
                self.params["collector"] = info["agentDescription"]
        else:
            self.module.debug("Host not found by displayname")

        # At this point, a valid collector description is required for success
        # Check that the description exists or fail
        if self.params["collector"]:
            self.module.debug("Collector specified is " +
                              self.params["collector"])
            self.collector = (self.get_collector_by_description(
                self.params["collector"]))
        else:
            self.fail(msg="No collector specified.")

        # If the host wasn't found via displayname, attempt by hostname
        if info is None:
            self.module.debug("Attempting to find host by hostname " +
                              self.hostname)
            info = self.get_host_by_hostname(self.hostname, self.collector)

        self.info = info

    def get_properties(self):
        """Returns a hash of the properties
        associated with this LogicMonitor host, or None if not found"""
        self.module.debug("Running Host.get_properties...")

        if self.info:
            self.module.debug("Making RPC call to 'getHostProperties'")
            properties_json = (json.loads(self.rpc("getHostProperties",
                                                   {'hostId': self.info["id"],
                                                    "filterSystemProperties": True})))

            if properties_json["status"] == 200:
                self.module.debug("RPC call succeeded")
                return properties_json["data"]
            else:
                self.module.debug("Error: there was an issue retrieving the " +
                                  "host properties")
                self.module.debug(properties_json["errmsg"])

                self.fail(msg=properties_json["status"])
        else:
            self.module.debug(
                "Unable to find LogicMonitor host which matches " +
                self.displayname + " (" + self.hostname + ")"
            )
            return None

    def site_facts(self):
        """Output current properties information for the Host"""
        self.module.debug("Running Host.site_facts...")

        if self.info:
            self.module.debug("Host exists")
            props = self.get_properties()

            self.output_info(props)
        else:
            # Fixed typo in the user-facing message ("exit" -> "exist").
            self.fail(msg="Error: Host doesn't exist.")
|
||||
|
||||
|
||||
class Hostgroup(LogicMonitor):
    """Represents a host group (folder of hosts) in the LogicMonitor account."""

    def __init__(self, params, module=None):
        """Initializer for the LogicMonitor hostgroup object"""
        self.change = False
        self.params = params

        LogicMonitor.__init__(self, module, **self.params)
        self.module.debug("Instantiating Hostgroup object")

        self.fullpath = self.params["fullpath"]
        # Resolve the group up-front; None means it does not exist.
        self.info = self.get_group(self.fullpath)

    def get_properties(self, final=False):
        """Returns a hash of the properties
        associated with this LogicMonitor host group, or None if not found.
        When *final* is true, the API resolves inherited property values."""
        self.module.debug("Running Hostgroup.get_properties...")

        if self.info:
            self.module.debug("Group found")

            self.module.debug("Making RPC call to 'getHostGroupProperties'")
            properties_json = json.loads(self.rpc(
                "getHostGroupProperties",
                {'hostGroupId': self.info["id"],
                 "finalResult": final}))

            if properties_json["status"] == 200:
                self.module.debug("RPC call succeeded")
                return properties_json["data"]
            else:
                self.module.debug("RPC call failed")
                self.fail(msg=properties_json["status"])
        else:
            self.module.debug("Group not found")
            return None

    def site_facts(self):
        """Output current properties information for the Hostgroup"""
        self.module.debug("Running Hostgroup.site_facts...")

        if self.info:
            self.module.debug("Group exists")
            props = self.get_properties(True)

            self.output_info(props)
        else:
            # Fixed typo in the user-facing message ("exit" -> "exist").
            self.fail(msg="Error: Group doesn't exist.")
|
||||
|
||||
|
||||
def selector(module):
    """Dispatch to the Host or Hostgroup fact collector based on 'target'."""
    target = module.params["target"]

    if target == "host":
        Host(module.params, module).site_facts()
    elif target == "hostgroup":
        # Validate target specific required parameters
        if module.params["fullpath"] is None:
            module.fail_json(
                msg="Parameter 'fullpath' required for target 'hostgroup'")
        Hostgroup(module.params, module).site_facts()
    else:
        module.fail_json(
            msg="Error: Unexpected target \"" + module.params["target"] +
                "\" was specified.")
|
||||
|
||||
|
||||
def main():
    """Entry point: declare the module interface and dispatch via selector()."""
    TARGETS = [
        "host",
        "hostgroup"]

    module = AnsibleModule(
        argument_spec=dict(
            # Required options must not also declare a default: the default
            # is ignored and newer argument-spec validation flags the combo.
            target=dict(required=True, choices=TARGETS),
            company=dict(required=True),
            user=dict(required=True),
            password=dict(required=True, no_log=True),

            collector=dict(required=False, default=None),
            hostname=dict(required=False, default=None),
            displayname=dict(required=False, default=None),
            fullpath=dict(required=False, default=None)
        ),
        supports_check_mode=True
    )

    selector(module)
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
main()
|
||||
171
plugins/modules/monitoring/logstash_plugin.py
Normal file
171
plugins/modules/monitoring/logstash_plugin.py
Normal file
@@ -0,0 +1,171 @@
|
||||
#!/usr/bin/python
|
||||
# -*- coding: utf-8 -*-
|
||||
# (c) 2017, Loic Blot <loic.blot@unix-experience.fr>
|
||||
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||
|
||||
from __future__ import absolute_import, division, print_function
|
||||
__metaclass__ = type
|
||||
|
||||
|
||||
ANSIBLE_METADATA = {'metadata_version': '1.1',
|
||||
'status': ['preview'],
|
||||
'supported_by': 'community'}
|
||||
|
||||
|
||||
DOCUMENTATION = '''
|
||||
---
|
||||
module: logstash_plugin
|
||||
short_description: Manage Logstash plugins
|
||||
description:
|
||||
- Manages Logstash plugins.
|
||||
author: Loic Blot (@nerzhul)
|
||||
options:
|
||||
name:
|
||||
description:
|
||||
- Install plugin with that name.
|
||||
required: True
|
||||
state:
|
||||
description:
|
||||
- Apply plugin state.
|
||||
choices: ["present", "absent"]
|
||||
default: present
|
||||
plugin_bin:
|
||||
description:
|
||||
- Specify logstash-plugin to use for plugin management.
|
||||
default: /usr/share/logstash/bin/logstash-plugin
|
||||
proxy_host:
|
||||
description:
|
||||
- Proxy host to use during plugin installation.
|
||||
proxy_port:
|
||||
description:
|
||||
- Proxy port to use during plugin installation.
|
||||
version:
|
||||
description:
|
||||
- Specify plugin Version of the plugin to install.
|
||||
If plugin exists with previous version, it will NOT be updated.
|
||||
'''
|
||||
|
||||
EXAMPLES = '''
|
||||
- name: Install Logstash beats input plugin
|
||||
logstash_plugin:
|
||||
state: present
|
||||
name: logstash-input-beats
|
||||
|
||||
- name: Install specific version of a plugin
|
||||
logstash_plugin:
|
||||
state: present
|
||||
name: logstash-input-syslog
|
||||
version: '3.2.0'
|
||||
|
||||
- name: Uninstall Logstash plugin
|
||||
logstash_plugin:
|
||||
state: absent
|
||||
name: logstash-filter-multiline
|
||||
|
||||
- name: install Logstash plugin with alternate heap size
|
||||
logstash_plugin:
|
||||
state: present
|
||||
name: logstash-input-beats
|
||||
environment:
|
||||
LS_JAVA_OPTS: "-Xms256m -Xmx256m"
|
||||
'''
|
||||
|
||||
from ansible.module_utils.basic import AnsibleModule
|
||||
|
||||
|
||||
# Map the module's 'state' values to the logstash-plugin subcommand
# that realizes each of them.
PACKAGE_STATE_MAP = {
    "present": "install",
    "absent": "remove",
}
|
||||
|
||||
|
||||
def is_plugin_present(module, plugin_bin, plugin_name):
    """Return True when `logstash-plugin list <name>` reports the plugin installed."""
    rc, dummy_out, dummy_err = module.run_command("%s list %s" % (plugin_bin, plugin_name))
    return rc == 0
|
||||
|
||||
|
||||
def parse_error(string):
    """Extract the text after the 'reason: ' marker from a logstash-plugin
    error message; fall back to the whole message when no marker exists."""
    marker = "reason: "
    position = string.find(marker)
    if position == -1:
        return string
    return string[position + len(marker):].strip()
|
||||
|
||||
|
||||
def install_plugin(module, plugin_bin, plugin_name, version, proxy_host, proxy_port):
    """Install *plugin_name*, optionally pinned to *version*, honoring proxy settings.

    Returns (changed, cmd, stdout, stderr); fails the module on a non-zero exit.
    """
    parts = [plugin_bin, PACKAGE_STATE_MAP["present"], plugin_name]
    if version:
        parts.append("--version %s" % version)
    if proxy_host and proxy_port:
        # The proxy is handed to the JVM as system properties.
        parts.append("-DproxyHost=%s -DproxyPort=%s" % (proxy_host, proxy_port))
    cmd = " ".join(parts)

    if module.check_mode:
        rc, out, err = 0, "check mode", ""
    else:
        rc, out, err = module.run_command(cmd)

    if rc != 0:
        module.fail_json(msg=parse_error(out))

    return True, cmd, out, err
|
||||
|
||||
|
||||
def remove_plugin(module, plugin_bin, plugin_name):
    """Uninstall *plugin_name*.

    Returns (changed, cmd, stdout, stderr); fails the module on a non-zero exit.
    """
    cmd = " ".join([plugin_bin, PACKAGE_STATE_MAP["absent"], plugin_name])

    if module.check_mode:
        rc, out, err = 0, "check mode", ""
    else:
        rc, out, err = module.run_command(cmd)

    if rc != 0:
        module.fail_json(msg=parse_error(out))

    return True, cmd, out, err
|
||||
|
||||
|
||||
def main():
    """Entry point: reconcile the requested plugin state with what is installed."""
    module = AnsibleModule(
        argument_spec=dict(
            name=dict(required=True),
            state=dict(default="present", choices=PACKAGE_STATE_MAP.keys()),
            plugin_bin=dict(default="/usr/share/logstash/bin/logstash-plugin", type="path"),
            proxy_host=dict(default=None),
            proxy_port=dict(default=None),
            version=dict(default=None)
        ),
        supports_check_mode=True
    )

    params = module.params
    name = params["name"]
    state = params["state"]
    plugin_bin = params["plugin_bin"]

    present = is_plugin_present(module, plugin_bin, name)

    # Nothing to do when the observed state already matches the request.
    if present == (state == "present"):
        module.exit_json(changed=False, name=name, state=state)

    if state == "present":
        changed, cmd, out, err = install_plugin(
            module, plugin_bin, name, params["version"],
            params["proxy_host"], params["proxy_port"])
    else:
        changed, cmd, out, err = remove_plugin(module, plugin_bin, name)

    module.exit_json(changed=changed, cmd=cmd, name=name, state=state, stdout=out, stderr=err)
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
main()
|
||||
211
plugins/modules/monitoring/monit.py
Normal file
211
plugins/modules/monitoring/monit.py
Normal file
@@ -0,0 +1,211 @@
|
||||
#!/usr/bin/python
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
# (c) 2013, Darryl Stoflet <stoflet@gmail.com>
|
||||
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||
|
||||
from __future__ import absolute_import, division, print_function
|
||||
__metaclass__ = type
|
||||
|
||||
|
||||
ANSIBLE_METADATA = {'metadata_version': '1.1',
|
||||
'status': ['preview'],
|
||||
'supported_by': 'community'}
|
||||
|
||||
|
||||
DOCUMENTATION = '''
|
||||
---
|
||||
module: monit
|
||||
short_description: Manage the state of a program monitored via Monit
|
||||
description:
|
||||
- Manage the state of a program monitored via I(Monit)
|
||||
options:
|
||||
name:
|
||||
description:
|
||||
- The name of the I(monit) program/process to manage
|
||||
required: true
|
||||
state:
|
||||
description:
|
||||
- The state of service
|
||||
required: true
|
||||
choices: [ "present", "started", "stopped", "restarted", "monitored", "unmonitored", "reloaded" ]
|
||||
timeout:
|
||||
description:
|
||||
- If there are pending actions for the service monitored by monit, then Ansible will check
|
||||
for up to this many seconds to verify the requested action has been performed.
|
||||
Ansible will sleep for five seconds between each check.
|
||||
default: 300
|
||||
author: "Darryl Stoflet (@dstoflet)"
|
||||
'''
|
||||
|
||||
EXAMPLES = '''
|
||||
# Manage the state of program "httpd" to be in "started" state.
|
||||
- monit:
|
||||
name: httpd
|
||||
state: started
|
||||
'''
|
||||
|
||||
import time
|
||||
import re
|
||||
|
||||
from ansible.module_utils.basic import AnsibleModule
|
||||
|
||||
|
||||
def main():
    """Entry point: drive a monit-managed process to the requested state."""
    arg_spec = dict(
        name=dict(required=True),
        timeout=dict(default=300, type='int'),
        state=dict(required=True, choices=['present', 'started', 'restarted', 'stopped', 'monitored', 'unmonitored', 'reloaded'])
    )

    module = AnsibleModule(argument_spec=arg_spec, supports_check_mode=True)

    name = module.params['name']
    state = module.params['state']
    timeout = module.params['timeout']

    # Resolve the monit binary; fails the module run if monit is not on PATH.
    MONIT = module.get_bin_path('monit', True)

    def monit_version():
        """Return (major, minor) of the installed monit, parsed from 'monit -V'."""
        rc, out, err = module.run_command('%s -V' % MONIT, check_rc=True)
        # The version string appears on the first line of the -V output.
        version_line = out.split('\n')[0]
        version = re.search(r"[0-9]+\.[0-9]+", version_line).group().split('.')
        # Use only major and minor even if there are more these should be enough
        return int(version[0]), int(version[1])

    def is_version_higher_than_5_18():
        # monit changed its summary output format after 5.18 (i.e. 5.19+).
        return (MONIT_MAJOR_VERSION, MONIT_MINOR_VERSION) > (5, 18)

    def parse(parts):
        """Dispatch to the parser matching the installed monit's output format."""
        if is_version_higher_than_5_18():
            return parse_current(parts)
        else:
            return parse_older_versions(parts)

    def parse_older_versions(parts):
        # Old format lines look like: Process 'name' <status words...>
        if len(parts) > 2 and parts[0].lower() == 'process' and parts[1] == "'%s'" % name:
            return ' '.join(parts[2:]).lower()
        else:
            return ''

    def parse_current(parts):
        # Newer (-B) format — presumably columns are: name status type.
        # TODO(review): confirm column order against 'monit summary -B' output.
        if len(parts) > 2 and parts[2].lower() == 'process' and parts[0] == name:
            return ''.join(parts[1]).lower()
        else:
            return ''

    def get_status():
        """Return the status of the process in monit, or the empty string if not present."""
        rc, out, err = module.run_command('%s %s' % (MONIT, SUMMARY_COMMAND), check_rc=True)
        for line in out.split('\n'):
            # Sample output lines:
            # Process 'name' Running
            # Process 'name' Running - restart pending
            parts = parse(line.split())
            if parts != '':
                return parts

        return ''

    def run_command(command):
        """Runs a monit command, and returns the new status."""
        module.run_command('%s %s %s' % (MONIT, command, name), check_rc=True)
        return get_status()

    def wait_for_monit_to_stop_pending():
        """Fails this run if there is no status or it's pending/initializing for timeout"""
        timeout_time = time.time() + timeout
        sleep_time = 5

        running_status = get_status()
        while running_status == '' or 'pending' in running_status or 'initializing' in running_status:
            if time.time() >= timeout_time:
                module.fail_json(
                    msg='waited too long for "pending", or "initiating" status to go away ({0})'.format(
                        running_status
                    ),
                    state=state
                )

            time.sleep(sleep_time)
            running_status = get_status()

    # Bound after the helper definitions on purpose: the closures above
    # reference these names late, at call time.
    MONIT_MAJOR_VERSION, MONIT_MINOR_VERSION = monit_version()

    # Newer monit needs 'summary -B' for the machine-friendly output format;
    # the boolean indexes the tuple (False -> 'summary', True -> 'summary -B').
    SUMMARY_COMMAND = ('summary', 'summary -B')[is_version_higher_than_5_18()]

    # 'reloaded' is handled before the presence check: a reload is global and
    # does not require the process to already be known to monit.
    if state == 'reloaded':
        if module.check_mode:
            module.exit_json(changed=True)
        rc, out, err = module.run_command('%s reload' % MONIT)
        if rc != 0:
            module.fail_json(msg='monit reload failed', stdout=out, stderr=err)
        wait_for_monit_to_stop_pending()
        module.exit_json(changed=True, name=name, state=state)

    present = get_status() != ''

    # Every state except 'present' requires the process to already be
    # configured in monit.
    if not present and not state == 'present':
        module.fail_json(msg='%s process not presently configured with monit' % name, name=name, state=state)

    if state == 'present':
        if not present:
            if module.check_mode:
                module.exit_json(changed=True)
            # A reload makes monit pick up a newly added configuration entry.
            status = run_command('reload')
            if status == '':
                wait_for_monit_to_stop_pending()
            module.exit_json(changed=True, name=name, state=state)
        module.exit_json(changed=False, name=name, state=state)

    # From here on the process is known to monit; wait out any pending or
    # initializing action before reading its running state.
    wait_for_monit_to_stop_pending()
    running = 'running' in get_status()

    if running and state in ['started', 'monitored']:
        module.exit_json(changed=False, name=name, state=state)

    if running and state == 'stopped':
        if module.check_mode:
            module.exit_json(changed=True)
        status = run_command('stop')
        if status in ['not monitored'] or 'stop pending' in status:
            module.exit_json(changed=True, name=name, state=state)
        module.fail_json(msg='%s process not stopped' % name, status=status)

    if running and state == 'unmonitored':
        if module.check_mode:
            module.exit_json(changed=True)
        status = run_command('unmonitor')
        if status in ['not monitored'] or 'unmonitor pending' in status:
            module.exit_json(changed=True, name=name, state=state)
        module.fail_json(msg='%s process not unmonitored' % name, status=status)

    # NOTE(review): the elif chain below attaches to the preceding if, so
    # these branches are only reached when that condition was false.
    elif state == 'restarted':
        if module.check_mode:
            module.exit_json(changed=True)
        status = run_command('restart')
        if status in ['initializing', 'running'] or 'restart pending' in status:
            module.exit_json(changed=True, name=name, state=state)
        module.fail_json(msg='%s process not restarted' % name, status=status)

    elif not running and state == 'started':
        if module.check_mode:
            module.exit_json(changed=True)
        status = run_command('start')
        if status in ['initializing', 'running'] or 'start pending' in status:
            module.exit_json(changed=True, name=name, state=state)
        module.fail_json(msg='%s process not started' % name, status=status)

    elif not running and state == 'monitored':
        if module.check_mode:
            module.exit_json(changed=True)
        status = run_command('monitor')
        if status not in ['not monitored']:
            module.exit_json(changed=True, name=name, state=state)
        module.fail_json(msg='%s process not monitored' % name, status=status)

    # Fall-through: nothing matched, nothing changed.
    module.exit_json(changed=False, name=name, state=state)


if __name__ == '__main__':
    main()
|
||||
1086
plugins/modules/monitoring/nagios.py
Normal file
1086
plugins/modules/monitoring/nagios.py
Normal file
File diff suppressed because it is too large
Load Diff
141
plugins/modules/monitoring/newrelic_deployment.py
Normal file
141
plugins/modules/monitoring/newrelic_deployment.py
Normal file
@@ -0,0 +1,141 @@
|
||||
#!/usr/bin/python
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
# Copyright 2013 Matt Coddington <coddington@gmail.com>
|
||||
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||
|
||||
from __future__ import absolute_import, division, print_function
|
||||
__metaclass__ = type
|
||||
|
||||
|
||||
ANSIBLE_METADATA = {'metadata_version': '1.1',
|
||||
'status': ['preview'],
|
||||
'supported_by': 'community'}
|
||||
|
||||
|
||||
DOCUMENTATION = '''
|
||||
---
|
||||
module: newrelic_deployment
|
||||
author: "Matt Coddington (@mcodd)"
|
||||
short_description: Notify newrelic about app deployments
|
||||
description:
|
||||
- Notify newrelic about app deployments (see https://docs.newrelic.com/docs/apm/new-relic-apm/maintenance/deployment-notifications#api)
|
||||
options:
|
||||
token:
|
||||
description:
|
||||
- API token, to place in the x-api-key header.
|
||||
required: true
|
||||
app_name:
|
||||
description:
|
||||
- (one of app_name or application_id are required) The value of app_name in the newrelic.yml file used by the application
|
||||
required: false
|
||||
application_id:
|
||||
description:
|
||||
- (one of app_name or application_id are required) The application id, found in the URL when viewing the application in RPM
|
||||
required: false
|
||||
changelog:
|
||||
description:
|
||||
- A list of changes for this deployment
|
||||
required: false
|
||||
description:
|
||||
description:
|
||||
- Text annotation for the deployment - notes for you
|
||||
required: false
|
||||
revision:
|
||||
description:
|
||||
- A revision number (e.g., git commit SHA)
|
||||
required: false
|
||||
user:
|
||||
description:
|
||||
- The name of the user/process that triggered this deployment
|
||||
required: false
|
||||
appname:
|
||||
description:
|
||||
- Name of the application
|
||||
required: false
|
||||
environment:
|
||||
description:
|
||||
- The environment for this deployment
|
||||
required: false
|
||||
validate_certs:
|
||||
description:
|
||||
- If C(no), SSL certificates will not be validated. This should only be used
|
||||
on personally controlled sites using self-signed certificates.
|
||||
required: false
|
||||
default: 'yes'
|
||||
type: bool
|
||||
|
||||
requirements: []
|
||||
'''
|
||||
|
||||
EXAMPLES = '''
|
||||
- newrelic_deployment:
|
||||
token: AAAAAA
|
||||
app_name: myapp
|
||||
user: ansible deployment
|
||||
revision: '1.0'
|
||||
'''
|
||||
|
||||
from ansible.module_utils.basic import AnsibleModule
|
||||
from ansible.module_utils.urls import fetch_url
|
||||
from ansible.module_utils.six.moves.urllib.parse import urlencode
|
||||
|
||||
# ===========================================
|
||||
# Module execution.
|
||||
#
|
||||
|
||||
|
||||
def main():
    """Entry point: record an application deployment in New Relic."""
    module = AnsibleModule(
        argument_spec=dict(
            token=dict(required=True, no_log=True),
            app_name=dict(required=False),
            application_id=dict(required=False),
            changelog=dict(required=False),
            description=dict(required=False),
            revision=dict(required=False),
            user=dict(required=False),
            appname=dict(required=False),
            environment=dict(required=False),
            validate_certs=dict(default='yes', type='bool'),
        ),
        required_one_of=[['app_name', 'application_id']],
        supports_check_mode=True
    )

    # Select exactly one application identifier for the request payload.
    app_name = module.params["app_name"]
    application_id = module.params["application_id"]
    if app_name and application_id:
        module.fail_json(msg="only one of 'app_name' or 'application_id' can be set")

    params = {}
    if app_name:
        params["app_name"] = app_name
    elif application_id:
        params["application_id"] = application_id
    else:
        module.fail_json(msg="you must set one of 'app_name' or 'application_id'")

    # Optional deployment metadata is forwarded only when actually set.
    for key in ("changelog", "description", "revision", "user", "appname", "environment"):
        value = module.params[key]
        if value:
            params[key] = value

    # In check mode, pretend the notification succeeded without sending it.
    if module.check_mode:
        module.exit_json(changed=True)

    # Send the deployment notification to New Relic.
    response, info = fetch_url(
        module,
        "https://rpm.newrelic.com/deployments.xml",
        data=urlencode(params),
        headers={'x-api-key': module.params["token"]},
    )
    if info['status'] in (200, 201):
        module.exit_json(changed=True)
    else:
        module.fail_json(msg="unable to update newrelic: %s" % info['msg'])


if __name__ == '__main__':
    main()
|
||||
274
plugins/modules/monitoring/pagerduty.py
Normal file
274
plugins/modules/monitoring/pagerduty.py
Normal file
@@ -0,0 +1,274 @@
|
||||
#!/usr/bin/python
|
||||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# Copyright: Ansible Project
|
||||
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||
|
||||
from __future__ import absolute_import, division, print_function
|
||||
__metaclass__ = type
|
||||
|
||||
|
||||
ANSIBLE_METADATA = {'metadata_version': '1.1',
|
||||
'status': ['preview'],
|
||||
'supported_by': 'community'}
|
||||
|
||||
|
||||
DOCUMENTATION = '''
|
||||
|
||||
module: pagerduty
|
||||
short_description: Create PagerDuty maintenance windows
|
||||
description:
|
||||
- This module will let you create PagerDuty maintenance windows
|
||||
author:
|
||||
- "Andrew Newdigate (@suprememoocow)"
|
||||
- "Dylan Silva (@thaumos)"
|
||||
- "Justin Johns (!UNKNOWN)"
|
||||
- "Bruce Pennypacker (@bpennypacker)"
|
||||
requirements:
|
||||
- PagerDuty API access
|
||||
options:
|
||||
state:
|
||||
description:
|
||||
- Create a maintenance window or get a list of ongoing windows.
|
||||
required: true
|
||||
choices: [ "running", "started", "ongoing", "absent" ]
|
||||
name:
|
||||
description:
|
||||
- PagerDuty unique subdomain. Obsolete. It is not used with PagerDuty REST v2 API.
|
||||
user:
|
||||
description:
|
||||
- PagerDuty user ID. Obsolete. Please, use I(token) for authorization.
|
||||
token:
|
||||
description:
|
||||
- A pagerduty token, generated on the pagerduty site. It is used for authorization.
|
||||
required: true
|
||||
requester_id:
|
||||
description:
|
||||
- ID of user making the request. Only needed when creating a maintenance_window.
|
||||
service:
|
||||
description:
|
||||
- A comma separated list of PagerDuty service IDs.
|
||||
aliases: [ services ]
|
||||
window_id:
|
||||
description:
|
||||
- ID of maintenance window. Only needed when absent a maintenance_window.
|
||||
hours:
|
||||
description:
|
||||
- Length of maintenance window in hours.
|
||||
default: 1
|
||||
minutes:
|
||||
description:
|
||||
- Maintenance window in minutes (this is added to the hours).
|
||||
default: 0
|
||||
desc:
|
||||
description:
|
||||
- Short description of maintenance window.
|
||||
default: Created by Ansible
|
||||
validate_certs:
|
||||
description:
|
||||
- If C(no), SSL certificates will not be validated. This should only be used
|
||||
on personally controlled sites using self-signed certificates.
|
||||
type: bool
|
||||
default: 'yes'
|
||||
'''
|
||||
|
||||
EXAMPLES = '''
|
||||
# List ongoing maintenance windows using a token
|
||||
- pagerduty:
|
||||
name: companyabc
|
||||
token: xxxxxxxxxxxxxx
|
||||
state: ongoing
|
||||
|
||||
# Create a 1 hour maintenance window for service FOO123
|
||||
- pagerduty:
|
||||
name: companyabc
|
||||
user: example@example.com
|
||||
token: yourtoken
|
||||
state: running
|
||||
service: FOO123
|
||||
|
||||
# Create a 5 minute maintenance window for service FOO123
|
||||
- pagerduty:
|
||||
name: companyabc
|
||||
token: xxxxxxxxxxxxxx
|
||||
hours: 0
|
||||
minutes: 5
|
||||
state: running
|
||||
service: FOO123
|
||||
|
||||
|
||||
# Create a 4 hour maintenance window for service FOO123 with the description "deployment".
|
||||
- pagerduty:
|
||||
name: companyabc
|
||||
user: example@example.com
|
||||
state: running
|
||||
service: FOO123
|
||||
hours: 4
|
||||
desc: deployment
|
||||
register: pd_window
|
||||
|
||||
# Delete the previous maintenance window
|
||||
- pagerduty:
|
||||
name: companyabc
|
||||
user: example@example.com
|
||||
state: absent
|
||||
window_id: '{{ pd_window.result.maintenance_window.id }}'
|
||||
|
||||
# Delete a maintenance window from a separate playbook than its creation, and if it is the only existing maintenance window.
|
||||
- pagerduty:
|
||||
requester_id: XXXXXXX
|
||||
token: yourtoken
|
||||
state: ongoing
|
||||
register: pd_window
|
||||
|
||||
- pagerduty:
|
||||
requester_id: XXXXXXX
|
||||
token: yourtoken
|
||||
state: absent
|
||||
window_id: "{{ pd_window.result.maintenance_windows[0].id }}"
|
||||
|
||||
'''
|
||||
|
||||
import datetime
|
||||
import json
|
||||
import base64
|
||||
|
||||
from ansible.module_utils.basic import AnsibleModule
|
||||
from ansible.module_utils.urls import fetch_url
|
||||
from ansible.module_utils._text import to_bytes
|
||||
|
||||
|
||||
class PagerDutyRequest(object):
    """Thin wrapper around the PagerDuty REST v2 maintenance-window endpoints."""

    def __init__(self, module, name, user, token):
        self.module = module
        self.name = name
        self.user = user
        self.token = token
        # Common headers sent with every REST v2 call.
        self.headers = {
            'Content-Type': 'application/json',
            "Authorization": self._auth_header(),
            'Accept': 'application/vnd.pagerduty+json;version=2'
        }

    def ongoing(self, http_call=fetch_url):
        """List currently ongoing maintenance windows; never reports a change."""
        url = "https://api.pagerduty.com/maintenance_windows?filter=ongoing"
        response, info = http_call(self.module, url, headers=dict(self.headers))
        if info['status'] != 200:
            self.module.fail_json(msg="failed to lookup the ongoing window: %s" % info['msg'])
        return False, self._read_response(response), False

    def create(self, requester_id, service, hours, minutes, desc, http_call=fetch_url):
        """Open a maintenance window on *service* lasting hours + minutes."""
        if not requester_id:
            self.module.fail_json(msg="requester_id is required when maintenance window should be created")

        url = 'https://api.pagerduty.com/maintenance_windows'

        # The REST v2 API identifies the requesting user via the From header.
        headers = dict(self.headers)
        headers.update({'From': requester_id})

        start, end = self._compute_start_end_time(hours, minutes)
        body = {
            'maintenance_window': {
                'start_time': start,
                'end_time': end,
                'description': desc,
                'services': self._create_services_payload(service),
            }
        }

        response, info = http_call(self.module, url, data=json.dumps(body), headers=headers, method='POST')
        if info['status'] != 201:
            self.module.fail_json(msg="failed to create the window: %s" % info['msg'])
        return False, self._read_response(response), True

    def _create_services_payload(self, service):
        # Accept either a single service ID or a list of IDs.
        ids = service if isinstance(service, list) else [service]
        return [{'id': sid, 'type': 'service_reference'} for sid in ids]

    def _compute_start_end_time(self, hours, minutes):
        """Return (start, end) ISO-8601 UTC timestamps spanning the window."""
        fmt = "%Y-%m-%dT%H:%M:%SZ"
        begin = datetime.datetime.utcnow()
        finish = begin + datetime.timedelta(hours=int(hours), minutes=int(minutes))
        return begin.strftime(fmt), finish.strftime(fmt)

    def absent(self, window_id, http_call=fetch_url):
        """Delete the maintenance window identified by *window_id*."""
        url = "https://api.pagerduty.com/maintenance_windows/" + window_id
        response, info = http_call(self.module, url, headers=dict(self.headers), method='DELETE')
        if info['status'] != 204:
            self.module.fail_json(msg="failed to delete the window: %s" % info['msg'])
        return False, self._read_response(response), True

    def _auth_header(self):
        # Token-based Authorization header value.
        return "Token token=%s" % self.token

    def _read_response(self, response):
        """Best-effort JSON decode of the response body; '' when undecodable."""
        try:
            return json.loads(response.read())
        except Exception:
            return ""
|
||||
|
||||
|
||||
def main():
    """Entry point: create, list, or delete PagerDuty maintenance windows."""

    module = AnsibleModule(
        argument_spec=dict(
            state=dict(required=True, choices=['running', 'started', 'ongoing', 'absent']),
            name=dict(required=False),
            user=dict(required=False),
            token=dict(required=True, no_log=True),
            service=dict(required=False, type='list', aliases=["services"]),
            window_id=dict(required=False),
            requester_id=dict(required=False),
            hours=dict(default='1', required=False),
            minutes=dict(default='0', required=False),
            desc=dict(default='Created by Ansible', required=False),
            validate_certs=dict(default='yes', type='bool'),
        )
    )

    state = module.params['state']
    name = module.params['name']
    user = module.params['user']
    service = module.params['service']
    window_id = module.params['window_id']
    hours = module.params['hours']
    minutes = module.params['minutes']
    token = module.params['token']
    desc = module.params['desc']
    requester_id = module.params['requester_id']

    pd = PagerDutyRequest(module, name, user, token)

    # 'running' and 'started' are synonyms: both create a new window.
    if state == "running" or state == "started":
        if not service:
            module.fail_json(msg="service not specified")
        # NOTE(review): PagerDutyRequest methods return False as the first
        # tuple element, so rc == 0 below is always true (False == 0) and
        # changed is unconditionally forced to True here.
        (rc, out, changed) = pd.create(requester_id, service, hours, minutes, desc)
        if rc == 0:
            changed = True

    if state == "ongoing":
        (rc, out, changed) = pd.ongoing()

    if state == "absent":
        (rc, out, changed) = pd.absent(window_id)

    # rc is False (== 0) on every success path, so this check is effectively
    # dead code today; kept in case the request helpers ever change.
    if rc != 0:
        module.fail_json(msg="failed", result=out)

    module.exit_json(msg="success", result=out, changed=changed)


if __name__ == '__main__':
    main()
|
||||
251
plugins/modules/monitoring/pagerduty_alert.py
Normal file
251
plugins/modules/monitoring/pagerduty_alert.py
Normal file
@@ -0,0 +1,251 @@
|
||||
#!/usr/bin/python
|
||||
# -*- coding: utf-8 -*-
|
||||
# Copyright: Ansible Project
|
||||
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||
|
||||
from __future__ import absolute_import, division, print_function
|
||||
__metaclass__ = type
|
||||
|
||||
|
||||
ANSIBLE_METADATA = {'metadata_version': '1.1',
|
||||
'status': ['preview'],
|
||||
'supported_by': 'community'}
|
||||
|
||||
|
||||
DOCUMENTATION = '''
|
||||
|
||||
module: pagerduty_alert
|
||||
short_description: Trigger, acknowledge or resolve PagerDuty incidents
|
||||
description:
|
||||
- This module will let you trigger, acknowledge or resolve a PagerDuty incident by sending events
|
||||
author:
|
||||
- "Amanpreet Singh (@ApsOps)"
|
||||
requirements:
|
||||
- PagerDuty API access
|
||||
options:
|
||||
name:
|
||||
description:
|
||||
- PagerDuty unique subdomain. Obsolete. It is not used with PagerDuty REST v2 API.
|
||||
service_id:
|
||||
description:
|
||||
- ID of PagerDuty service when incidents will be triggered, acknowledged or resolved.
|
||||
required: true
|
||||
service_key:
|
||||
description:
|
||||
- The GUID of one of your "Generic API" services. Obsolete. Please use I(integration_key).
|
||||
integration_key:
|
||||
description:
|
||||
- The GUID of one of your "Generic API" services.
|
||||
- This is the "integration key" listed on a "Integrations" tab of PagerDuty service.
|
||||
required: true
|
||||
state:
|
||||
description:
|
||||
- Type of event to be sent.
|
||||
required: true
|
||||
choices:
|
||||
- 'triggered'
|
||||
- 'acknowledged'
|
||||
- 'resolved'
|
||||
api_key:
|
||||
description:
|
||||
- The pagerduty API key (readonly access), generated on the pagerduty site.
|
||||
required: true
|
||||
desc:
|
||||
description:
|
||||
- For C(triggered) I(state) - Required. Short description of the problem that led to this trigger. This field (or a truncated version)
|
||||
will be used when generating phone calls, SMS messages and alert emails. It will also appear on the incidents tables in the PagerDuty UI.
|
||||
The maximum length is 1024 characters.
|
||||
- For C(acknowledged) or C(resolved) I(state) - Text that will appear in the incident's log associated with this event.
|
||||
required: false
|
||||
default: Created via Ansible
|
||||
incident_key:
|
||||
description:
|
||||
- Identifies the incident to which this I(state) should be applied.
|
||||
- For C(triggered) I(state) - If there's no open (i.e. unresolved) incident with this key, a new one will be created. If there's already an
|
||||
open incident with a matching key, this event will be appended to that incident's log. The event key provides an easy way to "de-dup"
|
||||
problem reports.
|
||||
- For C(acknowledged) or C(resolved) I(state) - This should be the incident_key you received back when the incident was first opened by a
|
||||
trigger event. Acknowledge events referencing resolved or nonexistent incidents will be discarded.
|
||||
required: false
|
||||
client:
|
||||
description:
|
||||
- The name of the monitoring client that is triggering this event.
|
||||
required: false
|
||||
client_url:
|
||||
description:
|
||||
- The URL of the monitoring client that is triggering this event.
|
||||
required: false
|
||||
'''
|
||||
|
||||
EXAMPLES = '''
|
||||
# Trigger an incident with just the basic options
|
||||
- pagerduty_alert:
|
||||
name: companyabc
|
||||
integration_key: xxx
|
||||
api_key: yourapikey
|
||||
service_id: PDservice
|
||||
state: triggered
|
||||
desc: problem that led to this trigger
|
||||
|
||||
# Trigger an incident with more options
|
||||
- pagerduty_alert:
|
||||
integration_key: xxx
|
||||
api_key: yourapikey
|
||||
service_id: PDservice
|
||||
state: triggered
|
||||
desc: problem that led to this trigger
|
||||
incident_key: somekey
|
||||
client: Sample Monitoring Service
|
||||
client_url: http://service.example.com
|
||||
|
||||
# Acknowledge an incident based on incident_key
|
||||
- pagerduty_alert:
|
||||
integration_key: xxx
|
||||
api_key: yourapikey
|
||||
service_id: PDservice
|
||||
state: acknowledged
|
||||
incident_key: somekey
|
||||
desc: "some text for incident's log"
|
||||
|
||||
# Resolve an incident based on incident_key
|
||||
- pagerduty_alert:
|
||||
integration_key: xxx
|
||||
api_key: yourapikey
|
||||
service_id: PDservice
|
||||
state: resolved
|
||||
incident_key: somekey
|
||||
desc: "some text for incident's log"
|
||||
'''
|
||||
import json
|
||||
|
||||
from ansible.module_utils.basic import AnsibleModule
|
||||
from ansible.module_utils.urls import fetch_url
|
||||
from ansible.module_utils.six.moves.urllib.parse import urlparse, urlencode, urlunparse
|
||||
|
||||
|
||||
def check(module, name, state, service_id, integration_key, api_key, incident_key=None, http_call=None):
    """Query PagerDuty for the latest incident on *service_id* and decide
    whether a new event needs to be sent.

    :param module: AnsibleModule instance (used only for fail_json on HTTP errors).
    :param name: PagerDuty subdomain; unused with the REST v2 API, kept for
        backward compatibility with older callers.
    :param state: desired state ('triggered', 'acknowledged' or 'resolved').
    :param service_id: PagerDuty service ID whose incidents are inspected.
    :param integration_key: accepted to mirror the caller's signature; not
        used by this lookup.
    :param api_key: read-only REST API token used for authorization.
    :param incident_key: optional de-duplication key to filter incidents by.
    :param http_call: HTTP transport callable; defaults to fetch_url. The
        default is now resolved at call time (None sentinel) so tests can
        inject a fake transport without importing Ansible's url helpers.
    :returns: (incident_or_message, changed) where *changed* is True when an
        event should be sent.
    """
    if http_call is None:
        http_call = fetch_url
    url = 'https://api.pagerduty.com/incidents'
    headers = {
        "Content-type": "application/json",
        "Authorization": "Token token=%s" % api_key,
        'Accept': 'application/vnd.pagerduty+json;version=2'
    }

    # Only the most recent incident matters; sort newest-first.
    params = {
        'service_ids[]': service_id,
        'sort_by': 'incident_number:desc',
        'time_zone': 'UTC'
    }
    if incident_key:
        params['incident_key'] = incident_key

    url_parts = list(urlparse(url))
    url_parts[4] = urlencode(params, True)

    url = urlunparse(url_parts)

    response, info = http_call(module, url, method='get', headers=headers)

    if info['status'] != 200:
        # Bug fix: the original message ran "status." and "Reason" together
        # with no separating space.
        module.fail_json(msg="failed to check current incident status. "
                             "Reason: %s" % info['msg'])

    incidents = json.loads(response.read())["incidents"]
    msg = "No corresponding incident"

    if len(incidents) == 0:
        # Nothing open: acknowledge/resolve is a no-op, trigger is a change.
        if state in ('acknowledged', 'resolved'):
            return msg, False
        return msg, True
    elif state != incidents[0]["status"]:
        # Latest incident differs from the desired state -> send an event.
        return incidents[0], True

    return incidents[0], False
|
||||
|
||||
|
||||
def send_event(module, service_key, event_type, desc,
               incident_key=None, client=None, client_url=None):
    """Post a single event (trigger/acknowledge/resolve) to the PagerDuty
    generic Events API and return the parsed JSON response.

    Fails the module run when the API responds with a non-200 status.
    """
    url = "https://events.pagerduty.com/generic/2010-04-15/create_event.json"
    headers = {
        "Content-type": "application/json"
    }

    payload = {
        "service_key": service_key,
        "event_type": event_type,
        "incident_key": incident_key,
        "description": desc,
        "client": client,
        "client_url": client_url
    }

    response, info = fetch_url(module, url, method='post',
                               headers=headers, data=json.dumps(payload))
    if info['status'] != 200:
        module.fail_json(msg="failed to %s. Reason: %s" %
                             (event_type, info['msg']))
    return json.loads(response.read())
|
||||
|
||||
|
||||
def main():
    """Entry point: trigger, acknowledge, or resolve a PagerDuty incident."""
    module = AnsibleModule(
        argument_spec=dict(
            name=dict(required=False),
            service_id=dict(required=True),
            service_key=dict(required=False),
            integration_key=dict(required=False),
            api_key=dict(required=True),
            state=dict(required=True,
                       choices=['triggered', 'acknowledged', 'resolved']),
            client=dict(required=False, default=None),
            client_url=dict(required=False, default=None),
            desc=dict(required=False, default='Created via Ansible'),
            incident_key=dict(required=False, default=None)
        ),
        supports_check_mode=True
    )

    p = module.params
    name = p['name']
    service_id = p['service_id']
    integration_key = p['integration_key']
    service_key = p['service_key']
    api_key = p['api_key']
    state = p['state']
    client = p['client']
    client_url = p['client_url']
    desc = p['desc']
    incident_key = p['incident_key']

    # Fall back to the obsolete service_key parameter when integration_key
    # was not supplied; fail if neither is present.
    if integration_key is None:
        if service_key is None:
            module.fail_json(msg="'integration_key' is required parameter")
        integration_key = service_key
        module.warn('"service_key" is obsolete parameter and will be removed.'
                    ' Please, use "integration_key" instead')

    # Map the module state onto the Events API event type.
    event_type = {
        'triggered': 'trigger',
        'acknowledged': 'acknowledge',
        'resolved': 'resolve'
    }[state]

    if event_type != 'trigger' and incident_key is None:
        module.fail_json(msg="incident_key is required for "
                             "acknowledge or resolve events")

    out, changed = check(module, name, state, service_id,
                         integration_key, api_key, incident_key)

    # Only send the event for real when something must change.
    if not module.check_mode and changed is True:
        out = send_event(module, integration_key, event_type, desc,
                         incident_key, client, client_url)

    module.exit_json(result=out, changed=changed)


if __name__ == '__main__':
    main()
|
||||
141
plugins/modules/monitoring/pingdom.py
Normal file
141
plugins/modules/monitoring/pingdom.py
Normal file
@@ -0,0 +1,141 @@
|
||||
#!/usr/bin/python
|
||||
# -*- coding: utf-8 -*-
|
||||
# Copyright: Ansible Project
|
||||
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||
|
||||
from __future__ import absolute_import, division, print_function
|
||||
__metaclass__ = type
|
||||
|
||||
|
||||
ANSIBLE_METADATA = {'metadata_version': '1.1',
|
||||
'status': ['preview'],
|
||||
'supported_by': 'community'}
|
||||
|
||||
|
||||
DOCUMENTATION = '''
|
||||
|
||||
module: pingdom
|
||||
short_description: Pause/unpause Pingdom alerts
|
||||
description:
|
||||
- This module will let you pause/unpause Pingdom alerts
|
||||
author:
|
||||
- "Dylan Silva (@thaumos)"
|
||||
- "Justin Johns (!UNKNOWN)"
|
||||
requirements:
|
||||
- "This pingdom python library: https://github.com/mbabineau/pingdom-python"
|
||||
options:
|
||||
state:
|
||||
description:
|
||||
- Define whether or not the check should be running or paused.
|
||||
required: true
|
||||
choices: [ "running", "paused" ]
|
||||
checkid:
|
||||
description:
|
||||
- Pingdom ID of the check.
|
||||
required: true
|
||||
uid:
|
||||
description:
|
||||
- Pingdom user ID.
|
||||
required: true
|
||||
passwd:
|
||||
description:
|
||||
- Pingdom user password.
|
||||
required: true
|
||||
key:
|
||||
description:
|
||||
- Pingdom API key.
|
||||
required: true
|
||||
notes:
|
||||
- This module does not yet have support to add/remove checks.
|
||||
'''
|
||||
|
||||
EXAMPLES = '''
|
||||
# Pause the check with the ID of 12345.
|
||||
- pingdom:
|
||||
uid: example@example.com
|
||||
passwd: password123
|
||||
key: apipassword123
|
||||
checkid: 12345
|
||||
state: paused
|
||||
|
||||
# Unpause the check with the ID of 12345.
|
||||
- pingdom:
|
||||
uid: example@example.com
|
||||
passwd: password123
|
||||
key: apipassword123
|
||||
checkid: 12345
|
||||
state: running
|
||||
'''
|
||||
|
||||
import traceback
|
||||
|
||||
PINGDOM_IMP_ERR = None
|
||||
try:
|
||||
import pingdom
|
||||
HAS_PINGDOM = True
|
||||
except Exception:
|
||||
PINGDOM_IMP_ERR = traceback.format_exc()
|
||||
HAS_PINGDOM = False
|
||||
|
||||
from ansible.module_utils.basic import AnsibleModule, missing_required_lib
|
||||
|
||||
|
||||
def pause(checkid, uid, passwd, key):
|
||||
|
||||
c = pingdom.PingdomConnection(uid, passwd, key)
|
||||
c.modify_check(checkid, paused=True)
|
||||
check = c.get_check(checkid)
|
||||
name = check.name
|
||||
result = check.status
|
||||
# if result != "paused": # api output buggy - accept raw exception for now
|
||||
# return (True, name, result)
|
||||
return (False, name, result)
|
||||
|
||||
|
||||
def unpause(checkid, uid, passwd, key):
|
||||
|
||||
c = pingdom.PingdomConnection(uid, passwd, key)
|
||||
c.modify_check(checkid, paused=False)
|
||||
check = c.get_check(checkid)
|
||||
name = check.name
|
||||
result = check.status
|
||||
# if result != "up": # api output buggy - accept raw exception for now
|
||||
# return (True, name, result)
|
||||
return (False, name, result)
|
||||
|
||||
|
||||
def main():
|
||||
|
||||
module = AnsibleModule(
|
||||
argument_spec=dict(
|
||||
state=dict(required=True, choices=['running', 'paused', 'started', 'stopped']),
|
||||
checkid=dict(required=True),
|
||||
uid=dict(required=True),
|
||||
passwd=dict(required=True, no_log=True),
|
||||
key=dict(required=True)
|
||||
)
|
||||
)
|
||||
|
||||
if not HAS_PINGDOM:
|
||||
module.fail_json(msg=missing_required_lib("pingdom"), exception=PINGDOM_IMP_ERR)
|
||||
|
||||
checkid = module.params['checkid']
|
||||
state = module.params['state']
|
||||
uid = module.params['uid']
|
||||
passwd = module.params['passwd']
|
||||
key = module.params['key']
|
||||
|
||||
if (state == "paused" or state == "stopped"):
|
||||
(rc, name, result) = pause(checkid, uid, passwd, key)
|
||||
|
||||
if (state == "running" or state == "started"):
|
||||
(rc, name, result) = unpause(checkid, uid, passwd, key)
|
||||
|
||||
if rc != 0:
|
||||
module.fail_json(checkid=checkid, name=name, status=result)
|
||||
|
||||
module.exit_json(checkid=checkid, name=name, status=result)
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
main()
|
||||
141
plugins/modules/monitoring/rollbar_deployment.py
Normal file
141
plugins/modules/monitoring/rollbar_deployment.py
Normal file
@@ -0,0 +1,141 @@
|
||||
#!/usr/bin/python
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
# Copyright 2014, Max Riveiro, <kavu13@gmail.com>
|
||||
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||
|
||||
from __future__ import absolute_import, division, print_function
|
||||
__metaclass__ = type
|
||||
|
||||
|
||||
ANSIBLE_METADATA = {'metadata_version': '1.1',
|
||||
'status': ['preview'],
|
||||
'supported_by': 'community'}
|
||||
|
||||
|
||||
DOCUMENTATION = '''
|
||||
---
|
||||
module: rollbar_deployment
|
||||
author: "Max Riveiro (@kavu)"
|
||||
short_description: Notify Rollbar about app deployments
|
||||
description:
|
||||
- Notify Rollbar about app deployments
|
||||
(see https://rollbar.com/docs/deploys_other/)
|
||||
options:
|
||||
token:
|
||||
description:
|
||||
- Your project access token.
|
||||
required: true
|
||||
environment:
|
||||
description:
|
||||
- Name of the environment being deployed, e.g. 'production'.
|
||||
required: true
|
||||
revision:
|
||||
description:
|
||||
- Revision number/sha being deployed.
|
||||
required: true
|
||||
user:
|
||||
description:
|
||||
- User who deployed.
|
||||
required: false
|
||||
rollbar_user:
|
||||
description:
|
||||
- Rollbar username of the user who deployed.
|
||||
required: false
|
||||
comment:
|
||||
description:
|
||||
- Deploy comment (e.g. what is being deployed).
|
||||
required: false
|
||||
url:
|
||||
description:
|
||||
- Optional URL to submit the notification to.
|
||||
required: false
|
||||
default: 'https://api.rollbar.com/api/1/deploy/'
|
||||
validate_certs:
|
||||
description:
|
||||
- If C(no), SSL certificates for the target url will not be validated.
|
||||
This should only be used on personally controlled sites using
|
||||
self-signed certificates.
|
||||
required: false
|
||||
default: 'yes'
|
||||
type: bool
|
||||
'''
|
||||
|
||||
EXAMPLES = '''
|
||||
- name: Rollbar deployment notification
|
||||
rollbar_deployment:
|
||||
token: AAAAAA
|
||||
environment: staging
|
||||
user: ansible
|
||||
revision: '4.2'
|
||||
rollbar_user: admin
|
||||
comment: Test Deploy
|
||||
|
||||
- name: Notify rollbar about current git revision deployment by current user
|
||||
rollbar_deployment:
|
||||
token: "{{ rollbar_access_token }}"
|
||||
environment: production
|
||||
revision: "{{ lookup('pipe', 'git rev-parse HEAD') }}"
|
||||
user: "{{ lookup('env', 'USER') }}"
|
||||
'''
|
||||
import traceback
|
||||
|
||||
from ansible.module_utils.basic import AnsibleModule
|
||||
from ansible.module_utils.six.moves.urllib.parse import urlencode
|
||||
from ansible.module_utils._text import to_native
|
||||
from ansible.module_utils.urls import fetch_url
|
||||
|
||||
|
||||
def main():
    """Send a deploy notification to the Rollbar deploy API (or a compatible endpoint).

    In check mode it exits immediately, reporting changed=True without
    contacting the API. Otherwise it POSTs the deploy payload and succeeds
    only on an HTTP 200 response.
    """
    module = AnsibleModule(
        argument_spec=dict(
            # The project access token is a secret: mark it no_log so it is
            # never echoed into logs or task output.
            token=dict(required=True, no_log=True),
            environment=dict(required=True),
            revision=dict(required=True),
            user=dict(required=False),
            rollbar_user=dict(required=False),
            comment=dict(required=False),
            url=dict(
                required=False,
                default='https://api.rollbar.com/api/1/deploy/'
            ),
            validate_certs=dict(default='yes', type='bool'),
        ),
        supports_check_mode=True
    )

    # Nothing to verify against in check mode; just report the notification
    # that would be sent.
    if module.check_mode:
        module.exit_json(changed=True)

    # Mandatory payload fields for the Rollbar deploy API.
    params = dict(
        access_token=module.params['token'],
        environment=module.params['environment'],
        revision=module.params['revision']
    )

    # Optional fields are only included when supplied (truthy), mapped onto
    # the parameter names the API expects.
    if module.params['user']:
        params['local_username'] = module.params['user']

    if module.params['rollbar_user']:
        params['rollbar_username'] = module.params['rollbar_user']

    if module.params['comment']:
        params['comment'] = module.params['comment']

    url = module.params.get('url')

    try:
        data = urlencode(params)
        response, info = fetch_url(module, url, data=data, method='POST')
    except Exception as e:
        module.fail_json(msg='Unable to notify Rollbar: %s' % to_native(e), exception=traceback.format_exc())
    else:
        # fetch_url reports transport errors via info['status'] as well, so a
        # single status check covers both HTTP and connection failures.
        if info['status'] == 200:
            module.exit_json(changed=True)
        else:
            module.fail_json(msg='HTTP result code: %d connecting to %s' % (info['status'], url))
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
main()
|
||||
358
plugins/modules/monitoring/sensu/sensu_check.py
Normal file
358
plugins/modules/monitoring/sensu/sensu_check.py
Normal file
@@ -0,0 +1,358 @@
|
||||
#!/usr/bin/python
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
# (c) 2014, Anders Ingemann <aim@secoya.dk>
|
||||
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||
|
||||
from __future__ import absolute_import, division, print_function
|
||||
__metaclass__ = type
|
||||
|
||||
|
||||
ANSIBLE_METADATA = {'metadata_version': '1.1',
|
||||
'status': ['preview'],
|
||||
'supported_by': 'community'}
|
||||
|
||||
|
||||
DOCUMENTATION = '''
|
||||
---
|
||||
module: sensu_check
|
||||
short_description: Manage Sensu checks
|
||||
description:
|
||||
- Manage the checks that should be run on a machine by I(Sensu).
|
||||
- Most options do not have a default and will not be added to the check definition unless specified.
|
||||
- All defaults except I(path), I(state), I(backup) and I(metric) are not managed by this module,
|
||||
- they are simply specified for your convenience.
|
||||
options:
|
||||
name:
|
||||
description:
|
||||
- The name of the check
|
||||
- This is the key that is used to determine whether a check exists
|
||||
required: true
|
||||
state:
|
||||
description:
|
||||
- Whether the check should be present or not
|
||||
choices: [ 'present', 'absent' ]
|
||||
default: present
|
||||
path:
|
||||
description:
|
||||
- Path to the json file of the check to be added/removed.
|
||||
- Will be created if it does not exist (unless I(state=absent)).
|
||||
- The parent folders need to exist when I(state=present), otherwise an error will be thrown
|
||||
default: /etc/sensu/conf.d/checks.json
|
||||
backup:
|
||||
description:
|
||||
- Create a backup file (if yes), including the timestamp information so
|
||||
- you can get the original file back if you somehow clobbered it incorrectly.
|
||||
type: bool
|
||||
default: 'no'
|
||||
command:
|
||||
description:
|
||||
- Path to the sensu check to run (not required when I(state=absent))
|
||||
required: true
|
||||
handlers:
|
||||
description:
|
||||
- List of handlers to notify when the check fails
|
||||
default: []
|
||||
subscribers:
|
||||
description:
|
||||
- List of subscribers/channels this check should run for
|
||||
- See sensu_subscribers to subscribe a machine to a channel
|
||||
default: []
|
||||
interval:
|
||||
description:
|
||||
- Check interval in seconds
|
||||
timeout:
|
||||
description:
|
||||
- Timeout for the check
|
||||
default: 10
|
||||
ttl:
|
||||
description:
|
||||
- Time to live in seconds until the check is considered stale
|
||||
handle:
|
||||
description:
|
||||
- Whether the check should be handled or not
|
||||
type: bool
|
||||
default: 'yes'
|
||||
subdue_begin:
|
||||
description:
|
||||
- When to disable handling of check failures
|
||||
subdue_end:
|
||||
description:
|
||||
- When to enable handling of check failures
|
||||
dependencies:
|
||||
description:
|
||||
- Other checks this check depends on, if dependencies fail,
|
||||
- handling of this check will be disabled
|
||||
default: []
|
||||
metric:
|
||||
description:
|
||||
- Whether the check is a metric
|
||||
type: bool
|
||||
default: 'no'
|
||||
standalone:
|
||||
description:
|
||||
- Whether the check should be scheduled by the sensu client or server
|
||||
- This option obviates the need for specifying the I(subscribers) option
|
||||
type: bool
|
||||
default: 'no'
|
||||
publish:
|
||||
description:
|
||||
- Whether the check should be scheduled at all.
|
||||
- You can still issue it via the sensu api
|
||||
type: bool
|
||||
default: 'yes'
|
||||
occurrences:
|
||||
description:
|
||||
- Number of event occurrences before the handler should take action
|
||||
default: 1
|
||||
refresh:
|
||||
description:
|
||||
- Number of seconds handlers should wait before taking second action
|
||||
aggregate:
|
||||
description:
|
||||
- Classifies the check as an aggregate check,
|
||||
- making it available via the aggregate API
|
||||
type: bool
|
||||
default: 'no'
|
||||
low_flap_threshold:
|
||||
description:
|
||||
- The low threshold for flap detection
|
||||
high_flap_threshold:
|
||||
description:
|
||||
- The high threshold for flap detection
|
||||
custom:
|
||||
description:
|
||||
- A hash/dictionary of custom parameters for mixing to the configuration.
|
||||
- You can't rewrite others module parameters using this
|
||||
default: {}
|
||||
source:
|
||||
description:
|
||||
- The check source, used to create a JIT Sensu client for an external resource (e.g. a network switch).
|
||||
author: "Anders Ingemann (@andsens)"
|
||||
'''
|
||||
|
||||
EXAMPLES = '''
|
||||
# Fetch metrics about the CPU load every 60 seconds,
|
||||
# the sensu server has a handler called 'relay' which forwards stats to graphite
|
||||
- name: get cpu metrics
|
||||
sensu_check:
|
||||
name: cpu_load
|
||||
command: /etc/sensu/plugins/system/cpu-mpstat-metrics.rb
|
||||
metric: yes
|
||||
handlers: relay
|
||||
subscribers: common
|
||||
interval: 60
|
||||
|
||||
# Check whether nginx is running
|
||||
- name: check nginx process
|
||||
sensu_check:
|
||||
name: nginx_running
|
||||
command: /etc/sensu/plugins/processes/check-procs.rb -f /var/run/nginx.pid
|
||||
handlers: default
|
||||
subscribers: nginx
|
||||
interval: 60
|
||||
|
||||
# Stop monitoring the disk capacity.
|
||||
# Note that the check will still show up in the sensu dashboard,
|
||||
# to remove it completely you need to issue a DELETE request to the sensu api.
|
||||
- name: check disk
|
||||
sensu_check:
|
||||
name: check_disk_capacity
|
||||
state: absent
|
||||
'''
|
||||
|
||||
import json
|
||||
import traceback
|
||||
|
||||
from ansible.module_utils.basic import AnsibleModule
|
||||
from ansible.module_utils._text import to_native
|
||||
|
||||
|
||||
def sensu_check(module, path, name, state='present', backup=False):
|
||||
changed = False
|
||||
reasons = []
|
||||
|
||||
stream = None
|
||||
try:
|
||||
try:
|
||||
stream = open(path, 'r')
|
||||
config = json.load(stream)
|
||||
except IOError as e:
|
||||
if e.errno == 2: # File not found, non-fatal
|
||||
if state == 'absent':
|
||||
reasons.append('file did not exist and state is `absent\'')
|
||||
return changed, reasons
|
||||
config = {}
|
||||
else:
|
||||
module.fail_json(msg=to_native(e), exception=traceback.format_exc())
|
||||
except ValueError:
|
||||
msg = '{path} contains invalid JSON'.format(path=path)
|
||||
module.fail_json(msg=msg)
|
||||
finally:
|
||||
if stream:
|
||||
stream.close()
|
||||
|
||||
if 'checks' not in config:
|
||||
if state == 'absent':
|
||||
reasons.append('`checks\' section did not exist and state is `absent\'')
|
||||
return changed, reasons
|
||||
config['checks'] = {}
|
||||
changed = True
|
||||
reasons.append('`checks\' section did not exist')
|
||||
|
||||
if state == 'absent':
|
||||
if name in config['checks']:
|
||||
del config['checks'][name]
|
||||
changed = True
|
||||
reasons.append('check was present and state is `absent\'')
|
||||
|
||||
if state == 'present':
|
||||
if name not in config['checks']:
|
||||
check = {}
|
||||
config['checks'][name] = check
|
||||
changed = True
|
||||
reasons.append('check was absent and state is `present\'')
|
||||
else:
|
||||
check = config['checks'][name]
|
||||
simple_opts = ['command',
|
||||
'handlers',
|
||||
'subscribers',
|
||||
'interval',
|
||||
'timeout',
|
||||
'ttl',
|
||||
'handle',
|
||||
'dependencies',
|
||||
'standalone',
|
||||
'publish',
|
||||
'occurrences',
|
||||
'refresh',
|
||||
'aggregate',
|
||||
'low_flap_threshold',
|
||||
'high_flap_threshold',
|
||||
'source',
|
||||
]
|
||||
for opt in simple_opts:
|
||||
if module.params[opt] is not None:
|
||||
if opt not in check or check[opt] != module.params[opt]:
|
||||
check[opt] = module.params[opt]
|
||||
changed = True
|
||||
reasons.append('`{opt}\' did not exist or was different'.format(opt=opt))
|
||||
else:
|
||||
if opt in check:
|
||||
del check[opt]
|
||||
changed = True
|
||||
reasons.append('`{opt}\' was removed'.format(opt=opt))
|
||||
|
||||
if module.params['custom']:
|
||||
# Convert to json
|
||||
custom_params = module.params['custom']
|
||||
overwrited_fields = set(custom_params.keys()) & set(simple_opts + ['type', 'subdue', 'subdue_begin', 'subdue_end'])
|
||||
if overwrited_fields:
|
||||
msg = 'You can\'t overwriting standard module parameters via "custom". You are trying overwrite: {opt}'.format(opt=list(overwrited_fields))
|
||||
module.fail_json(msg=msg)
|
||||
|
||||
for k, v in custom_params.items():
|
||||
if k in config['checks'][name]:
|
||||
if not config['checks'][name][k] == v:
|
||||
changed = True
|
||||
reasons.append('`custom param {opt}\' was changed'.format(opt=k))
|
||||
else:
|
||||
changed = True
|
||||
reasons.append('`custom param {opt}\' was added'.format(opt=k))
|
||||
check[k] = v
|
||||
simple_opts += custom_params.keys()
|
||||
|
||||
# Remove obsolete custom params
|
||||
for opt in set(config['checks'][name].keys()) - set(simple_opts + ['type', 'subdue', 'subdue_begin', 'subdue_end']):
|
||||
changed = True
|
||||
reasons.append('`custom param {opt}\' was deleted'.format(opt=opt))
|
||||
del check[opt]
|
||||
|
||||
if module.params['metric']:
|
||||
if 'type' not in check or check['type'] != 'metric':
|
||||
check['type'] = 'metric'
|
||||
changed = True
|
||||
reasons.append('`type\' was not defined or not `metric\'')
|
||||
if not module.params['metric'] and 'type' in check:
|
||||
del check['type']
|
||||
changed = True
|
||||
reasons.append('`type\' was defined')
|
||||
|
||||
if module.params['subdue_begin'] is not None and module.params['subdue_end'] is not None:
|
||||
subdue = {'begin': module.params['subdue_begin'],
|
||||
'end': module.params['subdue_end'],
|
||||
}
|
||||
if 'subdue' not in check or check['subdue'] != subdue:
|
||||
check['subdue'] = subdue
|
||||
changed = True
|
||||
reasons.append('`subdue\' did not exist or was different')
|
||||
else:
|
||||
if 'subdue' in check:
|
||||
del check['subdue']
|
||||
changed = True
|
||||
reasons.append('`subdue\' was removed')
|
||||
|
||||
if changed and not module.check_mode:
|
||||
if backup:
|
||||
module.backup_local(path)
|
||||
try:
|
||||
try:
|
||||
stream = open(path, 'w')
|
||||
stream.write(json.dumps(config, indent=2) + '\n')
|
||||
except IOError as e:
|
||||
module.fail_json(msg=to_native(e), exception=traceback.format_exc())
|
||||
finally:
|
||||
if stream:
|
||||
stream.close()
|
||||
|
||||
return changed, reasons
|
||||
|
||||
|
||||
def main():
    """Module entry point: parse arguments and apply the Sensu check definition."""
    arg_spec = dict(
        name=dict(type='str', required=True),
        path=dict(type='str', default='/etc/sensu/conf.d/checks.json'),
        state=dict(type='str', default='present', choices=['present', 'absent']),
        backup=dict(type='bool', default='no'),
        command=dict(type='str'),
        handlers=dict(type='list'),
        subscribers=dict(type='list'),
        interval=dict(type='int'),
        timeout=dict(type='int'),
        ttl=dict(type='int'),
        handle=dict(type='bool'),
        subdue_begin=dict(type='str'),
        subdue_end=dict(type='str'),
        dependencies=dict(type='list'),
        metric=dict(type='bool', default='no'),
        standalone=dict(type='bool'),
        publish=dict(type='bool'),
        occurrences=dict(type='int'),
        refresh=dict(type='int'),
        aggregate=dict(type='bool'),
        low_flap_threshold=dict(type='int'),
        high_flap_threshold=dict(type='int'),
        custom=dict(type='dict'),
        source=dict(type='str'),
    )

    module = AnsibleModule(argument_spec=arg_spec,
                           required_together=[['subdue_begin', 'subdue_end']],
                           supports_check_mode=True)

    # 'command' is only optional when deleting a check, so the requirement is
    # enforced here instead of in the argument spec.
    if module.params['state'] != 'absent' and module.params['command'] is None:
        module.fail_json(msg="missing required arguments: %s" % ",".join(['command']))

    path = module.params['path']
    name = module.params['name']

    changed, reasons = sensu_check(module, path, name,
                                   module.params['state'],
                                   module.params['backup'])

    module.exit_json(path=path, changed=changed, msg='OK', name=name, reasons=reasons)
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
main()
|
||||
250
plugins/modules/monitoring/sensu/sensu_client.py
Normal file
250
plugins/modules/monitoring/sensu/sensu_client.py
Normal file
@@ -0,0 +1,250 @@
|
||||
#!/usr/bin/python
|
||||
|
||||
# (c) 2017, Red Hat Inc.
|
||||
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||
|
||||
from __future__ import absolute_import, division, print_function
|
||||
__metaclass__ = type
|
||||
|
||||
ANSIBLE_METADATA = {'metadata_version': '1.1',
|
||||
'status': ['preview'],
|
||||
'supported_by': 'community'}
|
||||
|
||||
DOCUMENTATION = '''
|
||||
---
|
||||
module: sensu_client
|
||||
author: "David Moreau Simard (@dmsimard)"
|
||||
short_description: Manages Sensu client configuration
|
||||
description:
|
||||
- Manages Sensu client configuration.
|
||||
- 'For more information, refer to the Sensu documentation: U(https://sensuapp.org/docs/latest/reference/clients.html)'
|
||||
options:
|
||||
state:
|
||||
description:
|
||||
- Whether the client should be present or not
|
||||
choices: [ 'present', 'absent' ]
|
||||
default: present
|
||||
name:
|
||||
description:
|
||||
- A unique name for the client. The name cannot contain special characters or spaces.
|
||||
default: System hostname as determined by Ruby Socket.gethostname (provided by Sensu)
|
||||
address:
|
||||
description:
|
||||
- An address to help identify and reach the client. This is only informational, usually an IP address or hostname.
|
||||
default: Non-loopback IPv4 address as determined by Ruby Socket.ip_address_list (provided by Sensu)
|
||||
subscriptions:
|
||||
description:
|
||||
- An array of client subscriptions, a list of roles and/or responsibilities assigned to the system (e.g. webserver).
|
||||
- These subscriptions determine which monitoring checks are executed by the client, as check requests are sent to subscriptions.
|
||||
- The subscriptions array items must be strings.
|
||||
required: True
|
||||
safe_mode:
|
||||
description:
|
||||
- If safe mode is enabled for the client. Safe mode requires local check definitions in order to accept a check request and execute the check.
|
||||
type: bool
|
||||
default: 'no'
|
||||
redact:
|
||||
description:
|
||||
- Client definition attributes to redact (values) when logging and sending client keepalives.
|
||||
socket:
|
||||
description:
|
||||
- The socket definition scope, used to configure the Sensu client socket.
|
||||
keepalives:
|
||||
description:
|
||||
- If Sensu should monitor keepalives for this client.
|
||||
type: bool
|
||||
default: 'yes'
|
||||
keepalive:
|
||||
description:
|
||||
- The keepalive definition scope, used to configure Sensu client keepalives behavior (e.g. keepalive thresholds, etc).
|
||||
registration:
|
||||
description:
|
||||
- The registration definition scope, used to configure Sensu registration event handlers.
|
||||
deregister:
|
||||
description:
|
||||
- If a deregistration event should be created upon Sensu client process stop.
|
||||
type: bool
|
||||
default: 'no'
|
||||
deregistration:
|
||||
description:
|
||||
- The deregistration definition scope, used to configure automated Sensu client de-registration.
|
||||
ec2:
|
||||
description:
|
||||
- The ec2 definition scope, used to configure the Sensu Enterprise AWS EC2 integration (Sensu Enterprise users only).
|
||||
chef:
|
||||
description:
|
||||
- The chef definition scope, used to configure the Sensu Enterprise Chef integration (Sensu Enterprise users only).
|
||||
puppet:
|
||||
description:
|
||||
- The puppet definition scope, used to configure the Sensu Enterprise Puppet integration (Sensu Enterprise users only).
|
||||
servicenow:
|
||||
description:
|
||||
- The servicenow definition scope, used to configure the Sensu Enterprise ServiceNow integration (Sensu Enterprise users only).
|
||||
notes:
|
||||
- Check mode is supported
|
||||
'''
|
||||
|
||||
EXAMPLES = '''
|
||||
# Minimum possible configuration
|
||||
- name: Configure Sensu client
|
||||
sensu_client:
|
||||
subscriptions:
|
||||
- default
|
||||
|
||||
# With customization
|
||||
- name: Configure Sensu client
|
||||
sensu_client:
|
||||
name: "{{ ansible_fqdn }}"
|
||||
address: "{{ ansible_default_ipv4['address'] }}"
|
||||
subscriptions:
|
||||
- default
|
||||
- webserver
|
||||
redact:
|
||||
- password
|
||||
socket:
|
||||
bind: 127.0.0.1
|
||||
port: 3030
|
||||
keepalive:
|
||||
thresholds:
|
||||
warning: 180
|
||||
critical: 300
|
||||
handlers:
|
||||
- email
|
||||
custom:
|
||||
- broadcast: irc
|
||||
occurrences: 3
|
||||
register: client
|
||||
notify:
|
||||
- Restart sensu-client
|
||||
|
||||
- name: Secure Sensu client configuration file
|
||||
file:
|
||||
path: "{{ client['file'] }}"
|
||||
owner: "sensu"
|
||||
group: "sensu"
|
||||
mode: "0600"
|
||||
|
||||
- name: Delete the Sensu client configuration
|
||||
sensu_client:
|
||||
state: "absent"
|
||||
'''
|
||||
|
||||
RETURN = '''
|
||||
config:
|
||||
description: Effective client configuration, when state is present
|
||||
returned: success
|
||||
type: dict
|
||||
sample: {'name': 'client', 'subscriptions': ['default']}
|
||||
file:
|
||||
description: Path to the client configuration file
|
||||
returned: success
|
||||
type: str
|
||||
sample: "/etc/sensu/conf.d/client.json"
|
||||
'''
|
||||
|
||||
import json
|
||||
import os
|
||||
|
||||
from ansible.module_utils.basic import AnsibleModule
|
||||
|
||||
|
||||
def main():
    """Manage /etc/sensu/conf.d/client.json from module arguments.

    state=absent deletes the file (idempotently); state=present builds the
    desired client configuration, compares it with the file on disk and only
    rewrites the file when they differ. All outcomes exit via
    module.exit_json / module.fail_json.
    """
    module = AnsibleModule(
        supports_check_mode=True,
        argument_spec=dict(
            state=dict(type='str', required=False, choices=['present', 'absent'], default='present'),
            name=dict(type='str', required=False),
            address=dict(type='str', required=False),
            subscriptions=dict(type='list', required=False),
            safe_mode=dict(type='bool', required=False, default=False),
            redact=dict(type='list', required=False),
            socket=dict(type='dict', required=False),
            keepalives=dict(type='bool', required=False, default=True),
            keepalive=dict(type='dict', required=False),
            registration=dict(type='dict', required=False),
            deregister=dict(type='bool', required=False),
            deregistration=dict(type='dict', required=False),
            ec2=dict(type='dict', required=False),
            chef=dict(type='dict', required=False),
            puppet=dict(type='dict', required=False),
            servicenow=dict(type='dict', required=False)
        ),
        required_if=[
            ['state', 'present', ['subscriptions']]
        ]
    )

    state = module.params['state']
    # The client configuration location is fixed by Sensu's conventions.
    path = "/etc/sensu/conf.d/client.json"

    if state == 'absent':
        if os.path.exists(path):
            if module.check_mode:
                msg = '{path} would have been deleted'.format(path=path)
                module.exit_json(msg=msg, changed=True)
            else:
                try:
                    os.remove(path)
                    msg = '{path} deleted successfully'.format(path=path)
                    module.exit_json(msg=msg, changed=True)
                except OSError as e:
                    msg = 'Exception when trying to delete {path}: {exception}'
                    module.fail_json(
                        msg=msg.format(path=path, exception=str(e)))
        else:
            # Idempotency: it's okay if the file doesn't exist
            msg = '{path} already does not exist'.format(path=path)
            module.exit_json(msg=msg)

    # Build client configuration from module arguments; unset (None) options
    # are omitted so Sensu's own defaults apply.
    config = {'client': {}}
    args = ['name', 'address', 'subscriptions', 'safe_mode', 'redact',
            'socket', 'keepalives', 'keepalive', 'registration', 'deregister',
            'deregistration', 'ec2', 'chef', 'puppet', 'servicenow']

    for arg in args:
        if arg in module.params and module.params[arg] is not None:
            config['client'][arg] = module.params[arg]

    # Load the current config, if there is one, so we can compare
    current_config = None
    try:
        current_config = json.load(open(path, 'r'))
    except (IOError, ValueError):
        # File either doesn't exist or it's invalid JSON
        pass

    if current_config is not None and current_config == config:
        # Config is the same, let's not change anything
        module.exit_json(msg='Client configuration is already up to date',
                         config=config['client'],
                         file=path)

    # Validate that directory exists before trying to write to it
    if not module.check_mode and not os.path.exists(os.path.dirname(path)):
        try:
            os.makedirs(os.path.dirname(path))
        except OSError as e:
            module.fail_json(msg='Unable to create {0}: {1}'.format(os.path.dirname(path),
                                                                    str(e)))

    if module.check_mode:
        module.exit_json(msg='Client configuration would have been updated',
                         changed=True,
                         config=config['client'],
                         file=path)

    try:
        with open(path, 'w') as client:
            client.write(json.dumps(config, indent=4))
            module.exit_json(msg='Client configuration updated',
                             changed=True,
                             config=config['client'],
                             file=path)
    except (OSError, IOError) as e:
        module.fail_json(msg='Unable to write file {0}: {1}'.format(path,
                                                                    str(e)))
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
main()
|
||||
263
plugins/modules/monitoring/sensu/sensu_handler.py
Normal file
263
plugins/modules/monitoring/sensu/sensu_handler.py
Normal file
@@ -0,0 +1,263 @@
|
||||
#!/usr/bin/python
|
||||
|
||||
# (c) 2017, Red Hat Inc.
|
||||
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||
|
||||
from __future__ import absolute_import, division, print_function
|
||||
__metaclass__ = type
|
||||
|
||||
ANSIBLE_METADATA = {'metadata_version': '1.1',
|
||||
'status': ['preview'],
|
||||
'supported_by': 'community'}
|
||||
|
||||
DOCUMENTATION = '''
|
||||
---
|
||||
module: sensu_handler
|
||||
author: "David Moreau Simard (@dmsimard)"
|
||||
short_description: Manages Sensu handler configuration
|
||||
description:
|
||||
- Manages Sensu handler configuration
|
||||
- 'For more information, refer to the Sensu documentation: U(https://sensuapp.org/docs/latest/reference/handlers.html)'
|
||||
options:
|
||||
state:
|
||||
description:
|
||||
- Whether the handler should be present or not
|
||||
choices: [ 'present', 'absent' ]
|
||||
default: present
|
||||
name:
|
||||
description:
|
||||
- A unique name for the handler. The name cannot contain special characters or spaces.
|
||||
required: True
|
||||
type:
|
||||
description:
|
||||
- The handler type
|
||||
choices: [ 'pipe', 'tcp', 'udp', 'transport', 'set' ]
|
||||
required: True
|
||||
filter:
|
||||
description:
|
||||
- The Sensu event filter (name) to use when filtering events for the handler.
|
||||
filters:
|
||||
description:
|
||||
- An array of Sensu event filters (names) to use when filtering events for the handler.
|
||||
- Each array item must be a string.
|
||||
severities:
|
||||
description:
|
||||
- An array of check result severities the handler will handle.
|
||||
- 'NOTE: event resolution bypasses this filtering.'
|
||||
choices: [ 'warning', 'critical', 'unknown' ]
|
||||
mutator:
|
||||
description:
|
||||
- The Sensu event mutator (name) to use to mutate event data for the handler.
|
||||
timeout:
|
||||
description:
|
||||
- The handler execution duration timeout in seconds (hard stop).
|
||||
- Only used by pipe and tcp handler types.
|
||||
default: 10
|
||||
handle_silenced:
|
||||
description:
|
||||
- If events matching one or more silence entries should be handled.
|
||||
type: bool
|
||||
default: 'no'
|
||||
handle_flapping:
|
||||
description:
|
||||
- If events in the flapping state should be handled.
|
||||
type: bool
|
||||
default: 'no'
|
||||
command:
|
||||
description:
|
||||
- The handler command to be executed.
|
||||
- The event data is passed to the process via STDIN.
|
||||
- 'NOTE: the command attribute is only required for Pipe handlers (i.e. handlers configured with "type": "pipe").'
|
||||
socket:
|
||||
description:
|
||||
- The socket definition scope, used to configure the TCP/UDP handler socket.
|
||||
- 'NOTE: the socket attribute is only required for TCP/UDP handlers (i.e. handlers configured with "type": "tcp" or "type": "udp").'
|
||||
pipe:
|
||||
description:
|
||||
- The pipe definition scope, used to configure the Sensu transport pipe.
|
||||
- 'NOTE: the pipe attribute is only required for Transport handlers (i.e. handlers configured with "type": "transport").'
|
||||
handlers:
|
||||
description:
|
||||
- An array of Sensu event handlers (names) to use for events using the handler set.
|
||||
- Each array item must be a string.
|
||||
- 'NOTE: the handlers attribute is only required for handler sets (i.e. handlers configured with "type": "set").'
|
||||
notes:
|
||||
- Check mode is supported
|
||||
'''
|
||||
|
||||
EXAMPLES = '''
|
||||
# Configure a handler that sends event data as STDIN (pipe)
|
||||
- name: Configure IRC Sensu handler
|
||||
sensu_handler:
|
||||
name: "irc_handler"
|
||||
type: "pipe"
|
||||
command: "/usr/local/bin/notify-irc.sh"
|
||||
severities:
|
||||
- "ok"
|
||||
- "critical"
|
||||
- "warning"
|
||||
- "unknown"
|
||||
timeout: 15
|
||||
notify:
|
||||
- Restart sensu-client
|
||||
- Restart sensu-server
|
||||
|
||||
# Delete a handler
|
||||
- name: Delete IRC Sensu handler
|
||||
sensu_handler:
|
||||
name: "irc_handler"
|
||||
state: "absent"
|
||||
|
||||
# Example of a TCP handler
|
||||
- name: Configure TCP Sensu handler
|
||||
sensu_handler:
|
||||
name: "tcp_handler"
|
||||
type: "tcp"
|
||||
timeout: 30
|
||||
socket:
|
||||
host: "10.0.1.99"
|
||||
port: 4444
|
||||
register: handler
|
||||
notify:
|
||||
- Restart sensu-client
|
||||
- Restart sensu-server
|
||||
|
||||
- name: Secure Sensu handler configuration file
|
||||
file:
|
||||
path: "{{ handler['file'] }}"
|
||||
owner: "sensu"
|
||||
group: "sensu"
|
||||
mode: "0600"
|
||||
'''
|
||||
|
||||
RETURN = '''
|
||||
config:
|
||||
description: Effective handler configuration, when state is present
|
||||
returned: success
|
||||
type: dict
|
||||
sample: {'name': 'irc', 'type': 'pipe', 'command': '/usr/local/bin/notify-irc.sh'}
|
||||
file:
|
||||
description: Path to the handler configuration file
|
||||
returned: success
|
||||
type: str
|
||||
sample: "/etc/sensu/conf.d/handlers/irc.json"
|
||||
name:
|
||||
description: Name of the handler
|
||||
returned: success
|
||||
type: str
|
||||
sample: "irc"
|
||||
'''
|
||||
|
||||
import json
|
||||
import os
|
||||
|
||||
from ansible.module_utils.basic import AnsibleModule
|
||||
|
||||
|
||||
def main():
    """Manage a Sensu handler definition file under /etc/sensu/conf.d/handlers/.

    Builds the handler configuration from the module arguments, compares it to
    any existing file, and only (re)writes the file when the content differs.
    Supports check mode; ``state=absent`` deletes the file.
    """
    module = AnsibleModule(
        supports_check_mode=True,
        argument_spec=dict(
            state=dict(type='str', required=False, choices=['present', 'absent'], default='present'),
            name=dict(type='str', required=True),
            type=dict(type='str', required=False, choices=['pipe', 'tcp', 'udp', 'transport', 'set']),
            filter=dict(type='str', required=False),
            filters=dict(type='list', required=False),
            severities=dict(type='list', required=False),
            mutator=dict(type='str', required=False),
            timeout=dict(type='int', required=False, default=10),
            handle_silenced=dict(type='bool', required=False, default=False),
            handle_flapping=dict(type='bool', required=False, default=False),
            command=dict(type='str', required=False),
            socket=dict(type='dict', required=False),
            pipe=dict(type='dict', required=False),
            handlers=dict(type='list', required=False),
        ),
        # Each handler type has mandatory companion attributes (see DOCUMENTATION).
        required_if=[
            ['state', 'present', ['type']],
            ['type', 'pipe', ['command']],
            ['type', 'tcp', ['socket']],
            ['type', 'udp', ['socket']],
            ['type', 'transport', ['pipe']],
            ['type', 'set', ['handlers']]
        ]
    )

    state = module.params['state']
    name = module.params['name']
    path = '/etc/sensu/conf.d/handlers/{0}.json'.format(name)

    if state == 'absent':
        if os.path.exists(path):
            if module.check_mode:
                msg = '{path} would have been deleted'.format(path=path)
                module.exit_json(msg=msg, changed=True)
            else:
                try:
                    os.remove(path)
                    msg = '{path} deleted successfully'.format(path=path)
                    module.exit_json(msg=msg, changed=True)
                except OSError as e:
                    msg = 'Exception when trying to delete {path}: {exception}'
                    module.fail_json(
                        msg=msg.format(path=path, exception=str(e)))
        else:
            # Idempotency: it's okay if the file doesn't exist
            msg = '{path} already does not exist'.format(path=path)
            module.exit_json(msg=msg)

    # Build handler configuration from module arguments
    config = {'handlers': {name: {}}}
    args = ['type', 'filter', 'filters', 'severities', 'mutator', 'timeout',
            'handle_silenced', 'handle_flapping', 'command', 'socket',
            'pipe', 'handlers']

    for arg in args:
        if arg in module.params and module.params[arg] is not None:
            config['handlers'][name][arg] = module.params[arg]

    # Load the current config, if there is one, so we can compare
    current_config = None
    try:
        # Use a context manager so the handle is always closed
        # (the previous version leaked the open file object).
        with open(path, 'r') as config_file:
            current_config = json.load(config_file)
    except (IOError, ValueError):
        # File either doesn't exist or it's invalid JSON
        pass

    if current_config is not None and current_config == config:
        # Config is the same, let's not change anything
        module.exit_json(msg='Handler configuration is already up to date',
                         config=config['handlers'][name],
                         file=path,
                         name=name)

    # Validate that directory exists before trying to write to it
    if not module.check_mode and not os.path.exists(os.path.dirname(path)):
        try:
            os.makedirs(os.path.dirname(path))
        except OSError as e:
            module.fail_json(msg='Unable to create {0}: {1}'.format(os.path.dirname(path),
                                                                    str(e)))

    if module.check_mode:
        module.exit_json(msg='Handler configuration would have been updated',
                         changed=True,
                         config=config['handlers'][name],
                         file=path,
                         name=name)

    try:
        with open(path, 'w') as handler:
            handler.write(json.dumps(config, indent=4))
            module.exit_json(msg='Handler configuration updated',
                             changed=True,
                             config=config['handlers'][name],
                             file=path,
                             name=name)
    except (OSError, IOError) as e:
        module.fail_json(msg='Unable to write file {0}: {1}'.format(path,
                                                                    str(e)))


if __name__ == '__main__':
    main()
|
||||
295
plugins/modules/monitoring/sensu/sensu_silence.py
Normal file
295
plugins/modules/monitoring/sensu/sensu_silence.py
Normal file
@@ -0,0 +1,295 @@
|
||||
#!/usr/bin/python
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
# (c) 2017, Steven Bambling <smbambling@gmail.com>
|
||||
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||
|
||||
from __future__ import absolute_import, division, print_function
|
||||
__metaclass__ = type
|
||||
|
||||
|
||||
ANSIBLE_METADATA = {'metadata_version': '1.1',
|
||||
'status': ['preview'],
|
||||
'supported_by': 'community'}
|
||||
|
||||
DOCUMENTATION = '''
|
||||
---
|
||||
module: sensu_silence
|
||||
author: Steven Bambling (@smbambling)
|
||||
short_description: Manage Sensu silence entries
|
||||
description:
|
||||
- Create and clear (delete) a silence entries via the Sensu API
|
||||
for subscriptions and checks.
|
||||
options:
|
||||
check:
|
||||
description:
|
||||
- Specifies the check which the silence entry applies to.
|
||||
creator:
|
||||
description:
|
||||
- Specifies the entity responsible for this entry.
|
||||
expire:
|
||||
description:
|
||||
- If specified, the silence entry will be automatically cleared
|
||||
after this number of seconds.
|
||||
expire_on_resolve:
|
||||
description:
|
||||
- If specified as true, the silence entry will be automatically
|
||||
cleared once the condition it is silencing is resolved.
|
||||
type: bool
|
||||
reason:
|
||||
description:
|
||||
- If specified, this free-form string is used to provide context or
|
||||
rationale for the reason this silence entry was created.
|
||||
state:
|
||||
description:
|
||||
- Specifies to create or clear (delete) a silence entry via the Sensu API
|
||||
required: true
|
||||
default: present
|
||||
choices: ['present', 'absent']
|
||||
subscription:
|
||||
description:
|
||||
- Specifies the subscription which the silence entry applies to.
|
||||
- To create a silence entry for a client prepend C(client:) to client name.
|
||||
Example - C(client:server1.example.dev)
|
||||
required: true
|
||||
default: []
|
||||
url:
|
||||
description:
|
||||
- Specifies the URL of the Sensu monitoring host server.
|
||||
required: false
|
||||
default: http://127.0.01:4567
|
||||
'''
|
||||
|
||||
EXAMPLES = '''
|
||||
# Silence ALL checks for a given client
|
||||
- name: Silence server1.example.dev
|
||||
sensu_silence:
|
||||
subscription: client:server1.example.dev
|
||||
creator: "{{ ansible_user_id }}"
|
||||
reason: Performing maintenance
|
||||
|
||||
# Silence specific check for a client
|
||||
- name: Silence CPU_Usage check for server1.example.dev
|
||||
sensu_silence:
|
||||
subscription: client:server1.example.dev
|
||||
check: CPU_Usage
|
||||
creator: "{{ ansible_user_id }}"
|
||||
reason: Investigation alert issue
|
||||
|
||||
# Silence multiple clients from a dict
|
||||
silence:
|
||||
server1.example.dev:
|
||||
reason: 'Deployment in progress'
|
||||
server2.example.dev:
|
||||
reason: 'Deployment in progress'
|
||||
|
||||
- name: Silence several clients from a dict
|
||||
sensu_silence:
|
||||
subscription: "client:{{ item.key }}"
|
||||
reason: "{{ item.value.reason }}"
|
||||
creator: "{{ ansible_user_id }}"
|
||||
with_dict: "{{ silence }}"
|
||||
'''
|
||||
|
||||
RETURN = '''
|
||||
'''
|
||||
|
||||
import json
|
||||
|
||||
from ansible.module_utils.basic import AnsibleModule
|
||||
from ansible.module_utils.urls import fetch_url
|
||||
|
||||
|
||||
def query(module, url, check, subscription):
    """Fetch the current silence entries from the Sensu API.

    Returns a (rc, data, changed) tuple where rc is always False and data is
    the decoded JSON payload (or "" when the body is not valid JSON).
    """
    endpoint = url + '/silenced'

    # Only send the attributes that were actually provided.
    payload = dict((key, value)
                   for key, value in {'check': check,
                                      'subscription': subscription}.items()
                   if value is not None)

    response, info = fetch_url(
        module, endpoint, method='GET',
        headers={'Content-Type': 'application/json'},
        data=json.dumps(payload)
    )

    if info['status'] == 500:
        module.fail_json(
            msg="Failed to query silence %s. Reason: %s" % (subscription, info)
        )

    try:
        result = json.loads(response.read())
    except Exception:
        result = ""

    return False, result, False
|
||||
|
||||
|
||||
def clear(module, url, check, subscription):
    """Clear (delete) a silence entry via the Sensu API; no-op when none matches."""
    # Test if silence exists before clearing
    rc, out, changed = query(module, url, check, subscription)

    # Map each silenced subscription to its check so we can tell whether
    # the requested entry exists at all.
    silenced = dict((entry['subscription'], entry['check']) for entry in out)
    if subscription not in silenced:
        exists = False
    elif check:
        exists = (check == silenced[subscription])
    else:
        exists = True

    # If check/subscription doesn't exist,
    # exit with changed state of False.
    if not exists:
        return False, out, changed

    # module.check_mode is inherited from the AnsibleModule class
    if not module.check_mode:
        payload = dict((key, value)
                       for key, value in {'check': check,
                                          'subscription': subscription}.items()
                       if value is not None)

        response, info = fetch_url(
            module, url + '/silenced/clear', method='POST',
            headers={'Content-Type': 'application/json'},
            data=json.dumps(payload)
        )

        if info['status'] != 204:
            module.fail_json(
                msg="Failed to silence %s. Reason: %s" % (subscription, info)
            )

        try:
            json_out = json.loads(response.read())
        except Exception:
            json_out = ""

        return False, json_out, True
    return False, out, True
|
||||
|
||||
|
||||
def create(
        module, url, check, creator, expire,
        expire_on_resolve, reason, subscription):
    """Create a silence entry via the Sensu API; no-op when an identical entry exists."""
    rc, out, changed = query(module, url, check, subscription)

    for entry in out:
        if entry['subscription'] != subscription:
            continue
        # An entry is considered identical when every provided attribute
        # matches; unset attributes (None / empty string) match anything.
        identical = (
            (check is None or check == entry['check']) and
            (creator == '' or creator == entry['creator']) and
            (reason == '' or reason == entry['reason']) and
            (expire is None or expire == entry['expire']) and
            (expire_on_resolve is None or
             expire_on_resolve == entry['expire_on_resolve'])
        )
        if identical:
            return False, out, False

    # module.check_mode is inherited from the AnsibleModule class
    if not module.check_mode:
        # Drop unset attributes before posting.
        request_data = dict(
            (key, value)
            for key, value in {'check': check,
                               'creator': creator,
                               'expire': expire,
                               'expire_on_resolve': expire_on_resolve,
                               'reason': reason,
                               'subscription': subscription}.items()
            if value is not None)

        response, info = fetch_url(
            module, url + '/silenced', method='POST',
            headers={'Content-Type': 'application/json'},
            data=json.dumps(request_data)
        )

        if info['status'] != 201:
            module.fail_json(
                msg="Failed to silence %s. Reason: %s" %
                (subscription, info['msg'])
            )

        try:
            json_out = json.loads(response.read())
        except Exception:
            json_out = ""

        return False, json_out, True
    return False, out, True
|
||||
|
||||
|
||||
def main():
    """Module entry point: dispatch to create() or clear() based on state."""
    module = AnsibleModule(
        argument_spec=dict(
            check=dict(required=False),
            creator=dict(required=False),
            expire=dict(type='int', required=False),
            expire_on_resolve=dict(type='bool', required=False),
            reason=dict(required=False),
            state=dict(default='present', choices=['present', 'absent']),
            subscription=dict(required=True),
            url=dict(required=False, default='http://127.0.01:4567'),
        ),
        supports_check_mode=True
    )

    params = module.params
    state = params['state']

    if state == 'present':
        (rc, out, changed) = create(
            module, params['url'], params['check'], params['creator'],
            params['expire'], params['expire_on_resolve'],
            params['reason'], params['subscription']
        )

    if state == 'absent':
        (rc, out, changed) = clear(module, params['url'],
                                   params['check'], params['subscription'])

    if rc != 0:
        module.fail_json(msg="failed", result=out)
    module.exit_json(msg="success", result=out, changed=changed)


if __name__ == '__main__':
    main()
|
||||
154
plugins/modules/monitoring/sensu/sensu_subscription.py
Normal file
154
plugins/modules/monitoring/sensu/sensu_subscription.py
Normal file
@@ -0,0 +1,154 @@
|
||||
#!/usr/bin/python
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
# (c) 2014, Anders Ingemann <aim@secoya.dk>
|
||||
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||
|
||||
from __future__ import absolute_import, division, print_function
|
||||
__metaclass__ = type
|
||||
|
||||
|
||||
ANSIBLE_METADATA = {'metadata_version': '1.1',
|
||||
'status': ['preview'],
|
||||
'supported_by': 'community'}
|
||||
|
||||
|
||||
DOCUMENTATION = '''
|
||||
---
|
||||
module: sensu_subscription
|
||||
short_description: Manage Sensu subscriptions
|
||||
description:
|
||||
- Manage which I(sensu channels) a machine should subscribe to
|
||||
options:
|
||||
name:
|
||||
description:
|
||||
- The name of the channel
|
||||
required: true
|
||||
state:
|
||||
description:
|
||||
- Whether the machine should subscribe or unsubscribe from the channel
|
||||
choices: [ 'present', 'absent' ]
|
||||
required: false
|
||||
default: present
|
||||
path:
|
||||
description:
|
||||
- Path to the subscriptions json file
|
||||
required: false
|
||||
default: /etc/sensu/conf.d/subscriptions.json
|
||||
backup:
|
||||
description:
|
||||
- Create a backup file (if yes), including the timestamp information so you
|
||||
- can get the original file back if you somehow clobbered it incorrectly.
|
||||
type: bool
|
||||
required: false
|
||||
default: no
|
||||
requirements: [ ]
|
||||
author: Anders Ingemann (@andsens)
|
||||
'''
|
||||
|
||||
RETURN = '''
|
||||
reasons:
|
||||
description: the reasons why the module changed or did not change something
|
||||
returned: success
|
||||
type: list
|
||||
sample: ["channel subscription was absent and state is `present'"]
|
||||
'''
|
||||
|
||||
EXAMPLES = '''
|
||||
# Subscribe to the nginx channel
|
||||
- name: subscribe to nginx checks
|
||||
sensu_subscription: name=nginx
|
||||
|
||||
# Unsubscribe from the common checks channel
|
||||
- name: unsubscribe from common checks
|
||||
sensu_subscription: name=common state=absent
|
||||
'''
|
||||
|
||||
import json
|
||||
import traceback
|
||||
|
||||
from ansible.module_utils.basic import AnsibleModule
|
||||
from ansible.module_utils._text import to_native
|
||||
|
||||
|
||||
def sensu_subscription(module, path, name, state='present', backup=False):
    """Add or remove a channel subscription in a Sensu client config file.

    Reads the JSON config at ``path``, ensures the ``client.subscriptions``
    list does (state=present) or does not (state=absent) contain ``name``,
    and writes the file back when a change was made and the module is not in
    check mode. Returns a ``(changed, reasons)`` tuple.
    """
    changed = False
    reasons = []

    try:
        # Context manager so the handle is closed even on parse errors
        # (the previous version leaked the open file object).
        with open(path) as subs_file:
            config = json.load(subs_file)
    except IOError as e:
        if e.errno == 2:  # File not found, non-fatal
            if state == 'absent':
                reasons.append('file did not exist and state is `absent\'')
                return changed, reasons
            config = {}
        else:
            module.fail_json(msg=to_native(e), exception=traceback.format_exc())
    except ValueError:
        msg = '{path} contains invalid JSON'.format(path=path)
        module.fail_json(msg=msg)

    if 'client' not in config:
        if state == 'absent':
            reasons.append('`client\' did not exist and state is `absent\'')
            return changed, reasons
        config['client'] = {}
        changed = True
        reasons.append('`client\' did not exist')

    if 'subscriptions' not in config['client']:
        if state == 'absent':
            reasons.append('`client.subscriptions\' did not exist and state is `absent\'')
            return changed, reasons
        config['client']['subscriptions'] = []
        changed = True
        reasons.append('`client.subscriptions\' did not exist')

    if name not in config['client']['subscriptions']:
        if state == 'absent':
            reasons.append('channel subscription was absent')
            return changed, reasons
        config['client']['subscriptions'].append(name)
        changed = True
        reasons.append('channel subscription was absent and state is `present\'')
    else:
        if state == 'absent':
            config['client']['subscriptions'].remove(name)
            changed = True
            reasons.append('channel subscription was present and state is `absent\'')

    if changed and not module.check_mode:
        if backup:
            module.backup_local(path)
        try:
            # Context manager guarantees the data is flushed and the
            # descriptor released (the previous version never closed it).
            with open(path, 'w') as subs_file:
                subs_file.write(json.dumps(config, indent=2) + '\n')
        except IOError as e:
            module.fail_json(msg='Failed to write to file %s: %s' % (path, to_native(e)),
                             exception=traceback.format_exc())

    return changed, reasons
|
||||
|
||||
|
||||
def main():
|
||||
arg_spec = {'name': {'type': 'str', 'required': True},
|
||||
'path': {'type': 'str', 'default': '/etc/sensu/conf.d/subscriptions.json'},
|
||||
'state': {'type': 'str', 'default': 'present', 'choices': ['present', 'absent']},
|
||||
'backup': {'type': 'bool', 'default': 'no'},
|
||||
}
|
||||
|
||||
module = AnsibleModule(argument_spec=arg_spec,
|
||||
supports_check_mode=True)
|
||||
|
||||
path = module.params['path']
|
||||
name = module.params['name']
|
||||
state = module.params['state']
|
||||
backup = module.params['backup']
|
||||
|
||||
changed, reasons = sensu_subscription(module, path, name, state, backup)
|
||||
|
||||
module.exit_json(path=path, name=name, changed=changed, msg='OK', reasons=reasons)
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
main()
|
||||
328
plugins/modules/monitoring/spectrum_device.py
Normal file
328
plugins/modules/monitoring/spectrum_device.py
Normal file
@@ -0,0 +1,328 @@
|
||||
#!/usr/bin/python
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
# (c) 2016, Renato Orgito <orgito@gmail.com>
|
||||
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||
|
||||
from __future__ import absolute_import, division, print_function
|
||||
__metaclass__ = type
|
||||
|
||||
|
||||
ANSIBLE_METADATA = {'metadata_version': '1.1',
|
||||
'status': ['preview'],
|
||||
'supported_by': 'community'}
|
||||
|
||||
|
||||
DOCUMENTATION = '''
|
||||
---
|
||||
module: spectrum_device
|
||||
short_description: Creates/deletes devices in CA Spectrum.
|
||||
description:
|
||||
- This module allows you to create and delete devices in CA Spectrum U(https://www.ca.com/us/products/ca-spectrum.html).
|
||||
- Tested on CA Spectrum 9.4.2, 10.1.1 and 10.2.1
|
||||
author: "Renato Orgito (@orgito)"
|
||||
options:
|
||||
device:
|
||||
aliases: [ host, name ]
|
||||
required: true
|
||||
description:
|
||||
- IP address of the device.
|
||||
- If a hostname is given, it will be resolved to the IP address.
|
||||
community:
|
||||
description:
|
||||
- SNMP community used for device discovery.
|
||||
- Required when C(state=present).
|
||||
landscape:
|
||||
required: true
|
||||
description:
|
||||
- Landscape handle of the SpectroServer to which add or remove the device.
|
||||
state:
|
||||
required: false
|
||||
description:
|
||||
- On C(present) creates the device when it does not exist.
|
||||
- On C(absent) removes the device when it exists.
|
||||
choices: ['present', 'absent']
|
||||
default: 'present'
|
||||
url:
|
||||
aliases: [ oneclick_url ]
|
||||
required: true
|
||||
description:
|
||||
- HTTP, HTTPS URL of the Oneclick server in the form (http|https)://host.domain[:port]
|
||||
url_username:
|
||||
aliases: [ oneclick_user ]
|
||||
required: true
|
||||
description:
|
||||
- Oneclick user name.
|
||||
url_password:
|
||||
aliases: [ oneclick_password ]
|
||||
required: true
|
||||
description:
|
||||
- Oneclick user password.
|
||||
use_proxy:
|
||||
required: false
|
||||
description:
|
||||
- if C(no), it will not use a proxy, even if one is defined in an environment
|
||||
variable on the target hosts.
|
||||
default: 'yes'
|
||||
type: bool
|
||||
validate_certs:
|
||||
required: false
|
||||
description:
|
||||
- If C(no), SSL certificates will not be validated. This should only be used
|
||||
on personally controlled sites using self-signed certificates.
|
||||
default: 'yes'
|
||||
type: bool
|
||||
agentport:
|
||||
required: false
|
||||
description:
|
||||
- UDP port used for SNMP discovery.
|
||||
default: 161
|
||||
notes:
|
||||
- The devices will be created inside the I(Universe) container of the specified landscape.
|
||||
- All the operations will be performed only on the specified landscape.
|
||||
'''
|
||||
|
||||
EXAMPLES = '''
|
||||
- name: Add device to CA Spectrum
|
||||
local_action:
|
||||
module: spectrum_device
|
||||
device: '{{ ansible_host }}'
|
||||
community: secret
|
||||
landscape: '0x100000'
|
||||
oneclick_url: http://oneclick.example.com:8080
|
||||
oneclick_user: username
|
||||
oneclick_password: password
|
||||
state: present
|
||||
|
||||
|
||||
- name: Remove device from CA Spectrum
|
||||
local_action:
|
||||
module: spectrum_device
|
||||
device: '{{ ansible_host }}'
|
||||
landscape: '{{ landscape_handle }}'
|
||||
oneclick_url: http://oneclick.example.com:8080
|
||||
oneclick_user: username
|
||||
oneclick_password: password
|
||||
use_proxy: no
|
||||
state: absent
|
||||
'''
|
||||
|
||||
RETURN = '''
|
||||
device:
|
||||
description: device data when state = present
|
||||
returned: success
|
||||
type: dict
|
||||
sample: {'model_handle': '0x1007ab', 'landscape': '0x100000', 'address': '10.10.5.1'}
|
||||
'''
|
||||
|
||||
from socket import gethostbyname, gaierror
|
||||
import xml.etree.ElementTree as ET
|
||||
|
||||
from ansible.module_utils.basic import AnsibleModule
|
||||
from ansible.module_utils.urls import fetch_url
|
||||
|
||||
|
||||
def request(resource, xml=None, method=None):
    """Issue a REST call against the OneClick server and return the response body.

    Fails the module on authentication errors or any unexpected HTTP status.
    Relies on the module-level ``module`` global set up by main().
    """
    url = module.params['oneclick_url'] + '/spectrum/restful/' + resource

    response, info = fetch_url(
        module, url, data=xml, method=method,
        headers={
            "Content-Type": "application/xml",
            "Accept": "application/xml"
        },
        timeout=45)

    status = info['status']
    if status == 401:
        module.fail_json(msg="failed to authenticate to Oneclick server")
    if status not in (200, 201, 204):
        module.fail_json(msg=info['msg'])

    return response.read()
|
||||
|
||||
|
||||
def post(resource, xml=None):
    """Send a POST request with an optional XML body to the given OneClick resource."""
    return request(resource, xml=xml, method='POST')
|
||||
|
||||
|
||||
def delete(resource):
    """Send a DELETE request (no body) to the given OneClick resource."""
    return request(resource, xml=None, method='DELETE')
|
||||
|
||||
|
||||
def get_ip():
    """Resolve the 'device' module parameter to an IP address.

    Fails the module (which terminates execution) when resolution fails.
    """
    hostname = module.params.get('device')
    try:
        return gethostbyname(hostname)
    except gaierror:
        module.fail_json(msg="failed to resolve device ip address for '%s'" % hostname)
|
||||
|
||||
|
||||
def get_device(device_ip):
    """Query OneClick for the device using the IP Address"""
    resource = '/models'
    # Spectrum landscapes partition the model-handle space into 0x100000-wide
    # ranges; bound the search to the handles of the requested landscape.
    landscape_min = "0x%x" % int(module.params.get('landscape'), 16)
    landscape_max = "0x%x" % (int(module.params.get('landscape'), 16) + 0x100000)

    # Search request: find device models by IP, restricted to the landscape's
    # model-handle range (attribute 0x129fa is the model handle).
    xml = """<?xml version="1.0" encoding="UTF-8"?>
    <rs:model-request throttlesize="5"
    xmlns:rs="http://www.ca.com/spectrum/restful/schema/request"
    xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
    xsi:schemaLocation="http://www.ca.com/spectrum/restful/schema/request ../../../xsd/Request.xsd">
    <rs:target-models>
    <rs:models-search>
    <rs:search-criteria xmlns="http://www.ca.com/spectrum/restful/schema/filter">
    <action-models>
    <filtered-models>
    <and>
    <equals>
    <model-type>SearchManager</model-type>
    </equals>
    <greater-than>
    <attribute id="0x129fa">
    <value>{mh_min}</value>
    </attribute>
    </greater-than>
    <less-than>
    <attribute id="0x129fa">
    <value>{mh_max}</value>
    </attribute>
    </less-than>
    </and>
    </filtered-models>
    <action>FIND_DEV_MODELS_BY_IP</action>
    <attribute id="AttributeID.NETWORK_ADDRESS">
    <value>{search_ip}</value>
    </attribute>
    </action-models>
    </rs:search-criteria>
    </rs:models-search>
    </rs:target-models>
    <rs:requested-attribute id="0x12d7f" /> <!--Network Address-->
    </rs:model-request>
    """.format(search_ip=device_ip, mh_min=landscape_min, mh_max=landscape_max)

    result = post(resource, xml=xml)

    root = ET.fromstring(result)

    # No model matched the IP within the landscape range.
    if root.get('total-models') == '0':
        return None

    namespace = dict(ca='http://www.ca.com/spectrum/restful/schema/response')

    # get the first device
    model = root.find('ca:model-responses', namespace).find('ca:model', namespace)

    if model.get('error'):
        module.fail_json(msg="error checking device: %s" % model.get('error'))

    # get the attributes
    model_handle = model.get('mh')

    # 0x12d7f is the Network Address attribute requested in the XML above.
    model_address = model.find('./*[@id="0x12d7f"]').text

    # derive the landscape handler from the model handler of the device
    model_landscape = "0x%x" % int(int(model_handle, 16) // 0x100000 * 0x100000)

    device = dict(
        model_handle=model_handle,
        address=model_address,
        landscape=model_landscape)

    return device
|
||||
|
||||
|
||||
def add_device():
    """Create the device model in Spectrum, or report it unchanged if already present."""
    device_ip = get_ip()
    existing = get_device(device_ip)

    if existing:
        # Already discovered: nothing to do.
        module.exit_json(changed=False, device=existing)

    if module.check_mode:
        # Report what would be created without touching the server.
        module.exit_json(changed=True, device=dict(
            model_handle=None,
            address=device_ip,
            landscape="0x%x" % int(module.params.get('landscape'), 16)))

    resource = 'model?ipaddress=' + device_ip + '&commstring=' + module.params.get('community')
    resource += '&landscapeid=' + module.params.get('landscape')

    agentport = module.params.get('agentport', None)
    if agentport:
        resource += '&agentport=' + str(agentport)

    root = ET.fromstring(post(resource))

    if root.get('error') != 'Success':
        module.fail_json(msg=root.get('error-message'))

    namespace = dict(ca='http://www.ca.com/spectrum/restful/schema/response')
    model = root.find('ca:model', namespace)

    model_handle = model.get('mh')
    # Landscape handle is the model handle rounded down to a 0x100000 boundary.
    model_landscape = "0x%x" % int(int(model_handle, 16) // 0x100000 * 0x100000)

    module.exit_json(changed=True, device=dict(
        model_handle=model_handle,
        address=device_ip,
        landscape=model_landscape,
    ))
|
||||
|
||||
|
||||
def remove_device():
    """Delete the device model from Spectrum (state=absent)."""
    existing = get_device(get_ip())

    # No model for this IP -- already absent.
    if existing is None:
        module.exit_json(changed=False)

    if module.check_mode:
        module.exit_json(changed=True)

    response = delete('/model/' + existing['model_handle'])
    root = ET.fromstring(response)

    ns = dict(ca='http://www.ca.com/spectrum/restful/schema/response')
    status = root.find('ca:error', ns).text

    if status != 'Success':
        detail = root.find('ca:error-message', ns).text
        module.fail_json(msg="%s %s" % (status, detail))

    module.exit_json(changed=True)
|
||||
|
||||
|
||||
def main():
    """Module entry point: parse arguments and dispatch on state."""
    # Shared with the helper functions above via the module-level name.
    global module
    module = AnsibleModule(
        argument_spec=dict(
            device=dict(required=True, aliases=['host', 'name']),
            landscape=dict(required=True),
            state=dict(choices=['present', 'absent'], default='present'),
            # NOTE(review): community is unconditionally required, which
            # makes the required_if rule below redundant -- confirm intent.
            community=dict(required=True, no_log=True),
            agentport=dict(type='int', default=161),
            url=dict(required=True, aliases=['oneclick_url']),
            url_username=dict(required=True, aliases=['oneclick_user']),
            url_password=dict(required=True, no_log=True, aliases=['oneclick_password']),
            use_proxy=dict(type='bool', default='yes'),
            validate_certs=dict(type='bool', default='yes'),
        ),
        required_if=[('state', 'present', ['community'])],
        supports_check_mode=True
    )

    if module.params.get('state') == 'present':
        add_device()
    else:
        remove_device()
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
main()
|
||||
206
plugins/modules/monitoring/stackdriver.py
Normal file
206
plugins/modules/monitoring/stackdriver.py
Normal file
@@ -0,0 +1,206 @@
|
||||
#!/usr/bin/python
|
||||
# -*- coding: utf-8 -*-
|
||||
# Copyright: Ansible Project
|
||||
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||
|
||||
from __future__ import absolute_import, division, print_function
|
||||
__metaclass__ = type
|
||||
|
||||
|
||||
ANSIBLE_METADATA = {'metadata_version': '1.1',
|
||||
'status': ['preview'],
|
||||
'supported_by': 'community'}
|
||||
|
||||
|
||||
DOCUMENTATION = '''
|
||||
|
||||
module: stackdriver
|
||||
short_description: Send code deploy and annotation events to stackdriver
|
||||
description:
|
||||
- Send code deploy and annotation events to Stackdriver
|
||||
author: "Ben Whaley (@bwhaley)"
|
||||
options:
|
||||
key:
|
||||
description:
|
||||
- API key.
|
||||
required: true
|
||||
event:
|
||||
description:
|
||||
- The type of event to send, either annotation or deploy
|
||||
choices: ['annotation', 'deploy']
|
||||
revision_id:
|
||||
description:
|
||||
- The revision of the code that was deployed. Required for deploy events
|
||||
deployed_by:
|
||||
description:
|
||||
- The person or robot responsible for deploying the code
|
||||
default: "Ansible"
|
||||
deployed_to:
|
||||
description:
|
||||
- "The environment code was deployed to. (ie: development, staging, production)"
|
||||
repository:
|
||||
description:
|
||||
- The repository (or project) deployed
|
||||
msg:
|
||||
description:
|
||||
- The contents of the annotation message, in plain text. Limited to 256 characters. Required for annotation.
|
||||
annotated_by:
|
||||
description:
|
||||
- The person or robot who the annotation should be attributed to.
|
||||
default: "Ansible"
|
||||
level:
|
||||
description:
|
||||
- one of INFO/WARN/ERROR, defaults to INFO if not supplied. May affect display.
|
||||
choices: ['INFO', 'WARN', 'ERROR']
|
||||
default: 'INFO'
|
||||
instance_id:
|
||||
description:
|
||||
- id of an EC2 instance that this event should be attached to, which will limit the contexts where this event is shown
|
||||
event_epoch:
|
||||
description:
|
||||
- "Unix timestamp of where the event should appear in the timeline, defaults to now. Be careful with this."
|
||||
'''
|
||||
|
||||
EXAMPLES = '''
|
||||
- stackdriver:
|
||||
key: AAAAAA
|
||||
event: deploy
|
||||
deployed_to: production
|
||||
deployed_by: leeroyjenkins
|
||||
repository: MyWebApp
|
||||
revision_id: abcd123
|
||||
|
||||
- stackdriver:
|
||||
key: AAAAAA
|
||||
event: annotation
|
||||
msg: Greetings from Ansible
|
||||
annotated_by: leeroyjenkins
|
||||
level: WARN
|
||||
instance_id: i-abcd1234
|
||||
'''
|
||||
|
||||
# ===========================================
|
||||
# Stackdriver module specific support methods.
|
||||
#
|
||||
|
||||
import json
|
||||
import traceback
|
||||
|
||||
from ansible.module_utils.basic import AnsibleModule
|
||||
from ansible.module_utils._text import to_native
|
||||
from ansible.module_utils.urls import fetch_url
|
||||
|
||||
|
||||
def send_deploy_event(module, key, revision_id, deployed_by='Ansible', deployed_to=None, repository=None):
    """Send a deploy event to Stackdriver"""
    deploy_api = "https://event-gateway.stackdriver.com/v1/deployevent"

    payload = {
        'revision_id': revision_id,
        'deployed_by': deployed_by,
    }
    # Optional fields are included only when supplied.
    if deployed_to:
        payload['deployed_to'] = deployed_to
    if repository:
        payload['repository'] = repository

    return do_send_request(module, deploy_api, payload, key)
|
||||
|
||||
|
||||
def send_annotation_event(module, key, msg, annotated_by='Ansible', level=None, instance_id=None, event_epoch=None):
    """Send an annotation event to Stackdriver"""
    annotation_api = "https://event-gateway.stackdriver.com/v1/annotationevent"

    payload = {'message': msg}
    # Remaining fields are optional and sent only when truthy.
    for field, value in (('annotated_by', annotated_by),
                         ('level', level),
                         ('instance_id', instance_id),
                         ('event_epoch', event_epoch)):
        if value:
            payload[field] = value

    return do_send_request(module, annotation_api, payload, key)
|
||||
|
||||
|
||||
def do_send_request(module, url, params, key):
    """POST *params* as JSON to the Stackdriver gateway *url*.

    The API key is passed in the ``x-stackdriver-apikey`` header.  A
    non-200 response aborts the module via fail_json.
    """
    data = json.dumps(params)
    headers = {
        'Content-Type': 'application/json',
        'x-stackdriver-apikey': key
    }
    response, info = fetch_url(module, url, headers=headers, data=data, method='POST')
    # NOTE(review): only HTTP 200 is treated as success; a 201/202 from the
    # gateway would be reported as failure -- confirm against the API.
    if info['status'] != 200:
        module.fail_json(msg="Unable to send msg: %s" % info['msg'])
|
||||
|
||||
|
||||
# ===========================================
|
||||
# Module execution.
|
||||
#
|
||||
|
||||
def main():
    """Module entry point: validate event-specific arguments and dispatch.

    ``deploy`` events require ``revision_id``; ``annotation`` events
    require ``msg``.  These cross-field rules are enforced here rather
    than in the argument spec.
    """

    module = AnsibleModule(
        argument_spec=dict(
            key=dict(required=True),
            event=dict(required=True, choices=['deploy', 'annotation']),
            msg=dict(),
            revision_id=dict(),
            annotated_by=dict(default='Ansible'),
            level=dict(default='INFO', choices=['INFO', 'WARN', 'ERROR']),
            instance_id=dict(),
            event_epoch=dict(),
            deployed_by=dict(default='Ansible'),
            deployed_to=dict(),
            repository=dict(),
        ),
        supports_check_mode=True
    )

    key = module.params["key"]
    event = module.params["event"]

    # Annotation params
    msg = module.params["msg"]
    annotated_by = module.params["annotated_by"]
    level = module.params["level"]
    instance_id = module.params["instance_id"]
    event_epoch = module.params["event_epoch"]

    # Deploy params
    revision_id = module.params["revision_id"]
    deployed_by = module.params["deployed_by"]
    deployed_to = module.params["deployed_to"]
    repository = module.params["repository"]

    ##################################################################
    # deploy requires revision_id
    # annotation requires msg
    # We verify these manually
    ##################################################################

    if event == 'deploy':
        if not revision_id:
            module.fail_json(msg="revision_id required for deploy events")
        try:
            send_deploy_event(module, key, revision_id, deployed_by, deployed_to, repository)
        except Exception as e:
            # Message typo fixed: "sent" -> "send".
            module.fail_json(msg="unable to send deploy event: %s" % to_native(e),
                             exception=traceback.format_exc())

    if event == 'annotation':
        if not msg:
            module.fail_json(msg="msg required for annotation events")
        try:
            send_annotation_event(module, key, msg, annotated_by, level, instance_id, event_epoch)
        except Exception as e:
            # Message typo fixed: "sent" -> "send".
            module.fail_json(msg="unable to send annotation event: %s" % to_native(e),
                             exception=traceback.format_exc())

    changed = True
    module.exit_json(changed=changed, deployed_by=deployed_by)
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
main()
|
||||
457
plugins/modules/monitoring/statusio_maintenance.py
Normal file
457
plugins/modules/monitoring/statusio_maintenance.py
Normal file
@@ -0,0 +1,457 @@
|
||||
#!/usr/bin/python
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
# (c) 2015, Benjamin Copeland (@bhcopeland) <ben@copeland.me.uk>
|
||||
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||
|
||||
from __future__ import absolute_import, division, print_function
|
||||
__metaclass__ = type
|
||||
|
||||
|
||||
ANSIBLE_METADATA = {'metadata_version': '1.1',
|
||||
'status': ['preview'],
|
||||
'supported_by': 'community'}
|
||||
|
||||
|
||||
DOCUMENTATION = '''
|
||||
|
||||
module: statusio_maintenance
|
||||
short_description: Create maintenance windows for your status.io dashboard
|
||||
description:
|
||||
- Creates a maintenance window for status.io
|
||||
- Deletes a maintenance window for status.io
|
||||
notes:
|
||||
- You can use the apiary API url (http://docs.statusio.apiary.io/) to
|
||||
capture API traffic
|
||||
- Use start_date and start_time with minutes to set future maintenance window
|
||||
author: Benjamin Copeland (@bhcopeland) <ben@copeland.me.uk>
|
||||
options:
|
||||
title:
|
||||
description:
|
||||
- A descriptive title for the maintenance window
|
||||
default: "A new maintenance window"
|
||||
desc:
|
||||
description:
|
||||
- Message describing the maintenance window
|
||||
default: "Created by Ansible"
|
||||
state:
|
||||
description:
|
||||
- Desired state of the package.
|
||||
default: "present"
|
||||
choices: ["present", "absent"]
|
||||
api_id:
|
||||
description:
|
||||
- Your unique API ID from status.io
|
||||
required: true
|
||||
api_key:
|
||||
description:
|
||||
- Your unique API Key from status.io
|
||||
required: true
|
||||
statuspage:
|
||||
description:
|
||||
- Your unique StatusPage ID from status.io
|
||||
required: true
|
||||
url:
|
||||
description:
|
||||
- Status.io API URL. A private apiary can be used instead.
|
||||
default: "https://api.status.io"
|
||||
components:
|
||||
description:
|
||||
- The given name of your component (server name)
|
||||
aliases: ['component']
|
||||
containers:
|
||||
description:
|
||||
- The given name of your container (data center)
|
||||
aliases: ['container']
|
||||
all_infrastructure_affected:
|
||||
description:
|
||||
- If it affects all components and containers
|
||||
type: bool
|
||||
default: 'no'
|
||||
automation:
|
||||
description:
|
||||
- Automatically start and end the maintenance window
|
||||
type: bool
|
||||
default: 'no'
|
||||
maintenance_notify_now:
|
||||
description:
|
||||
- Notify subscribers now
|
||||
type: bool
|
||||
default: 'no'
|
||||
maintenance_notify_72_hr:
|
||||
description:
|
||||
- Notify subscribers 72 hours before maintenance start time
|
||||
type: bool
|
||||
default: 'no'
|
||||
maintenance_notify_24_hr:
|
||||
description:
|
||||
- Notify subscribers 24 hours before maintenance start time
|
||||
type: bool
|
||||
default: 'no'
|
||||
maintenance_notify_1_hr:
|
||||
description:
|
||||
- Notify subscribers 1 hour before maintenance start time
|
||||
type: bool
|
||||
default: 'no'
|
||||
maintenance_id:
|
||||
description:
|
||||
- The maintenance id number when deleting a maintenance window
|
||||
minutes:
|
||||
description:
|
||||
- The length of time in UTC that the maintenance will run
|
||||
(starting from playbook runtime)
|
||||
default: 10
|
||||
start_date:
|
||||
description:
|
||||
- Date maintenance is expected to start (Month/Day/Year) (UTC)
|
||||
- End Date is worked out from start_date + minutes
|
||||
start_time:
|
||||
description:
|
||||
- Time maintenance is expected to start (Hour:Minutes) (UTC)
|
||||
- End Time is worked out from start_time + minutes
|
||||
'''
|
||||
|
||||
EXAMPLES = '''
|
||||
- name: Create a maintenance window for 10 minutes on server1, with automation to stop the maintenance
|
||||
statusio_maintenance:
|
||||
title: Router Upgrade from ansible
|
||||
desc: Performing a Router Upgrade
|
||||
components: server1.example.com
|
||||
api_id: api_id
|
||||
api_key: api_key
|
||||
statuspage: statuspage_id
|
||||
maintenance_notify_1_hr: True
|
||||
automation: True
|
||||
|
||||
- name: Create a maintenance window for 60 minutes on server1 and server2
|
||||
statusio_maintenance:
|
||||
title: Routine maintenance
|
||||
desc: Some security updates
|
||||
components:
|
||||
- server1.example.com
|
||||
- server2.example.com
|
||||
minutes: 60
|
||||
api_id: api_id
|
||||
api_key: api_key
|
||||
statuspage: statuspage_id
|
||||
maintenance_notify_1_hr: True
|
||||
automation: True
|
||||
delegate_to: localhost
|
||||
|
||||
- name: Create a future maintenance window for 24 hours to all hosts inside the Primary Data Center
|
||||
statusio_maintenance:
|
||||
title: Data center downtime
|
||||
desc: Performing a Upgrade to our data center
|
||||
components: Primary Data Center
|
||||
api_id: api_id
|
||||
api_key: api_key
|
||||
statuspage: statuspage_id
|
||||
start_date: 01/01/2016
|
||||
start_time: 12:00
|
||||
minutes: 1440
|
||||
|
||||
- name: Delete a maintenance window
|
||||
statusio_maintenance:
|
||||
title: Remove a maintenance window
|
||||
maintenance_id: 561f90faf74bc94a4700087b
|
||||
statuspage: statuspage_id
|
||||
api_id: api_id
|
||||
api_key: api_key
|
||||
state: absent
|
||||
|
||||
'''
|
||||
# TODO: Add RETURN documentation.
|
||||
RETURN = ''' # '''
|
||||
|
||||
import datetime
|
||||
import json
|
||||
|
||||
from ansible.module_utils.basic import AnsibleModule
|
||||
from ansible.module_utils._text import to_native
|
||||
from ansible.module_utils.urls import open_url
|
||||
|
||||
|
||||
def get_api_auth_headers(api_id, api_key, url, statuspage):
    """Validate the status.io credentials and return reusable auth headers.

    Returns a ``(rc, headers, component_listing, error)`` tuple: rc 0 on
    success with the headers and the parsed component list JSON, rc 1
    with an error string otherwise.
    """

    headers = {
        "x-api-id": api_id,
        "x-api-key": api_key,
        "Content-Type": "application/json"
    }

    try:
        # Listing the page's components doubles as an authentication check.
        response = open_url(
            url + "/v2/component/list/" + statuspage, headers=headers)
        data = json.loads(response.read())
        if data['status']['message'] == 'Authentication failed':
            return 1, None, None, "Authentication failed: " \
                                  "Check api_id/api_key and statuspage id."
        else:
            auth_headers = headers
            auth_content = data
    except Exception as e:
        return 1, None, None, to_native(e)
    return 0, auth_headers, auth_content, None
|
||||
|
||||
|
||||
def get_component_ids(auth_content, components):
    """Map component names to their component/container id pairs.

    Matching is case-insensitive.  Returns (0, ids, None) when every
    requested component was found, or (1, None, missing_names) listing
    the lowercased names absent from the API data.
    """
    wanted = [name.lower() for name in components]
    matched = []
    for entry in auth_content["result"]:
        entry_name = entry['name'].lower()
        if entry_name in wanted:
            matched.append({
                "component_id": entry["_id"],
                "container_id": entry["containers"][0]["_id"],
            })
            wanted.remove(entry_name)
    if wanted:
        # Some requested components never appeared in the API listing.
        return 1, None, wanted
    return 0, matched, None
|
||||
|
||||
|
||||
def get_container_ids(auth_content, containers):
    """Map container (data-center) names to component/container id pairs.

    Matching is case-insensitive against each result's first container.
    Returns (0, ids, None) on full success, or (1, None, missing_names)
    when any requested container is absent from the API data.
    """
    pending = [name.lower() for name in containers]
    found = []
    for entry in auth_content["result"]:
        container_name = entry["containers"][0]["name"].lower()
        if container_name in pending:
            found.append({
                "component_id": entry["_id"],
                "container_id": entry["containers"][0]["_id"],
            })
            pending.remove(container_name)

    if pending:
        # Names left over were never matched by the API listing.
        return 1, None, pending
    return 0, found, None
|
||||
|
||||
|
||||
def get_date_time(start_date, start_time, minutes):
    """Compute the window as [start_date, start_time, end_date, end_time].

    With an explicit start, the end is start + *minutes*; otherwise the
    window begins at the current UTC time and lasts *minutes* minutes.
    Returns (0, dates, None) on success or (1, None, error_message).
    """
    window = []
    if start_date and start_time:
        try:
            datetime.datetime.strptime(start_date, '%m/%d/%Y')
            window.append(start_date)
        except (NameError, ValueError):
            return 1, None, "Not a valid start_date format."
        try:
            datetime.datetime.strptime(start_time, '%H:%M')
            window.append(start_time)
        except (NameError, ValueError):
            return 1, None, "Not a valid start_time format."
        try:
            # End date/time is the parsed start plus the window length.
            begins = datetime.datetime.strptime(
                start_time + start_date, '%H:%M%m/%d/%Y')
            ends = begins + datetime.timedelta(minutes=minutes)
            window.append(ends.strftime("%m/%d/%Y"))
            window.append(ends.strftime("%H:%M"))
        except (NameError, ValueError):
            return 1, None, "Couldn't work out a valid date"
    else:
        # No explicit start: the window begins now (UTC).
        begins = datetime.datetime.utcnow()
        ends = begins + datetime.timedelta(minutes=minutes)
        window.append(begins.strftime("%m/%d/%Y"))
        window.append(begins.strftime("%H:%M"))
        window.append(ends.strftime("%m/%d/%Y"))
        window.append(ends.strftime("%H:%M"))
    return 0, window, None
|
||||
|
||||
|
||||
def create_maintenance(auth_headers, url, statuspage, host_ids,
                       all_infrastructure_affected, automation, title, desc,
                       returned_date, maintenance_notify_now,
                       maintenance_notify_72_hr, maintenance_notify_24_hr,
                       maintenance_notify_1_hr):
    """Schedule a maintenance window via the status.io API.

    *host_ids* is a list of {"component_id", "container_id"} dicts and
    *returned_date* the 4-element [start_date, start_time, end_date,
    end_time] list from get_date_time().  Returns (0, None, None) on
    success or (1, None, error_message).
    """
    # NOTE(review): each date/time value is wrapped in a one-element list
    # before being sent -- presumably the API expects array values here;
    # confirm against the status.io docs.
    returned_dates = [[x] for x in returned_date]
    component_id = []
    container_id = []
    for val in host_ids:
        component_id.append(val['component_id'])
        container_id.append(val['container_id'])
    try:
        # Booleans are serialised as "0"/"1" strings.
        values = json.dumps({
            "statuspage_id": statuspage,
            "components": component_id,
            "containers": container_id,
            "all_infrastructure_affected": str(int(all_infrastructure_affected)),
            "automation": str(int(automation)),
            "maintenance_name": title,
            "maintenance_details": desc,
            "date_planned_start": returned_dates[0],
            "time_planned_start": returned_dates[1],
            "date_planned_end": returned_dates[2],
            "time_planned_end": returned_dates[3],
            "maintenance_notify_now": str(int(maintenance_notify_now)),
            "maintenance_notify_72_hr": str(int(maintenance_notify_72_hr)),
            "maintenance_notify_24_hr": str(int(maintenance_notify_24_hr)),
            "maintenance_notify_1_hr": str(int(maintenance_notify_1_hr))
        })
        response = open_url(
            url + "/v2/maintenance/schedule", data=values,
            headers=auth_headers)
        data = json.loads(response.read())

        if data["status"]["error"] == "yes":
            return 1, None, data["status"]["message"]
    except Exception as e:
        return 1, None, to_native(e)
    return 0, None, None
|
||||
|
||||
|
||||
def delete_maintenance(auth_headers, url, statuspage, maintenance_id):
    """Delete a scheduled maintenance window by its id.

    Returns (0, None, None) on success or (1, None, error_message).
    """
    try:
        payload = json.dumps({
            "statuspage_id": statuspage,
            "maintenance_id": maintenance_id,
        })
        api_response = open_url(
            url=url + "/v2/maintenance/delete",
            data=payload,
            headers=auth_headers)
        body = json.loads(api_response.read())
        # The API reports failure in-band rather than via HTTP status.
        if body["status"]["error"] == "yes":
            return 1, None, "Invalid maintenance_id"
    except Exception as e:
        return 1, None, to_native(e)
    return 0, None, None
|
||||
|
||||
|
||||
def main():
    """Module entry point: create or delete a status.io maintenance window."""
    module = AnsibleModule(
        argument_spec=dict(
            api_id=dict(required=True),
            api_key=dict(required=True, no_log=True),
            statuspage=dict(required=True),
            state=dict(required=False, default='present',
                       choices=['present', 'absent']),
            url=dict(default='https://api.status.io', required=False),
            components=dict(type='list', required=False, default=None,
                            aliases=['component']),
            containers=dict(type='list', required=False, default=None,
                            aliases=['container']),
            all_infrastructure_affected=dict(type='bool', default=False,
                                             required=False),
            automation=dict(type='bool', default=False, required=False),
            title=dict(required=False, default='A new maintenance window'),
            desc=dict(required=False, default='Created by Ansible'),
            minutes=dict(type='int', required=False, default=10),
            maintenance_notify_now=dict(type='bool', default=False,
                                        required=False),
            maintenance_notify_72_hr=dict(type='bool', default=False,
                                          required=False),
            maintenance_notify_24_hr=dict(type='bool', default=False,
                                          required=False),
            maintenance_notify_1_hr=dict(type='bool', default=False,
                                         required=False),
            maintenance_id=dict(required=False, default=None),
            start_date=dict(default=None, required=False),
            start_time=dict(default=None, required=False)
        ),
        supports_check_mode=True,
    )

    api_id = module.params['api_id']
    api_key = module.params['api_key']
    statuspage = module.params['statuspage']
    state = module.params['state']
    url = module.params['url']
    components = module.params['components']
    containers = module.params['containers']
    all_infrastructure_affected = module.params['all_infrastructure_affected']
    automation = module.params['automation']
    title = module.params['title']
    desc = module.params['desc']
    minutes = module.params['minutes']
    maintenance_notify_now = module.params['maintenance_notify_now']
    maintenance_notify_72_hr = module.params['maintenance_notify_72_hr']
    maintenance_notify_24_hr = module.params['maintenance_notify_24_hr']
    maintenance_notify_1_hr = module.params['maintenance_notify_1_hr']
    maintenance_id = module.params['maintenance_id']
    start_date = module.params['start_date']
    start_time = module.params['start_time']

    if state == "present":

        if api_id and api_key:
            (rc, auth_headers, auth_content, error) = \
                get_api_auth_headers(api_id, api_key, url, statuspage)
            if rc != 0:
                module.fail_json(msg="Failed to get auth keys: %s" % error)
        else:
            auth_headers = {}
            auth_content = {}

        # NOTE(review): this parses as `minutes or (start_time and
        # start_date)`; since minutes defaults to 10 the condition is
        # always true -- confirm the intended precedence.
        if minutes or start_time and start_date:
            (rc, returned_date, error) = get_date_time(
                start_date, start_time, minutes)
            if rc != 0:
                module.fail_json(msg="Failed to set date/time: %s" % error)

        # components and containers are mutually exclusive and exactly one
        # must be supplied.
        if not components and not containers:
            return module.fail_json(msg="A Component or Container must be "
                                        "defined")
        elif components and containers:
            return module.fail_json(msg="Components and containers cannot "
                                        "be used together")
        else:
            if components:
                (rc, host_ids, error) = get_component_ids(auth_content,
                                                          components)
                if rc != 0:
                    module.fail_json(msg="Failed to find component %s" % error)

            if containers:
                (rc, host_ids, error) = get_container_ids(auth_content,
                                                          containers)
                if rc != 0:
                    module.fail_json(msg="Failed to find container %s" % error)

            if module.check_mode:
                module.exit_json(changed=True)
            else:
                (rc, _, error) = create_maintenance(
                    auth_headers, url, statuspage, host_ids,
                    all_infrastructure_affected, automation,
                    title, desc, returned_date, maintenance_notify_now,
                    maintenance_notify_72_hr, maintenance_notify_24_hr,
                    maintenance_notify_1_hr)
                if rc == 0:
                    module.exit_json(changed=True, result="Successfully created "
                                                          "maintenance")
                else:
                    module.fail_json(msg="Failed to create maintenance: %s"
                                         % error)

    if state == "absent":

        if api_id and api_key:
            (rc, auth_headers, auth_content, error) = \
                get_api_auth_headers(api_id, api_key, url, statuspage)
            if rc != 0:
                module.fail_json(msg="Failed to get auth keys: %s" % error)
        else:
            auth_headers = {}

        if module.check_mode:
            module.exit_json(changed=True)
        else:
            (rc, _, error) = delete_maintenance(
                auth_headers, url, statuspage, maintenance_id)
            if rc == 0:
                module.exit_json(
                    changed=True,
                    result="Successfully deleted maintenance"
                )
            else:
                module.fail_json(
                    msg="Failed to delete maintenance: %s" % error)
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
main()
|
||||
151
plugins/modules/monitoring/uptimerobot.py
Normal file
151
plugins/modules/monitoring/uptimerobot.py
Normal file
@@ -0,0 +1,151 @@
|
||||
#!/usr/bin/python
|
||||
# -*- coding: utf-8 -*-
|
||||
# Copyright: Ansible Project
|
||||
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||
|
||||
from __future__ import absolute_import, division, print_function
|
||||
__metaclass__ = type
|
||||
|
||||
|
||||
ANSIBLE_METADATA = {'metadata_version': '1.1',
|
||||
'status': ['preview'],
|
||||
'supported_by': 'community'}
|
||||
|
||||
|
||||
DOCUMENTATION = '''
|
||||
|
||||
module: uptimerobot
|
||||
short_description: Pause and start Uptime Robot monitoring
|
||||
description:
|
||||
- This module will let you start and pause Uptime Robot Monitoring
|
||||
author: "Nate Kingsley (@nate-kingsley)"
|
||||
requirements:
|
||||
- Valid Uptime Robot API Key
|
||||
options:
|
||||
state:
|
||||
description:
|
||||
- Define whether or not the monitor should be running or paused.
|
||||
required: true
|
||||
choices: [ "started", "paused" ]
|
||||
monitorid:
|
||||
description:
|
||||
- ID of the monitor to check.
|
||||
required: true
|
||||
apikey:
|
||||
description:
|
||||
- Uptime Robot API key.
|
||||
required: true
|
||||
notes:
|
||||
- Support for adding and removing monitors and alert contacts has not yet been implemented.
|
||||
'''
|
||||
|
||||
EXAMPLES = '''
|
||||
# Pause the monitor with an ID of 12345.
|
||||
- uptimerobot:
|
||||
monitorid: 12345
|
||||
apikey: 12345-1234512345
|
||||
state: paused
|
||||
|
||||
# Start the monitor with an ID of 12345.
|
||||
- uptimerobot:
|
||||
monitorid: 12345
|
||||
apikey: 12345-1234512345
|
||||
state: started
|
||||
'''
|
||||
|
||||
import json
|
||||
|
||||
from ansible.module_utils.basic import AnsibleModule
|
||||
from ansible.module_utils.six.moves.urllib.parse import urlencode
|
||||
from ansible.module_utils.urls import fetch_url
|
||||
from ansible.module_utils._text import to_text
|
||||
|
||||
|
||||
API_BASE = "https://api.uptimerobot.com/"
|
||||
|
||||
API_ACTIONS = dict(
|
||||
status='getMonitors?',
|
||||
editMonitor='editMonitor?'
|
||||
)
|
||||
|
||||
API_FORMAT = 'json'
|
||||
API_NOJSONCALLBACK = 1
|
||||
CHANGED_STATE = False
|
||||
SUPPORTS_CHECK_MODE = False
|
||||
|
||||
|
||||
def checkID(module, params):
    """Query the Uptime Robot status endpoint and return the parsed JSON.

    NOTE(review): if fetch_url fails, ``req`` is None and ``req.read()``
    raises AttributeError; the error details in ``info`` are never
    reported -- confirm/handle.
    """

    data = urlencode(params)
    full_uri = API_BASE + API_ACTIONS['status'] + data
    req, info = fetch_url(module, full_uri)
    result = to_text(req.read())
    jsonresult = json.loads(result)
    req.close()
    return jsonresult
|
||||
|
||||
|
||||
def startMonitor(module, params):
    """Resume the monitor (monitorStatus=1) and return the API 'stat' field."""
    params['monitorStatus'] = 1
    query = urlencode(params)
    req, info = fetch_url(module, API_BASE + API_ACTIONS['editMonitor'] + query)
    payload = json.loads(to_text(req.read()))
    req.close()
    return payload['stat']
|
||||
|
||||
|
||||
def pauseMonitor(module, params):
    """Pause the monitor (monitorStatus=0) and return the API 'stat' field."""
    params['monitorStatus'] = 0
    query = urlencode(params)
    req, info = fetch_url(module, API_BASE + API_ACTIONS['editMonitor'] + query)
    payload = json.loads(to_text(req.read()))
    req.close()
    return payload['stat']
|
||||
|
||||
|
||||
def main():
    """Module entry point: verify the monitor exists, then start or pause it."""

    module = AnsibleModule(
        argument_spec=dict(
            state=dict(required=True, choices=['started', 'paused']),
            apikey=dict(required=True, no_log=True),
            monitorid=dict(required=True)
        ),
        supports_check_mode=SUPPORTS_CHECK_MODE
    )

    # The monitor id is supplied under both keys; presumably the status and
    # edit endpoints expect different parameter names -- verify against the
    # Uptime Robot API.
    params = dict(
        apiKey=module.params['apikey'],
        monitors=module.params['monitorid'],
        monitorID=module.params['monitorid'],
        format=API_FORMAT,
        noJsonCallback=API_NOJSONCALLBACK
    )

    check_result = checkID(module, params)

    if check_result['stat'] != "ok":
        module.fail_json(
            msg="failed",
            result=check_result['message']
        )

    if module.params['state'] == 'started':
        monitor_result = startMonitor(module, params)
    else:
        monitor_result = pauseMonitor(module, params)

    module.exit_json(
        msg="success",
        result=monitor_result
    )
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
main()
|
||||
2113
plugins/modules/monitoring/zabbix/zabbix_action.py
Normal file
2113
plugins/modules/monitoring/zabbix/zabbix_action.py
Normal file
File diff suppressed because it is too large
Load Diff
208
plugins/modules/monitoring/zabbix/zabbix_group.py
Normal file
208
plugins/modules/monitoring/zabbix/zabbix_group.py
Normal file
@@ -0,0 +1,208 @@
|
||||
#!/usr/bin/python
|
||||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# (c) 2013-2014, Epic Games, Inc.
|
||||
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||
|
||||
from __future__ import absolute_import, division, print_function
|
||||
__metaclass__ = type
|
||||
|
||||
|
||||
ANSIBLE_METADATA = {'metadata_version': '1.1',
|
||||
'status': ['preview'],
|
||||
'supported_by': 'community'}
|
||||
|
||||
|
||||
DOCUMENTATION = r'''
|
||||
---
|
||||
module: zabbix_group
|
||||
short_description: Create/delete Zabbix host groups
|
||||
description:
|
||||
- Create host groups if they do not exist.
|
||||
- Delete existing host groups if they exist.
|
||||
author:
|
||||
- "Cove (@cove)"
|
||||
- "Tony Minfei Ding (!UNKNOWN)"
|
||||
- "Harrison Gu (@harrisongu)"
|
||||
requirements:
|
||||
- "python >= 2.6"
|
||||
- "zabbix-api >= 0.5.4"
|
||||
options:
|
||||
state:
|
||||
description:
|
||||
- Create or delete host group.
|
||||
required: false
|
||||
type: str
|
||||
default: "present"
|
||||
choices: [ "present", "absent" ]
|
||||
host_groups:
|
||||
description:
|
||||
- List of host groups to create or delete.
|
||||
required: true
|
||||
type: list
|
||||
elements: str
|
||||
aliases: [ "host_group" ]
|
||||
|
||||
extends_documentation_fragment:
|
||||
- community.general.zabbix
|
||||
|
||||
|
||||
notes:
|
||||
- Too many concurrent updates to the same group may cause Zabbix to return errors, see examples for a workaround if needed.
|
||||
'''
|
||||
|
||||
EXAMPLES = r'''
|
||||
# Base create host groups example
|
||||
- name: Create host groups
|
||||
local_action:
|
||||
module: zabbix_group
|
||||
server_url: http://monitor.example.com
|
||||
login_user: username
|
||||
login_password: password
|
||||
state: present
|
||||
host_groups:
|
||||
- Example group1
|
||||
- Example group2
|
||||
|
||||
# Limit the Zabbix group creations to one host since Zabbix can return an error when doing concurrent updates
|
||||
- name: Create host groups
|
||||
local_action:
|
||||
module: zabbix_group
|
||||
server_url: http://monitor.example.com
|
||||
login_user: username
|
||||
login_password: password
|
||||
state: present
|
||||
host_groups:
|
||||
- Example group1
|
||||
- Example group2
|
||||
when: inventory_hostname==groups['group_name'][0]
|
||||
'''
|
||||
|
||||
|
||||
import atexit
|
||||
import traceback
|
||||
|
||||
try:
|
||||
from zabbix_api import ZabbixAPI
|
||||
from zabbix_api import Already_Exists
|
||||
|
||||
HAS_ZABBIX_API = True
|
||||
except ImportError:
|
||||
ZBX_IMP_ERR = traceback.format_exc()
|
||||
HAS_ZABBIX_API = False
|
||||
|
||||
from ansible.module_utils.basic import AnsibleModule, missing_required_lib
|
||||
|
||||
|
||||
class HostGroup(object):
    """Thin wrapper around the Zabbix API for managing host groups."""

    def __init__(self, module, zbx):
        self._module = module
        self._zapi = zbx

    # create host group(s) if not exists
    def create_host_group(self, group_names):
        """Create every host group in group_names that does not already exist.

        Returns the list of group names that were actually created.
        In check mode the module exits (changed=True) as soon as a
        creation would be required.
        """
        try:
            group_add_list = []
            for group_name in group_names:
                result = self._zapi.hostgroup.get({'filter': {'name': group_name}})
                if not result:
                    try:
                        if self._module.check_mode:
                            self._module.exit_json(changed=True)
                        self._zapi.hostgroup.create({'name': group_name})
                        group_add_list.append(group_name)
                    except Already_Exists:
                        # The group was created concurrently between our
                        # existence check and the create call.  Treat it as
                        # already present and keep processing the remaining
                        # groups instead of aborting the whole batch
                        # (previously this returned early, silently skipping
                        # every group after the conflicting one).
                        continue
            return group_add_list
        except Exception as e:
            self._module.fail_json(msg="Failed to create host group(s): %s" % e)

    # delete host group(s)
    def delete_host_group(self, group_ids):
        """Delete the host groups identified by group_ids.

        In check mode the module exits (changed=True) without deleting.
        """
        try:
            if self._module.check_mode:
                self._module.exit_json(changed=True)
            self._zapi.hostgroup.delete(group_ids)
        except Exception as e:
            self._module.fail_json(msg="Failed to delete host group(s), Exception: %s" % e)

    # get group ids by name
    def get_group_ids(self, host_groups):
        """Return (group_ids, group_objects) for the named host groups.

        Groups that do not exist are simply absent from the result.
        """
        group_ids = []

        group_list = self._zapi.hostgroup.get({'output': 'extend', 'filter': {'name': host_groups}})
        for group in group_list:
            group_id = group['groupid']
            group_ids.append(group_id)
        return group_ids, group_list
||||
|
||||
|
||||
def main():
    """Entry point: create or delete Zabbix host groups as requested."""
    module = AnsibleModule(
        argument_spec=dict(
            server_url=dict(type='str', required=True, aliases=['url']),
            login_user=dict(type='str', required=True),
            login_password=dict(type='str', required=True, no_log=True),
            http_login_user=dict(type='str', required=False, default=None),
            http_login_password=dict(type='str', required=False, default=None, no_log=True),
            validate_certs=dict(type='bool', required=False, default=True),
            host_groups=dict(type='list', required=True, aliases=['host_group']),
            state=dict(type='str', default="present", choices=['present', 'absent']),
            timeout=dict(type='int', default=10)
        ),
        supports_check_mode=True
    )

    # Fail early if the zabbix-api client library could not be imported.
    if not HAS_ZABBIX_API:
        module.fail_json(msg=missing_required_lib('zabbix-api', url='https://pypi.org/project/zabbix-api/'), exception=ZBX_IMP_ERR)

    server_url = module.params['server_url']
    login_user = module.params['login_user']
    login_password = module.params['login_password']
    http_login_user = module.params['http_login_user']
    http_login_password = module.params['http_login_password']
    validate_certs = module.params['validate_certs']
    host_groups = module.params['host_groups']
    state = module.params['state']
    timeout = module.params['timeout']

    zbx = None

    # login to zabbix
    try:
        # http_login_* are HTTP basic-auth credentials for the web frontend;
        # the Zabbix application credentials are passed to login() below.
        zbx = ZabbixAPI(server_url, timeout=timeout, user=http_login_user, passwd=http_login_password,
                        validate_certs=validate_certs)
        zbx.login(login_user, login_password)
        # Ensure the API session is logged out when the interpreter exits.
        atexit.register(zbx.logout)
    except Exception as e:
        module.fail_json(msg="Failed to connect to Zabbix server: %s" % e)

    hostGroup = HostGroup(module, zbx)

    group_ids = []
    group_list = []
    if host_groups:
        # Resolve which of the requested groups already exist in Zabbix.
        group_ids, group_list = hostGroup.get_group_ids(host_groups)

    if state == "absent":
        # delete host groups (only the ones that actually exist)
        if group_ids:
            delete_group_names = []
            hostGroup.delete_host_group(group_ids)
            for group in group_list:
                delete_group_names.append(group['name'])
            module.exit_json(changed=True,
                             result="Successfully deleted host group(s): %s." % ",".join(delete_group_names))
        else:
            module.exit_json(changed=False, result="No host group(s) to delete.")
    else:
        # create host groups; create_host_group returns only the names
        # that were newly created, so an empty list means no change
        group_add_list = hostGroup.create_host_group(host_groups)
        if len(group_add_list) > 0:
            module.exit_json(changed=True, result="Successfully created host group(s): %s" % group_add_list)
        else:
            module.exit_json(changed=False)
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
main()
|
||||
1
plugins/modules/monitoring/zabbix/zabbix_group_facts.py
Symbolic link
1
plugins/modules/monitoring/zabbix/zabbix_group_facts.py
Symbolic link
@@ -0,0 +1 @@
|
||||
zabbix_group_info.py
|
||||
132
plugins/modules/monitoring/zabbix/zabbix_group_info.py
Normal file
132
plugins/modules/monitoring/zabbix/zabbix_group_info.py
Normal file
@@ -0,0 +1,132 @@
|
||||
#!/usr/bin/python
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
# (c) me@mimiko.me
|
||||
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||
|
||||
from __future__ import (absolute_import, division, print_function)
|
||||
__metaclass__ = type
|
||||
|
||||
|
||||
ANSIBLE_METADATA = {'metadata_version': '1.1',
|
||||
'status': ['preview'],
|
||||
'supported_by': 'community'}
|
||||
|
||||
RETURN = r'''
|
||||
---
|
||||
host_groups:
|
||||
description: List of Zabbix groups.
|
||||
returned: success
|
||||
type: dict
|
||||
  sample: [ { "flags": "0", "groupid": "33", "internal": "0", "name": "Hostgroup A" } ]
|
||||
'''
|
||||
|
||||
DOCUMENTATION = r'''
|
||||
---
|
||||
module: zabbix_group_info
|
||||
short_description: Gather information about Zabbix hostgroup
|
||||
description:
|
||||
- This module allows you to search for Zabbix hostgroup entries.
|
||||
- This module was called C(zabbix_group_facts) before Ansible 2.9. The usage did not change.
|
||||
author:
|
||||
- "Michael Miko (@RedWhiteMiko)"
|
||||
requirements:
|
||||
- "python >= 2.6"
|
||||
- "zabbix-api >= 0.5.4"
|
||||
options:
|
||||
hostgroup_name:
|
||||
description:
|
||||
- Name of the hostgroup in Zabbix.
|
||||
- hostgroup is the unique identifier used and cannot be updated using this module.
|
||||
required: true
|
||||
type: list
|
||||
elements: str
|
||||
extends_documentation_fragment:
|
||||
- community.general.zabbix
|
||||
|
||||
'''
|
||||
|
||||
EXAMPLES = r'''
|
||||
- name: Get hostgroup info
|
||||
local_action:
|
||||
module: zabbix_group_info
|
||||
server_url: http://monitor.example.com
|
||||
login_user: username
|
||||
login_password: password
|
||||
hostgroup_name:
|
||||
- ExampleHostgroup
|
||||
timeout: 10
|
||||
'''
|
||||
|
||||
|
||||
import atexit
|
||||
import traceback
|
||||
|
||||
from ansible.module_utils.basic import AnsibleModule, missing_required_lib
|
||||
|
||||
try:
|
||||
from zabbix_api import ZabbixAPI
|
||||
HAS_ZABBIX_API = True
|
||||
except ImportError:
|
||||
ZBX_IMP_ERR = traceback.format_exc()
|
||||
HAS_ZABBIX_API = False
|
||||
|
||||
|
||||
class Host(object):
    """Small helper that wraps the Zabbix API for host group lookups."""

    def __init__(self, module, zbx):
        self._module = module
        self._zapi = zbx

    def get_group_ids_by_group_names(self, group_names):
        """Return the host group objects whose names match group_names.

        Fails the module when no group matches at all.
        """
        query = {'output': 'extend', 'filter': {'name': group_names}}
        found_groups = self._zapi.hostgroup.get(query)
        if not found_groups:
            self._module.fail_json(msg="Hostgroup not found: %s" % group_names)
        return found_groups
||||
|
||||
|
||||
def main():
    """Entry point: look up Zabbix host groups and return them to the caller."""
    module = AnsibleModule(
        argument_spec=dict(
            server_url=dict(type='str', required=True, aliases=['url']),
            login_user=dict(type='str', required=True),
            login_password=dict(type='str', required=True, no_log=True),
            hostgroup_name=dict(type='list', required=True),
            http_login_user=dict(type='str', required=False, default=None),
            http_login_password=dict(type='str', required=False, default=None, no_log=True),
            validate_certs=dict(type='bool', required=False, default=True),
            timeout=dict(type='int', default=10)
        ),
        supports_check_mode=True
    )
    # Warn users still invoking the module under its pre-rename alias.
    if module._name == 'zabbix_group_facts':
        module.deprecate("The 'zabbix_group_facts' module has been renamed to 'zabbix_group_info'", version='2.13')

    # Fail early if the zabbix-api client library could not be imported.
    if not HAS_ZABBIX_API:
        module.fail_json(msg=missing_required_lib('zabbix-api', url='https://pypi.org/project/zabbix-api/'), exception=ZBX_IMP_ERR)

    server_url = module.params['server_url']
    login_user = module.params['login_user']
    login_password = module.params['login_password']
    http_login_user = module.params['http_login_user']
    http_login_password = module.params['http_login_password']
    validate_certs = module.params['validate_certs']
    hostgroup_name = module.params['hostgroup_name']
    timeout = module.params['timeout']

    zbx = None
    # login to zabbix
    try:
        # http_login_* are HTTP basic-auth credentials for the web frontend;
        # the Zabbix application credentials are passed to login() below.
        zbx = ZabbixAPI(server_url, timeout=timeout, user=http_login_user, passwd=http_login_password,
                        validate_certs=validate_certs)
        zbx.login(login_user, login_password)
        # Ensure the API session is logged out when the interpreter exits.
        atexit.register(zbx.logout)
    except Exception as e:
        module.fail_json(msg="Failed to connect to Zabbix server: %s" % e)

    host = Host(module, zbx)
    # get_group_ids_by_group_names fails the module itself when no
    # matching group exists, so no result check is needed here.
    host_groups = host.get_group_ids_by_group_names(hostgroup_name)
    module.exit_json(host_groups=host_groups)
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
main()
|
||||
1058
plugins/modules/monitoring/zabbix/zabbix_host.py
Normal file
1058
plugins/modules/monitoring/zabbix/zabbix_host.py
Normal file
File diff suppressed because it is too large
Load Diff
336
plugins/modules/monitoring/zabbix/zabbix_host_events_info.py
Normal file
336
plugins/modules/monitoring/zabbix/zabbix_host_events_info.py
Normal file
@@ -0,0 +1,336 @@
|
||||
#!/usr/bin/python
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
# (c) stephane.travassac@fr.clara.net
|
||||
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||
|
||||
|
||||
from __future__ import (absolute_import, division, print_function)
|
||||
|
||||
__metaclass__ = type
|
||||
|
||||
ANSIBLE_METADATA = {'metadata_version': '1.1',
|
||||
'status': ['preview'],
|
||||
'supported_by': 'community'}
|
||||
|
||||
RETURN = '''
|
||||
---
|
||||
triggers_ok:
|
||||
description: Host Zabbix Triggers in OK state
|
||||
returned: On success
|
||||
type: complex
|
||||
contains:
|
||||
comments:
|
||||
description: Additional description of the trigger
|
||||
type: str
|
||||
description:
|
||||
description: Name of the trigger
|
||||
type: str
|
||||
error:
|
||||
description: Error text if there have been any problems when updating the state of the trigger
|
||||
type: str
|
||||
expression:
|
||||
description: Reduced trigger expression
|
||||
type: str
|
||||
flags:
|
||||
description: Origin of the trigger
|
||||
type: int
|
||||
lastchange:
|
||||
description: Time when the trigger last changed its state (timestamp)
|
||||
type: int
|
||||
priority:
|
||||
description: Severity of the trigger
|
||||
type: int
|
||||
state:
|
||||
description: State of the trigger
|
||||
type: int
|
||||
status:
|
||||
description: Whether the trigger is enabled or disabled
|
||||
type: int
|
||||
templateid:
|
||||
description: ID of the parent template trigger
|
||||
type: int
|
||||
triggerid:
|
||||
description: ID of the trigger
|
||||
type: int
|
||||
type:
|
||||
description: Whether the trigger can generate multiple problem events
|
||||
type: int
|
||||
url:
|
||||
description: URL associated with the trigger
|
||||
type: str
|
||||
value:
|
||||
description: Whether the trigger is in OK or problem state
|
||||
type: int
|
||||
triggers_problem:
|
||||
description: Host Zabbix Triggers in problem state. See trigger and event objects in API documentation of your zabbix version for more
|
||||
returned: On success
|
||||
type: complex
|
||||
contains:
|
||||
comments:
|
||||
description: Additional description of the trigger
|
||||
type: str
|
||||
description:
|
||||
description: Name of the trigger
|
||||
type: str
|
||||
error:
|
||||
description: Error text if there have been any problems when updating the state of the trigger
|
||||
type: str
|
||||
expression:
|
||||
description: Reduced trigger expression
|
||||
type: str
|
||||
flags:
|
||||
description: Origin of the trigger
|
||||
type: int
|
||||
last_event:
|
||||
      description: Information about the last event
|
||||
type: complex
|
||||
contains:
|
||||
acknowledged:
|
||||
description: If set to true return only acknowledged events
|
||||
type: int
|
||||
acknowledges:
|
||||
description: acknowledges informations
|
||||
type: complex
|
||||
contains:
|
||||
alias:
|
||||
description: Account who acknowledge
|
||||
type: str
|
||||
clock:
|
||||
description: Time when the event was created (timestamp)
|
||||
type: int
|
||||
message:
|
||||
description: Text of the acknowledgement message
|
||||
type: str
|
||||
clock:
|
||||
description: Time when the event was created (timestamp)
|
||||
type: int
|
||||
eventid:
|
||||
description: ID of the event
|
||||
type: int
|
||||
value:
|
||||
description: State of the related object
|
||||
type: int
|
||||
lastchange:
|
||||
description: Time when the trigger last changed its state (timestamp)
|
||||
type: int
|
||||
priority:
|
||||
description: Severity of the trigger
|
||||
type: int
|
||||
state:
|
||||
description: State of the trigger
|
||||
type: int
|
||||
status:
|
||||
description: Whether the trigger is enabled or disabled
|
||||
type: int
|
||||
templateid:
|
||||
description: ID of the parent template trigger
|
||||
type: int
|
||||
triggerid:
|
||||
description: ID of the trigger
|
||||
type: int
|
||||
type:
|
||||
description: Whether the trigger can generate multiple problem events
|
||||
type: int
|
||||
url:
|
||||
description: URL associated with the trigger
|
||||
type: str
|
||||
value:
|
||||
description: Whether the trigger is in OK or problem state
|
||||
type: int
|
||||
'''
|
||||
|
||||
DOCUMENTATION = '''
|
||||
---
|
||||
module: zabbix_host_events_info
|
||||
short_description: Get all triggers about a Zabbix host
|
||||
description:
|
||||
- This module allows you to see if a Zabbix host have no active alert to make actions on it.
|
||||
For this case use module Ansible 'fail' to exclude host in trouble.
|
||||
    - The length of "triggers_ok" allows you to check whether the template's triggers exist for the Zabbix host.
|
||||
author:
|
||||
- "Stéphane Travassac (@stravassac)"
|
||||
requirements:
|
||||
- "python >= 2.7"
|
||||
- "zabbix-api >= 0.5.3"
|
||||
options:
|
||||
host_identifier:
|
||||
description:
|
||||
- Identifier of Zabbix Host
|
||||
required: true
|
||||
type: str
|
||||
host_id_type:
|
||||
description:
|
||||
- Type of host_identifier
|
||||
choices:
|
||||
- hostname
|
||||
- visible_name
|
||||
- hostid
|
||||
required: false
|
||||
default: hostname
|
||||
type: str
|
||||
trigger_severity:
|
||||
description:
|
||||
- Zabbix severity for search filter
|
||||
default: average
|
||||
required: false
|
||||
choices:
|
||||
- not_classified
|
||||
- information
|
||||
- warning
|
||||
- average
|
||||
- high
|
||||
- disaster
|
||||
type: str
|
||||
extends_documentation_fragment:
|
||||
- community.general.zabbix
|
||||
|
||||
'''
|
||||
|
||||
EXAMPLES = '''
|
||||
- name: exclude machine if alert active on it
|
||||
zabbix_host_events_info:
|
||||
server_url: "{{ zabbix_url }}"
|
||||
login_user: "{{ lookup('env','ZABBIX_USER') }}"
|
||||
login_password: "{{ lookup('env','ZABBIX_PASSWORD') }}"
|
||||
host_identifier: "{{inventory_hostname}}"
|
||||
host_id_type: "hostname"
|
||||
timeout: 120
|
||||
register: zbx_host
|
||||
delegate_to: localhost
|
||||
- fail:
|
||||
msg: "machine alert in zabbix"
|
||||
when: zbx_host['triggers_problem']|length > 0
|
||||
'''
|
||||
|
||||
from ansible.module_utils.basic import AnsibleModule, missing_required_lib
|
||||
import atexit
|
||||
import traceback
|
||||
|
||||
try:
|
||||
from zabbix_api import ZabbixAPI
|
||||
|
||||
HAS_ZABBIX_API = True
|
||||
except ImportError:
|
||||
ZBX_IMP_ERR = traceback.format_exc()
|
||||
HAS_ZABBIX_API = False
|
||||
|
||||
|
||||
class Host(object):
    """Zabbix API helper used to inspect a host's triggers and events."""

    def __init__(self, module, zbx):
        self._module = module
        self._zapi = zbx

    def get_host(self, host_identifier, host_inventory, search_key):
        """ Get host by hostname|visible_name|hostid """
        query = {
            'output': 'extend',
            'selectParentTemplates': ['name'],
            'filter': {search_key: host_identifier},
            'selectInventory': host_inventory,
        }
        matches = self._zapi.host.get(query)
        if len(matches) < 1:
            self._module.fail_json(msg="Host not found: %s" % host_identifier)
        else:
            return matches[0]

    def get_triggers_by_host_id_in_problem_state(self, host_id, trigger_severity):
        """ Get triggers in problem state from a hostid"""
        # https://www.zabbix.com/documentation/3.4/manual/api/reference/trigger/get
        query = {
            'output': 'extend',
            'hostids': host_id,
            'min_severity': trigger_severity,
        }
        return self._zapi.trigger.get(query)

    def get_last_event_by_trigger_id(self, triggers_id):
        """ Get the last event from triggerid"""
        query = {
            'output': ['eventid', 'clock', 'acknowledged', 'value'],
            'objectids': triggers_id,
            'select_acknowledges': ['clock', 'alias', 'message'],
            'limit': 1,
            'sortfield': 'clock',
            'sortorder': 'DESC',
        }
        return self._zapi.event.get(query)[0]
||||
|
||||
|
||||
def main():
    """Entry point: classify a host's triggers into OK and problem lists."""
    module = AnsibleModule(
        argument_spec=dict(
            server_url=dict(type='str', required=True, aliases=['url']),
            login_user=dict(type='str', required=True),
            login_password=dict(type='str', required=True, no_log=True),
            http_login_user=dict(type='str', required=False, default=None),
            http_login_password=dict(type='str', required=False, default=None, no_log=True),
            host_identifier=dict(type='str', required=True),
            host_id_type=dict(
                default='hostname',
                type='str',
                choices=['hostname', 'visible_name', 'hostid']),
            trigger_severity=dict(
                type='str',
                required=False,
                default='average',
                choices=['not_classified', 'information', 'warning', 'average', 'high', 'disaster']),
            validate_certs=dict(type='bool', required=False, default=True),
            timeout=dict(type='int', default=10),

        ),
        supports_check_mode=True
    )

    # Fail early if the zabbix-api client library could not be imported.
    if not HAS_ZABBIX_API:
        module.fail_json(msg=missing_required_lib('zabbix-api', url='https://pypi.org/project/zabbix-api/'),
                         exception=ZBX_IMP_ERR)

    # Map the symbolic severity names to the numeric values the API expects.
    trigger_severity_map = {'not_classified': 0, 'information': 1, 'warning': 2, 'average': 3, 'high': 4, 'disaster': 5}
    server_url = module.params['server_url']
    login_user = module.params['login_user']
    login_password = module.params['login_password']
    http_login_user = module.params['http_login_user']
    http_login_password = module.params['http_login_password']
    validate_certs = module.params['validate_certs']
    host_id = module.params['host_identifier']
    host_id_type = module.params['host_id_type']
    trigger_severity = trigger_severity_map[module.params['trigger_severity']]
    timeout = module.params['timeout']

    # Only the hostid is requested from the host inventory.
    host_inventory = 'hostid'
    zbx = None
    # login to zabbix
    try:
        # http_login_* are HTTP basic-auth credentials for the web frontend;
        # the Zabbix application credentials are passed to login() below.
        zbx = ZabbixAPI(server_url, timeout=timeout, user=http_login_user, passwd=http_login_password,
                        validate_certs=validate_certs)
        zbx.login(login_user, login_password)
        # Ensure the API session is logged out when the interpreter exits.
        atexit.register(zbx.logout)
    except Exception as e:
        module.fail_json(msg="Failed to connect to Zabbix server: %s" % e)

    host = Host(module, zbx)

    # Resolve host_identifier to a numeric hostid according to host_id_type.
    if host_id_type == 'hostname':
        zabbix_host = host.get_host(host_id, host_inventory, 'host')
        host_id = zabbix_host['hostid']

    elif host_id_type == 'visible_name':
        zabbix_host = host.get_host(host_id, host_inventory, 'name')
        host_id = zabbix_host['hostid']

    elif host_id_type == 'hostid':
        ''' check hostid exist'''
        zabbix_host = host.get_host(host_id, host_inventory, 'hostid')

    triggers = host.get_triggers_by_host_id_in_problem_state(host_id, trigger_severity)

    triggers_ok = []
    triggers_problem = []
    for trigger in triggers:
        # Get the last event for each trigger whose problem value == 1
        # https://www.zabbix.com/documentation/3.4/manual/api/reference/trigger/object
        if int(trigger['value']) == 1:
            event = host.get_last_event_by_trigger_id(trigger['triggerid'])
            trigger['last_event'] = event
            triggers_problem.append(trigger)
        else:
            triggers_ok.append(trigger)

    module.exit_json(ok=True, triggers_ok=triggers_ok, triggers_problem=triggers_problem)
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
main()
|
||||
1
plugins/modules/monitoring/zabbix/zabbix_host_facts.py
Symbolic link
1
plugins/modules/monitoring/zabbix/zabbix_host_facts.py
Symbolic link
@@ -0,0 +1 @@
|
||||
zabbix_host_info.py
|
||||
252
plugins/modules/monitoring/zabbix/zabbix_host_info.py
Normal file
252
plugins/modules/monitoring/zabbix/zabbix_host_info.py
Normal file
@@ -0,0 +1,252 @@
|
||||
#!/usr/bin/python
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
# (c) me@mimiko.me
|
||||
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||
|
||||
|
||||
from __future__ import (absolute_import, division, print_function)
|
||||
__metaclass__ = type
|
||||
|
||||
|
||||
ANSIBLE_METADATA = {'metadata_version': '1.1',
|
||||
'status': ['preview'],
|
||||
'supported_by': 'community'}
|
||||
|
||||
RETURN = r'''
|
||||
---
|
||||
hosts:
|
||||
description: List of Zabbix hosts. See https://www.zabbix.com/documentation/4.0/manual/api/reference/host/get for list of host values.
|
||||
returned: success
|
||||
type: dict
|
||||
sample: [ { "available": "1", "description": "", "disable_until": "0", "error": "", "flags": "0", "groups": ["1"], "host": "Host A", ... } ]
|
||||
'''
|
||||
|
||||
DOCUMENTATION = r'''
|
||||
---
|
||||
module: zabbix_host_info
|
||||
short_description: Gather information about Zabbix host
|
||||
description:
|
||||
- This module allows you to search for Zabbix host entries.
|
||||
- This module was called C(zabbix_host_facts) before Ansible 2.9. The usage did not change.
|
||||
author:
|
||||
- "Michael Miko (@RedWhiteMiko)"
|
||||
requirements:
|
||||
- "python >= 2.6"
|
||||
- "zabbix-api >= 0.5.4"
|
||||
options:
|
||||
host_name:
|
||||
description:
|
||||
- Name of the host in Zabbix.
|
||||
- host_name is the unique identifier used and cannot be updated using this module.
|
||||
- Required when I(host_ip) is not used.
|
||||
required: false
|
||||
type: str
|
||||
host_ip:
|
||||
description:
|
||||
- Host interface IP of the host in Zabbix.
|
||||
- Required when I(host_name) is not used.
|
||||
required: false
|
||||
type: list
|
||||
elements: str
|
||||
exact_match:
|
||||
description:
|
||||
- Find the exact match
|
||||
type: bool
|
||||
default: no
|
||||
remove_duplicate:
|
||||
description:
|
||||
- Remove duplicate host from host result
|
||||
type: bool
|
||||
default: yes
|
||||
host_inventory:
|
||||
description:
|
||||
- List of host inventory keys to display in result.
|
||||
- Whole host inventory is retrieved if keys are not specified.
|
||||
type: list
|
||||
elements: str
|
||||
required: false
|
||||
extends_documentation_fragment:
|
||||
- community.general.zabbix
|
||||
|
||||
'''
|
||||
|
||||
EXAMPLES = r'''
|
||||
- name: Get host info
|
||||
local_action:
|
||||
module: zabbix_host_info
|
||||
server_url: http://monitor.example.com
|
||||
login_user: username
|
||||
login_password: password
|
||||
host_name: ExampleHost
|
||||
host_ip: 127.0.0.1
|
||||
timeout: 10
|
||||
exact_match: no
|
||||
remove_duplicate: yes
|
||||
|
||||
- name: Reduce host inventory information to provided keys
|
||||
local_action:
|
||||
module: zabbix_host_info
|
||||
server_url: http://monitor.example.com
|
||||
login_user: username
|
||||
login_password: password
|
||||
host_name: ExampleHost
|
||||
host_inventory:
|
||||
- os
|
||||
- tag
|
||||
host_ip: 127.0.0.1
|
||||
timeout: 10
|
||||
exact_match: no
|
||||
remove_duplicate: yes
|
||||
'''
|
||||
|
||||
|
||||
import atexit
|
||||
import traceback
|
||||
|
||||
from ansible.module_utils.basic import AnsibleModule, missing_required_lib
|
||||
|
||||
try:
|
||||
from zabbix_api import ZabbixAPI
|
||||
HAS_ZABBIX_API = True
|
||||
except ImportError:
|
||||
ZBX_IMP_ERR = traceback.format_exc()
|
||||
HAS_ZABBIX_API = False
|
||||
|
||||
|
||||
class Host(object):
    """Zabbix API helper for querying hosts by name or by interface IP."""

    def __init__(self, module, zbx):
        self._module = module
        self._zapi = zbx

    def get_hosts_by_host_name(self, host_name, exact_match, host_inventory):
        """ Get host by host name """
        # 'filter' does exact matching, 'search' does substring matching.
        search_key = 'filter' if exact_match else 'search'
        query = {
            'output': 'extend',
            'selectParentTemplates': ['name'],
            search_key: {'host': [host_name]},
            'selectInventory': host_inventory,
            'selectGroups': 'extend',
            'selectTags': 'extend',
            'selectMacros': 'extend'
        }
        host_list = self._zapi.host.get(query)
        if not host_list:
            self._module.fail_json(msg="Host not found: %s" % host_name)
        else:
            return host_list

    def get_hosts_by_ip(self, host_ips, host_inventory):
        """ Get host by host ip(s) """
        hostinterfaces = self._zapi.hostinterface.get({
            'output': 'extend',
            'filter': {
                'ip': host_ips
            }
        })
        if not hostinterfaces:
            self._module.fail_json(msg="Host not found: %s" % host_ips)
        host_list = []
        for hostinterface in hostinterfaces:
            # One host.get call per interface; the interface that matched
            # the IP is attached to the returned host object.
            matched = self._zapi.host.get({
                'output': 'extend',
                'selectGroups': 'extend',
                'selectParentTemplates': ['name'],
                'hostids': hostinterface['hostid'],
                'selectInventory': host_inventory,
                'selectTags': 'extend',
                'selectMacros': 'extend'
            })
            matched[0]['hostinterfaces'] = hostinterface
            host_list.append(matched[0])
        return host_list

    def delete_duplicate_hosts(self, hosts):
        """ Delete duplicated hosts """
        # Keep only the first entry seen for each distinct host name.
        seen_names = set()
        unique_hosts = []
        for zabbix_host in hosts:
            name = zabbix_host['name']
            if name not in seen_names:
                seen_names.add(name)
                unique_hosts.append(zabbix_host)
        return unique_hosts
||||
|
||||
|
||||
def main():
    """Entry point: query Zabbix hosts by name or interface IP and return them."""
    module = AnsibleModule(
        argument_spec=dict(
            server_url=dict(type='str', required=True, aliases=['url']),
            login_user=dict(type='str', required=True),
            login_password=dict(type='str', required=True, no_log=True),
            host_name=dict(type='str', default='', required=False),
            host_ip=dict(type='list', default=[], required=False),
            http_login_user=dict(type='str', required=False, default=None),
            http_login_password=dict(type='str', required=False, default=None, no_log=True),
            validate_certs=dict(type='bool', required=False, default=True),
            timeout=dict(type='int', default=10),
            exact_match=dict(type='bool', required=False, default=False),
            remove_duplicate=dict(type='bool', required=False, default=True),
            host_inventory=dict(type='list', default=[], required=False)
        ),
        supports_check_mode=True
    )
    # Warn users still invoking the module under its pre-rename alias.
    if module._name == 'zabbix_host_facts':
        module.deprecate("The 'zabbix_host_facts' module has been renamed to 'zabbix_host_info'", version='2.13')

    # Fail early if the zabbix-api client library could not be imported.
    if not HAS_ZABBIX_API:
        module.fail_json(msg=missing_required_lib('zabbix-api', url='https://pypi.org/project/zabbix-api/'), exception=ZBX_IMP_ERR)

    server_url = module.params['server_url']
    login_user = module.params['login_user']
    login_password = module.params['login_password']
    http_login_user = module.params['http_login_user']
    http_login_password = module.params['http_login_password']
    validate_certs = module.params['validate_certs']
    host_name = module.params['host_name']
    host_ips = module.params['host_ip']
    timeout = module.params['timeout']
    exact_match = module.params['exact_match']
    is_remove_duplicate = module.params['remove_duplicate']
    host_inventory = module.params['host_inventory']

    # An empty key list means "return the whole inventory" to the Zabbix API.
    if not host_inventory:
        host_inventory = 'extend'

    zbx = None
    # login to zabbix
    try:
        # http_login_* are HTTP basic-auth credentials for the web frontend;
        # the Zabbix application credentials are passed to login() below.
        zbx = ZabbixAPI(server_url, timeout=timeout, user=http_login_user, passwd=http_login_password,
                        validate_certs=validate_certs)
        zbx.login(login_user, login_password)
        # Ensure the API session is logged out when the interpreter exits.
        atexit.register(zbx.logout)
    except Exception as e:
        module.fail_json(msg="Failed to connect to Zabbix server: %s" % e)

    host = Host(module, zbx)

    if host_name:
        hosts = host.get_hosts_by_host_name(host_name, exact_match, host_inventory)
        if is_remove_duplicate:
            hosts = host.delete_duplicate_hosts(hosts)
        # Attach each host's interfaces to the returned host objects.
        extended_hosts = []
        for zabbix_host in hosts:
            zabbix_host['hostinterfaces'] = host._zapi.hostinterface.get({
                'output': 'extend', 'hostids': zabbix_host['hostid']
            })
            extended_hosts.append(zabbix_host)
        module.exit_json(ok=True, hosts=extended_hosts)

    elif host_ips:
        extended_hosts = host.get_hosts_by_ip(host_ips, host_inventory)
        if is_remove_duplicate:
            # BUGFIX: assign the deduplicated list back to extended_hosts.
            # Previously the result was stored in an unused 'hosts' variable
            # while the unfiltered list was returned, making remove_duplicate
            # a no-op in the host_ip branch.
            extended_hosts = host.delete_duplicate_hosts(extended_hosts)
        module.exit_json(ok=True, hosts=extended_hosts)
    else:
        module.exit_json(ok=False, hosts=[], result="No Host present")
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
main()
|
||||
264
plugins/modules/monitoring/zabbix/zabbix_hostmacro.py
Normal file
264
plugins/modules/monitoring/zabbix/zabbix_hostmacro.py
Normal file
@@ -0,0 +1,264 @@
|
||||
#!/usr/bin/python
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
# (c) 2013-2014, Epic Games, Inc.
|
||||
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||
|
||||
from __future__ import absolute_import, division, print_function
|
||||
__metaclass__ = type
|
||||
|
||||
|
||||
ANSIBLE_METADATA = {'metadata_version': '1.1',
|
||||
'status': ['preview'],
|
||||
'supported_by': 'community'}
|
||||
|
||||
|
||||
DOCUMENTATION = r'''
|
||||
---
|
||||
module: zabbix_hostmacro
|
||||
short_description: Create/update/delete Zabbix host macros
|
||||
description:
|
||||
- manages Zabbix host macros, it can create, update or delete them.
|
||||
author:
|
||||
- "Cove (@cove)"
|
||||
- Dean Hailin Song (!UNKNOWN)
|
||||
requirements:
|
||||
- "python >= 2.6"
|
||||
- "zabbix-api >= 0.5.4"
|
||||
options:
|
||||
host_name:
|
||||
description:
|
||||
- Name of the host.
|
||||
required: true
|
||||
type: str
|
||||
macro_name:
|
||||
description:
|
||||
- Name of the host macro in zabbix native format C({$MACRO}) or simple format C(MACRO).
|
||||
required: true
|
||||
type: str
|
||||
macro_value:
|
||||
description:
|
||||
- Value of the host macro.
|
||||
- Required if I(state=present).
|
||||
type: str
|
||||
state:
|
||||
description:
|
||||
- State of the macro.
|
||||
- On C(present), it will create if macro does not exist or update the macro if the associated data is different.
|
||||
- On C(absent) will remove a macro if it exists.
|
||||
required: false
|
||||
choices: ['present', 'absent']
|
||||
type: str
|
||||
default: "present"
|
||||
force:
|
||||
description:
|
||||
- Only updates an existing macro if set to C(yes).
|
||||
default: 'yes'
|
||||
type: bool
|
||||
|
||||
extends_documentation_fragment:
|
||||
- community.general.zabbix
|
||||
|
||||
'''
|
||||
|
||||
EXAMPLES = r'''
|
||||
- name: Create new host macro or update an existing macro's value
|
||||
local_action:
|
||||
module: zabbix_hostmacro
|
||||
server_url: http://monitor.example.com
|
||||
login_user: username
|
||||
login_password: password
|
||||
host_name: ExampleHost
|
||||
macro_name: EXAMPLE.MACRO
|
||||
macro_value: Example value
|
||||
state: present
|
||||
|
||||
# Values with curly brackets need to be quoted otherwise they will be interpreted as a dictionary
|
||||
- name: Create new host macro in Zabbix native format
|
||||
local_action:
|
||||
module: zabbix_hostmacro
|
||||
server_url: http://monitor.example.com
|
||||
login_user: username
|
||||
login_password: password
|
||||
host_name: ExampleHost
|
||||
macro_name: "{$EXAMPLE.MACRO}"
|
||||
macro_value: Example value
|
||||
state: present
|
||||
|
||||
- name: Delete existing host macro
|
||||
local_action:
|
||||
module: zabbix_hostmacro
|
||||
server_url: http://monitor.example.com
|
||||
login_user: username
|
||||
login_password: password
|
||||
host_name: ExampleHost
|
||||
macro_name: "{$EXAMPLE.MACRO}"
|
||||
state: absent
|
||||
'''
|
||||
|
||||
|
||||
import atexit
|
||||
import traceback
|
||||
|
||||
try:
|
||||
from zabbix_api import ZabbixAPI
|
||||
HAS_ZABBIX_API = True
|
||||
except ImportError:
|
||||
ZBX_IMP_ERR = traceback.format_exc()
|
||||
HAS_ZABBIX_API = False
|
||||
|
||||
from ansible.module_utils.basic import AnsibleModule, missing_required_lib
|
||||
|
||||
|
||||
class HostMacro(object):
    """Thin wrapper around the Zabbix API for managing host-level user macros.

    Every mutating method (create/update/delete) finishes the module run
    itself via ``module.exit_json``/``module.fail_json``; callers should not
    expect control to return on those paths.
    """

    def __init__(self, module, zbx):
        # module: the AnsibleModule instance (result reporting, check mode).
        # zbx: an authenticated ZabbixAPI connection.
        self._module = module
        self._zapi = zbx

    # get host id by host name
    def get_host_id(self, host_name):
        """Return the Zabbix host id for *host_name*; fail the module if the host is absent."""
        try:
            host_list = self._zapi.host.get({'output': 'extend', 'filter': {'host': host_name}})
            if len(host_list) < 1:
                self._module.fail_json(msg="Host not found: %s" % host_name)
            else:
                host_id = host_list[0]['hostid']
                return host_id
        except Exception as e:
            self._module.fail_json(msg="Failed to get the host %s id: %s." % (host_name, e))

    # get host macro
    def get_host_macro(self, macro_name, host_id):
        """Return the macro object named *macro_name* on host *host_id*, or None if missing."""
        try:
            # BUGFIX: the original request also passed 'selectSteps': 'extend',
            # which is a parameter of httptest.get, not usermacro.get; it was
            # silently ignored by the server and has been removed here.
            host_macro_list = self._zapi.usermacro.get(
                {"output": "extend", 'hostids': [host_id], 'filter': {'macro': macro_name}})
            if len(host_macro_list) > 0:
                return host_macro_list[0]
            return None
        except Exception as e:
            self._module.fail_json(msg="Failed to get host macro %s: %s" % (macro_name, e))

    # create host macro
    def create_host_macro(self, macro_name, macro_value, host_id):
        """Create a new macro on the host and exit the module reporting changed=True."""
        try:
            # In check mode report the would-be change without touching the server.
            if self._module.check_mode:
                self._module.exit_json(changed=True)
            self._zapi.usermacro.create({'hostid': host_id, 'macro': macro_name, 'value': macro_value})
            self._module.exit_json(changed=True, result="Successfully added host macro %s" % macro_name)
        except Exception as e:
            self._module.fail_json(msg="Failed to create host macro %s: %s" % (macro_name, e))

    # update host macro
    def update_host_macro(self, host_macro_obj, macro_name, macro_value):
        """Update an existing macro's value; exit with changed=False if already current."""
        host_macro_id = host_macro_obj['hostmacroid']
        if host_macro_obj['macro'] == macro_name and host_macro_obj['value'] == macro_value:
            self._module.exit_json(changed=False, result="Host macro %s already up to date" % macro_name)
        try:
            if self._module.check_mode:
                self._module.exit_json(changed=True)
            self._zapi.usermacro.update({'hostmacroid': host_macro_id, 'value': macro_value})
            self._module.exit_json(changed=True, result="Successfully updated host macro %s" % macro_name)
        except Exception as e:
            self._module.fail_json(msg="Failed to update host macro %s: %s" % (macro_name, e))

    # delete host macro
    def delete_host_macro(self, host_macro_obj, macro_name):
        """Delete the given macro object and exit the module reporting changed=True."""
        host_macro_id = host_macro_obj['hostmacroid']
        try:
            if self._module.check_mode:
                self._module.exit_json(changed=True)
            self._zapi.usermacro.delete([host_macro_id])
            self._module.exit_json(changed=True, result="Successfully deleted host macro %s" % macro_name)
        except Exception as e:
            self._module.fail_json(msg="Failed to delete host macro %s: %s" % (macro_name, e))
|
||||
|
||||
|
||||
def normalize_macro_name(macro_name):
    """Return *macro_name* normalized to Zabbix native format ``{$NAME}``.

    Zabbix stores macro names in upper case, so the name part is upper-cased;
    an optional ``:context`` part after the first colon is preserved verbatim.
    The ``{$`` prefix and ``}`` suffix are added when missing.
    """
    # Split off an optional macro context; only the name proper is upper-cased.
    name_part, colon, context = macro_name.partition(':')
    normalized = name_part.upper() + colon + context

    # Wrap in the native {$...} delimiters unless they are already present.
    if not normalized.startswith('{$'):
        normalized = '{$' + normalized
    if not normalized.endswith('}'):
        normalized += '}'

    return normalized
|
||||
|
||||
|
||||
def main():
    """Module entry point: ensure the requested host macro state on Zabbix.

    Parses module arguments, connects to the Zabbix API, then creates,
    updates, or deletes the macro depending on ``state`` and ``force``.
    All outcomes are reported via exit_json/fail_json (which terminate
    the process), so nothing here returns normally.
    """
    module = AnsibleModule(
        argument_spec=dict(
            server_url=dict(type='str', required=True, aliases=['url']),
            login_user=dict(type='str', required=True),
            login_password=dict(type='str', required=True, no_log=True),
            http_login_user=dict(type='str', required=False, default=None),
            http_login_password=dict(type='str', required=False, default=None, no_log=True),
            validate_certs=dict(type='bool', required=False, default=True),
            host_name=dict(type='str', required=True),
            macro_name=dict(type='str', required=True),
            macro_value=dict(type='str', required=False),
            state=dict(type='str', default='present', choices=['present', 'absent']),
            timeout=dict(type='int', default=10),
            force=dict(type='bool', default=True)
        ),
        # A value is only meaningful (and therefore required) when creating/updating.
        required_if=[
            ['state', 'present', ['macro_value']]
        ],
        supports_check_mode=True
    )

    # Fail early with installation guidance if the zabbix-api package is missing.
    if not HAS_ZABBIX_API:
        module.fail_json(msg=missing_required_lib('zabbix-api', url='https://pypi.org/project/zabbix-api/'), exception=ZBX_IMP_ERR)

    server_url = module.params['server_url']
    login_user = module.params['login_user']
    login_password = module.params['login_password']
    http_login_user = module.params['http_login_user']
    http_login_password = module.params['http_login_password']
    validate_certs = module.params['validate_certs']
    host_name = module.params['host_name']
    # User may pass either "NAME" or "{$NAME}"; normalize to the native form.
    macro_name = normalize_macro_name(module.params['macro_name'])
    macro_value = module.params['macro_value']
    state = module.params['state']
    timeout = module.params['timeout']
    force = module.params['force']

    zbx = None
    # login to zabbix
    try:
        zbx = ZabbixAPI(server_url, timeout=timeout, user=http_login_user, passwd=http_login_password,
                        validate_certs=validate_certs)
        zbx.login(login_user, login_password)
        # Make sure the API session is closed however the module exits.
        atexit.register(zbx.logout)
    except Exception as e:
        module.fail_json(msg="Failed to connect to Zabbix server: %s" % e)

    host_macro_class_obj = HostMacro(module, zbx)

    if host_name:
        host_id = host_macro_class_obj.get_host_id(host_name)
        host_macro_obj = host_macro_class_obj.get_host_macro(macro_name, host_id)

        if state == 'absent':
            if not host_macro_obj:
                # Nothing to delete; idempotent no-op.
                module.exit_json(changed=False, msg="Host Macro %s does not exist" % macro_name)
            else:
                # delete a macro
                host_macro_class_obj.delete_host_macro(host_macro_obj, macro_name)
        else:
            if not host_macro_obj:
                # create host macro
                host_macro_class_obj.create_host_macro(macro_name, macro_value, host_id)
            elif force:
                # update host macro
                host_macro_class_obj.update_host_macro(host_macro_obj, macro_name, macro_value)
            else:
                # Macro exists and force=no: leave it untouched.
                module.exit_json(changed=False, result="Host macro %s already exists and force is set to no" % macro_name)


if __name__ == '__main__':
    main()
|
||||
402
plugins/modules/monitoring/zabbix/zabbix_maintenance.py
Normal file
402
plugins/modules/monitoring/zabbix/zabbix_maintenance.py
Normal file
@@ -0,0 +1,402 @@
|
||||
#!/usr/bin/python
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
# (c) 2013, Alexander Bulimov <lazywolf0@gmail.com>
|
||||
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||
|
||||
from __future__ import absolute_import, division, print_function
|
||||
__metaclass__ = type
|
||||
|
||||
|
||||
ANSIBLE_METADATA = {'metadata_version': '1.1',
|
||||
'status': ['preview'],
|
||||
'supported_by': 'community'}
|
||||
|
||||
|
||||
DOCUMENTATION = r'''
|
||||
|
||||
module: zabbix_maintenance
|
||||
short_description: Create Zabbix maintenance windows
|
||||
description:
|
||||
- This module will let you create Zabbix maintenance windows.
|
||||
author: "Alexander Bulimov (@abulimov)"
|
||||
requirements:
|
||||
- "python >= 2.6"
|
||||
- "zabbix-api >= 0.5.4"
|
||||
options:
|
||||
state:
|
||||
description:
|
||||
- Create or remove a maintenance window. Maintenance window to remove is identified by name.
|
||||
default: present
|
||||
choices: [ "present", "absent" ]
|
||||
type: str
|
||||
host_names:
|
||||
description:
|
||||
- Hosts to manage maintenance window for.
|
||||
- B(Required) option when I(state=present) and I(host_groups) is not used.
|
||||
aliases: [ "host_name" ]
|
||||
type: list
|
||||
elements: str
|
||||
host_groups:
|
||||
description:
|
||||
- Host groups to manage maintenance window for.
|
||||
- B(Required) option when I(state=present) and I(host_names) is not used.
|
||||
aliases: [ "host_group" ]
|
||||
type: list
|
||||
elements: str
|
||||
minutes:
|
||||
description:
|
||||
- Length of maintenance window in minutes.
|
||||
default: 10
|
||||
type: int
|
||||
name:
|
||||
description:
|
||||
- Unique name of maintenance window.
|
||||
required: true
|
||||
type: str
|
||||
desc:
|
||||
description:
|
||||
- Short description of maintenance window.
|
||||
default: Created by Ansible
|
||||
type: str
|
||||
collect_data:
|
||||
description:
|
||||
- Type of maintenance. With data collection, or without.
|
||||
type: bool
|
||||
default: 'yes'
|
||||
|
||||
extends_documentation_fragment:
|
||||
- community.general.zabbix
|
||||
|
||||
|
||||
notes:
|
||||
- Useful for setting hosts in maintenance mode before big update,
|
||||
and removing maintenance window after update.
|
||||
- Module creates maintenance window from now() to now() + minutes,
|
||||
so if Zabbix server's time and host's time are not synchronized,
|
||||
you will get strange results.
|
||||
- Install required module with 'pip install zabbix-api' command.
|
||||
'''
|
||||
|
||||
EXAMPLES = r'''
|
||||
- name: Create a named maintenance window for host www1 for 90 minutes
|
||||
zabbix_maintenance:
|
||||
name: Update of www1
|
||||
host_name: www1.example.com
|
||||
state: present
|
||||
minutes: 90
|
||||
server_url: https://monitoring.example.com
|
||||
login_user: ansible
|
||||
login_password: pAsSwOrD
|
||||
|
||||
- name: Create a named maintenance window for host www1 and host groups Office and Dev
|
||||
zabbix_maintenance:
|
||||
name: Update of www1
|
||||
host_name: www1.example.com
|
||||
host_groups:
|
||||
- Office
|
||||
- Dev
|
||||
state: present
|
||||
server_url: https://monitoring.example.com
|
||||
login_user: ansible
|
||||
login_password: pAsSwOrD
|
||||
|
||||
- name: Create a named maintenance window for hosts www1 and db1, without data collection.
|
||||
zabbix_maintenance:
|
||||
name: update
|
||||
host_names:
|
||||
- www1.example.com
|
||||
- db1.example.com
|
||||
state: present
|
||||
collect_data: False
|
||||
server_url: https://monitoring.example.com
|
||||
login_user: ansible
|
||||
login_password: pAsSwOrD
|
||||
|
||||
- name: Remove maintenance window by name
|
||||
zabbix_maintenance:
|
||||
name: Test1
|
||||
state: absent
|
||||
server_url: https://monitoring.example.com
|
||||
login_user: ansible
|
||||
login_password: pAsSwOrD
|
||||
'''
|
||||
|
||||
|
||||
import atexit
|
||||
import datetime
|
||||
import time
|
||||
import traceback
|
||||
|
||||
try:
|
||||
from zabbix_api import ZabbixAPI
|
||||
HAS_ZABBIX_API = True
|
||||
except ImportError:
|
||||
ZBX_IMP_ERR = traceback.format_exc()
|
||||
HAS_ZABBIX_API = False
|
||||
|
||||
from ansible.module_utils.basic import AnsibleModule, missing_required_lib
|
||||
|
||||
|
||||
def create_maintenance(zbx, group_ids, host_ids, start_time, maintenance_type, period, name, desc):
    """Create a Zabbix maintenance window covering [start_time, start_time + period].

    Returns a ``(rc, data, error)`` tuple: ``(0, None, None)`` on success,
    ``(1, None, message)`` on failure.
    """
    end_time = start_time + period
    payload = {
        "groupids": group_ids,
        "hostids": host_ids,
        "name": name,
        "maintenance_type": maintenance_type,
        "active_since": str(start_time),
        "active_till": str(end_time),
        "description": desc,
        "timeperiods": [{
            "timeperiod_type": "0",
            "start_date": str(start_time),
            "period": str(period),
        }],
    }
    try:
        zbx.maintenance.create(payload)
    # zabbix_api can call sys.exit(), so SystemExit must be trapped as well
    except (Exception, SystemExit) as e:
        return 1, None, str(e)
    return 0, None, None
|
||||
|
||||
|
||||
def update_maintenance(zbx, maintenance_id, group_ids, host_ids, start_time, maintenance_type, period, desc):
    """Update an existing maintenance window identified by *maintenance_id*.

    Returns a ``(rc, data, error)`` tuple: ``(0, None, None)`` on success,
    ``(1, None, message)`` on failure.
    """
    end_time = start_time + period
    payload = {
        "maintenanceid": maintenance_id,
        "groupids": group_ids,
        "hostids": host_ids,
        "maintenance_type": maintenance_type,
        "active_since": str(start_time),
        "active_till": str(end_time),
        "description": desc,
        "timeperiods": [{
            "timeperiod_type": "0",
            "start_date": str(start_time),
            "period": str(period),
        }],
    }
    try:
        zbx.maintenance.update(payload)
    # zabbix_api can call sys.exit(), so SystemExit must be trapped as well
    except (Exception, SystemExit) as e:
        return 1, None, str(e)
    return 0, None, None
|
||||
|
||||
|
||||
def get_maintenance(zbx, name):
    """Fetch the first maintenance window named *name*, if any.

    On success returns ``(0, maintenance, None)`` with ``groupids`` and
    ``hostids`` lists attached to the maintenance dict, or ``(0, None, None)``
    when no window with that name exists. On API failure returns
    ``(1, None, message)``.
    """
    query = {
        "filter": {
            "name": name,
        },
        "selectGroups": "extend",
        "selectHosts": "extend",
    }
    try:
        maintenances = zbx.maintenance.get(query)
    # zabbix_api can call sys.exit(), so SystemExit must be trapped as well
    except (Exception, SystemExit) as e:
        return 1, None, str(e)

    if not maintenances:
        return 0, None, None

    # Only the first match is used; names are expected to be unique.
    maintenance = maintenances[0]
    maintenance["groupids"] = [group["groupid"] for group in maintenance.get("groups", [])]
    maintenance["hostids"] = [host["hostid"] for host in maintenance.get("hosts", [])]
    return 0, maintenance, None
|
||||
|
||||
|
||||
def delete_maintenance(zbx, maintenance_id):
    """Delete a maintenance window by id.

    Returns ``(0, None, None)`` on success, ``(1, None, message)`` on failure.
    """
    rc, error = 0, None
    try:
        zbx.maintenance.delete([maintenance_id])
    # zabbix_api can call sys.exit(), so SystemExit must be trapped as well
    except (Exception, SystemExit) as e:
        rc, error = 1, str(e)
    return rc, None, error
|
||||
|
||||
|
||||
def get_group_ids(zbx, host_groups):
    """Resolve a list of host group names to Zabbix group ids.

    Returns ``(0, group_ids, None)`` on success, or ``(1, None, message)``
    when the API call fails or any group name is unknown.
    """
    group_ids = []
    for group_name in host_groups:
        query = {
            "output": "extend",
            "filter": {
                "name": group_name
            },
        }
        try:
            matches = zbx.hostgroup.get(query)
        # zabbix_api can call sys.exit(), so SystemExit must be trapped as well
        except (Exception, SystemExit) as e:
            return 1, None, str(e)

        if not matches:
            return 1, None, "Group id for group %s not found" % group_name

        group_ids.append(matches[0]["groupid"])

    return 0, group_ids, None
|
||||
|
||||
|
||||
def get_host_ids(zbx, host_names):
    """Resolve a list of host (visible) names to Zabbix host ids.

    Returns ``(0, host_ids, None)`` on success, or ``(1, None, message)``
    when the API call fails or any host name is unknown.
    """
    host_ids = []
    for host_name in host_names:
        query = {
            "output": "extend",
            "filter": {
                "name": host_name
            },
        }
        try:
            matches = zbx.host.get(query)
        # zabbix_api can call sys.exit(), so SystemExit must be trapped as well
        except (Exception, SystemExit) as e:
            return 1, None, str(e)

        if not matches:
            return 1, None, "Host id for host %s not found" % host_name

        host_ids.append(matches[0]["hostid"])

    return 0, host_ids, None
|
||||
|
||||
|
||||
def main():
    """Module entry point: create or remove a Zabbix maintenance window.

    The window is identified by ``name``. With ``state=present`` the window
    is created (or updated when its hosts/groups/type/interval differ) to run
    from "now" for ``minutes`` minutes; with ``state=absent`` it is removed.
    Always finishes via exit_json/fail_json.
    """
    module = AnsibleModule(
        argument_spec=dict(
            state=dict(type='str', required=False, default='present', choices=['present', 'absent']),
            server_url=dict(type='str', required=True, aliases=['url']),
            host_names=dict(type='list', required=False, default=None, aliases=['host_name']),
            minutes=dict(type='int', required=False, default=10),
            host_groups=dict(type='list', required=False, default=None, aliases=['host_group']),
            login_user=dict(type='str', required=True),
            login_password=dict(type='str', required=True, no_log=True),
            validate_certs=dict(type='bool', required=False, default=True),
            http_login_user=dict(type='str', required=False, default=None),
            http_login_password=dict(type='str', required=False, default=None, no_log=True),
            name=dict(type='str', required=True),
            desc=dict(type='str', required=False, default="Created by Ansible"),
            collect_data=dict(type='bool', required=False, default=True),
            timeout=dict(type='int', default=10),
        ),
        supports_check_mode=True,
    )

    # Fail early with installation guidance if the zabbix-api package is missing.
    if not HAS_ZABBIX_API:
        module.fail_json(msg=missing_required_lib('zabbix-api', url='https://pypi.org/project/zabbix-api/'), exception=ZBX_IMP_ERR)

    host_names = module.params['host_names']
    host_groups = module.params['host_groups']
    state = module.params['state']
    login_user = module.params['login_user']
    login_password = module.params['login_password']
    http_login_user = module.params['http_login_user']
    http_login_password = module.params['http_login_password']
    validate_certs = module.params['validate_certs']
    minutes = module.params['minutes']
    name = module.params['name']
    desc = module.params['desc']
    server_url = module.params['server_url']
    collect_data = module.params['collect_data']
    timeout = module.params['timeout']

    # Zabbix maintenance_type: 0 = with data collection, 1 = without.
    if collect_data:
        maintenance_type = 0
    else:
        maintenance_type = 1

    try:
        zbx = ZabbixAPI(server_url, timeout=timeout, user=http_login_user, passwd=http_login_password,
                        validate_certs=validate_certs)
        zbx.login(login_user, login_password)
        # Make sure the API session is closed however the module exits.
        atexit.register(zbx.logout)
    # zabbix_api can call sys.exit() so we need to catch SystemExit here
    except (Exception, SystemExit) as e:
        module.fail_json(msg="Failed to connect to Zabbix server: %s" % e)

    changed = False

    if state == "present":

        if not host_names and not host_groups:
            module.fail_json(msg="At least one host_name or host_group must be defined for each created maintenance.")

        # Window starts at the current minute on the controller's clock;
        # note this can diverge from the Zabbix server's clock.
        now = datetime.datetime.now().replace(second=0)
        start_time = time.mktime(now.timetuple())
        period = 60 * int(minutes)  # N * 60 seconds

        if host_groups:
            (rc, group_ids, error) = get_group_ids(zbx, host_groups)
            if rc != 0:
                module.fail_json(msg="Failed to get group_ids: %s" % error)
        else:
            group_ids = []

        if host_names:
            (rc, host_ids, error) = get_host_ids(zbx, host_names)
            if rc != 0:
                module.fail_json(msg="Failed to get host_ids: %s" % error)
        else:
            host_ids = []

        (rc, maintenance, error) = get_maintenance(zbx, name)
        if rc != 0:
            module.fail_json(msg="Failed to check maintenance %s existence: %s" % (name, error))

        # Update only when something actually differs (string compare against
        # the string-typed fields returned by the Zabbix API).
        if maintenance and (
            sorted(group_ids) != sorted(maintenance["groupids"]) or
            sorted(host_ids) != sorted(maintenance["hostids"]) or
            str(maintenance_type) != maintenance["maintenance_type"] or
            str(int(start_time)) != maintenance["active_since"] or
            str(int(start_time + period)) != maintenance["active_till"]
        ):
            if module.check_mode:
                changed = True
            else:
                (rc, data, error) = update_maintenance(zbx, maintenance["maintenanceid"], group_ids, host_ids, start_time, maintenance_type, period, desc)
                if rc == 0:
                    changed = True
                else:
                    module.fail_json(msg="Failed to update maintenance: %s" % error)

        if not maintenance:
            if module.check_mode:
                changed = True
            else:
                (rc, data, error) = create_maintenance(zbx, group_ids, host_ids, start_time, maintenance_type, period, name, desc)
                if rc == 0:
                    changed = True
                else:
                    module.fail_json(msg="Failed to create maintenance: %s" % error)

    if state == "absent":

        (rc, maintenance, error) = get_maintenance(zbx, name)
        if rc != 0:
            module.fail_json(msg="Failed to check maintenance %s existence: %s" % (name, error))

        if maintenance:
            if module.check_mode:
                changed = True
            else:
                (rc, data, error) = delete_maintenance(zbx, maintenance["maintenanceid"])
                if rc == 0:
                    changed = True
                else:
                    module.fail_json(msg="Failed to remove maintenance: %s" % error)

    module.exit_json(changed=changed)


if __name__ == '__main__':
    main()
|
||||
829
plugins/modules/monitoring/zabbix/zabbix_map.py
Normal file
829
plugins/modules/monitoring/zabbix/zabbix_map.py
Normal file
@@ -0,0 +1,829 @@
|
||||
#!/usr/bin/python
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
# (c) 2017-2018, Antony Alekseyev <antony.alekseyev@gmail.com>
|
||||
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||
|
||||
from __future__ import absolute_import, division, print_function
|
||||
__metaclass__ = type
|
||||
|
||||
DOCUMENTATION = r'''
|
||||
---
|
||||
module: zabbix_map
|
||||
author:
|
||||
- "Antony Alekseyev (@Akint)"
|
||||
short_description: Create/update/delete Zabbix maps
|
||||
description:
|
||||
- "This module allows you to create, modify and delete Zabbix map entries,
|
||||
using Graphviz binaries and text description written in DOT language.
|
||||
Nodes of the graph will become map elements and edges will become links between map elements.
|
||||
See U(https://en.wikipedia.org/wiki/DOT_(graph_description_language)) and U(https://www.graphviz.org/) for details.
|
||||
Inspired by U(http://blog.zabbix.com/maps-for-the-lazy/)."
|
||||
- "The following extra node attributes are supported:
|
||||
C(zbx_host) contains name of the host in Zabbix. Use this if desired type of map element is C(host).
|
||||
C(zbx_group) contains name of the host group in Zabbix. Use this if desired type of map element is C(host group).
|
||||
C(zbx_map) contains name of the map in Zabbix. Use this if desired type of map element is C(map).
|
||||
C(zbx_label) contains label of map element.
|
||||
C(zbx_image) contains name of the image used to display the element in default state.
|
||||
C(zbx_image_disabled) contains name of the image used to display disabled map element.
|
||||
C(zbx_image_maintenance) contains name of the image used to display map element in maintenance.
|
||||
C(zbx_image_problem) contains name of the image used to display map element with problems.
|
||||
C(zbx_url) contains map element URL in C(name:url) format.
|
||||
More than one URL could be specified by adding a postfix (e.g., C(zbx_url1), C(zbx_url2))."
|
||||
- "The following extra link attributes are supported:
|
||||
C(zbx_draw_style) contains link line draw style. Possible values: C(line), C(bold), C(dotted), C(dashed).
|
||||
C(zbx_trigger) contains name of the trigger used as a link indicator in C(host_name:trigger_name) format.
|
||||
More than one trigger could be specified by adding a postfix (e.g., C(zbx_trigger1), C(zbx_trigger2)).
|
||||
C(zbx_trigger_color) contains indicator color specified either as CSS3 name or as a hexadecimal code starting with C(#).
|
||||
C(zbx_trigger_draw_style) contains indicator draw style. Possible values are the same as for C(zbx_draw_style)."
|
||||
requirements:
|
||||
- "python >= 2.6"
|
||||
- "zabbix-api >= 0.5.4"
|
||||
- pydotplus
|
||||
- webcolors
|
||||
- Pillow
|
||||
- Graphviz
|
||||
options:
|
||||
name:
|
||||
description:
|
||||
- Name of the map.
|
||||
required: true
|
||||
aliases: [ "map_name" ]
|
||||
type: str
|
||||
data:
|
||||
description:
|
||||
- Graph written in DOT language.
|
||||
required: false
|
||||
aliases: [ "dot_data" ]
|
||||
type: str
|
||||
state:
|
||||
description:
|
||||
- State of the map.
|
||||
- On C(present), it will create if map does not exist or update the map if the associated data is different.
|
||||
- On C(absent) will remove the map if it exists.
|
||||
required: false
|
||||
choices: ['present', 'absent']
|
||||
default: "present"
|
||||
type: str
|
||||
width:
|
||||
description:
|
||||
- Width of the map.
|
||||
required: false
|
||||
default: 800
|
||||
type: int
|
||||
height:
|
||||
description:
|
||||
- Height of the map.
|
||||
required: false
|
||||
default: 600
|
||||
type: int
|
||||
margin:
|
||||
description:
|
||||
- Size of white space between map's borders and its elements.
|
||||
required: false
|
||||
default: 40
|
||||
type: int
|
||||
expand_problem:
|
||||
description:
|
||||
- Whether the problem trigger will be displayed for elements with a single problem.
|
||||
required: false
|
||||
type: bool
|
||||
default: true
|
||||
highlight:
|
||||
description:
|
||||
- Whether icon highlighting is enabled.
|
||||
required: false
|
||||
type: bool
|
||||
default: true
|
||||
label_type:
|
||||
description:
|
||||
- Map element label type.
|
||||
required: false
|
||||
choices: ['label', 'ip', 'name', 'status', 'nothing', 'custom']
|
||||
default: "name"
|
||||
type: str
|
||||
default_image:
|
||||
description:
|
||||
- Name of the Zabbix image used to display the element if this element doesn't have the C(zbx_image) attribute defined.
|
||||
required: false
|
||||
aliases: [ "image" ]
|
||||
type: str
|
||||
|
||||
extends_documentation_fragment:
|
||||
- community.general.zabbix
|
||||
|
||||
'''
|
||||
|
||||
RETURN = r''' # '''
|
||||
|
||||
EXAMPLES = r'''
|
||||
###
|
||||
### Example inventory:
|
||||
# [web]
|
||||
# web[01:03].example.com ansible_host=127.0.0.1
|
||||
# [db]
|
||||
# db.example.com ansible_host=127.0.0.1
|
||||
# [backup]
|
||||
# backup.example.com ansible_host=127.0.0.1
|
||||
###
|
||||
### Each inventory host is present in Zabbix with a matching name.
|
||||
###
|
||||
### Contents of 'map.j2':
|
||||
# digraph G {
|
||||
# graph [layout=dot splines=false overlap=scale]
|
||||
# INTERNET [zbx_url="Google:https://google.com" zbx_image="Cloud_(96)"]
|
||||
# {% for web_host in groups.web %}
|
||||
# {% set web_loop = loop %}
|
||||
# web{{ '%03d' % web_loop.index }} [zbx_host="{{ web_host }}"]
|
||||
# INTERNET -> web{{ '%03d' % web_loop.index }} [zbx_trigger="{{ web_host }}:Zabbix agent on {HOST.NAME} is unreachable for 5 minutes"]
|
||||
# {% for db_host in groups.db %}
|
||||
# {% set db_loop = loop %}
|
||||
# web{{ '%03d' % web_loop.index }} -> db{{ '%03d' % db_loop.index }}
|
||||
# {% endfor %}
|
||||
# {% endfor %}
|
||||
# { rank=same
|
||||
# {% for db_host in groups.db %}
|
||||
# {% set db_loop = loop %}
|
||||
# db{{ '%03d' % db_loop.index }} [zbx_host="{{ db_host }}"]
|
||||
# {% for backup_host in groups.backup %}
|
||||
# {% set backup_loop = loop %}
|
||||
# db{{ '%03d' % db_loop.index }} -> backup{{ '%03d' % backup_loop.index }} [color="blue"]
|
||||
# {% endfor %}
|
||||
# {% endfor %}
|
||||
# {% for backup_host in groups.backup %}
|
||||
# {% set backup_loop = loop %}
|
||||
# backup{{ '%03d' % backup_loop.index }} [zbx_host="{{ backup_host }}"]
|
||||
# {% endfor %}
|
||||
# }
|
||||
# }
|
||||
###
|
||||
### Create Zabbix map "Demo Map" made of template 'map.j2'
|
||||
- name: Create Zabbix map
|
||||
zabbix_map:
|
||||
server_url: http://zabbix.example.com
|
||||
login_user: username
|
||||
login_password: password
|
||||
name: Demo map
|
||||
state: present
|
||||
data: "{{ lookup('template', 'map.j2') }}"
|
||||
default_image: Server_(64)
|
||||
expand_problem: no
|
||||
highlight: no
|
||||
label_type: label
|
||||
delegate_to: localhost
|
||||
run_once: yes
|
||||
'''
|
||||
|
||||
ANSIBLE_METADATA = {
|
||||
'metadata_version': '1.1',
|
||||
'supported_by': 'community',
|
||||
'status': ['preview']
|
||||
}
|
||||
|
||||
|
||||
import atexit
|
||||
import base64
|
||||
import traceback
|
||||
|
||||
from io import BytesIO
|
||||
from operator import itemgetter
|
||||
from distutils.version import StrictVersion
|
||||
from ansible.module_utils.basic import AnsibleModule, missing_required_lib
|
||||
|
||||
try:
|
||||
import pydotplus
|
||||
HAS_PYDOTPLUS = True
|
||||
except ImportError:
|
||||
PYDOT_IMP_ERR = traceback.format_exc()
|
||||
HAS_PYDOTPLUS = False
|
||||
|
||||
try:
|
||||
import webcolors
|
||||
HAS_WEBCOLORS = True
|
||||
except ImportError:
|
||||
WEBCOLORS_IMP_ERR = traceback.format_exc()
|
||||
HAS_WEBCOLORS = False
|
||||
|
||||
try:
|
||||
from zabbix_api import ZabbixAPI
|
||||
HAS_ZABBIX_API = True
|
||||
except ImportError:
|
||||
ZBX_IMP_ERR = traceback.format_exc()
|
||||
HAS_ZABBIX_API = False
|
||||
|
||||
try:
|
||||
from PIL import Image
|
||||
HAS_PIL = True
|
||||
except ImportError:
|
||||
PIL_IMP_ERR = traceback.format_exc()
|
||||
HAS_PIL = False
|
||||
|
||||
|
||||
class Map():
|
||||
    def __init__(self, module, zbx):
        """Cache module parameters and current map state for the other methods.

        module -- the AnsibleModule instance (parameter access, exit/fail).
        zbx    -- an authenticated ZabbixAPI connection.
        """
        self._module = module
        self._zapi = zbx

        self.map_name = module.params['name']
        self.dot_data = module.params['data']
        self.width = module.params['width']
        self.height = module.params['height']
        self.state = module.params['state']
        self.default_image = module.params['default_image']
        # Id of an already-existing map with this name, resolved via
        # _get_sysmap_id (defined elsewhere in this class) -- presumably
        # falsy when the map does not exist yet; TODO confirm.
        self.map_id = self._get_sysmap_id(self.map_name)
        self.margin = module.params['margin']
        self.expand_problem = module.params['expand_problem']
        self.highlight = module.params['highlight']
        self.label_type = module.params['label_type']
        # Server-reported API version, for version-dependent behavior elsewhere.
        self.api_version = self._zapi.api_version()
        self.selements_sort_keys = self._get_selements_sort_keys()
|
||||
|
||||
def _build_graph(self):
|
||||
try:
|
||||
graph_without_positions = pydotplus.graph_from_dot_data(self.dot_data)
|
||||
dot_data_with_positions = graph_without_positions.create_dot()
|
||||
graph_with_positions = pydotplus.graph_from_dot_data(dot_data_with_positions)
|
||||
if graph_with_positions:
|
||||
return graph_with_positions
|
||||
except Exception as e:
|
||||
self._module.fail_json(msg="Failed to build graph from DOT data: %s" % e)
|
||||
|
||||
def get_map_config(self):
|
||||
if not self.dot_data:
|
||||
self._module.fail_json(msg="'data' is mandatory with state 'present'")
|
||||
graph = self._build_graph()
|
||||
nodes = self._get_graph_nodes(graph)
|
||||
edges = self._get_graph_edges(graph)
|
||||
icon_ids = self._get_icon_ids()
|
||||
map_config = {
|
||||
'name': self.map_name,
|
||||
'label_type': self._get_label_type_id(self.label_type),
|
||||
'expandproblem': int(self.expand_problem),
|
||||
'highlight': int(self.highlight),
|
||||
'width': self.width,
|
||||
'height': self.height,
|
||||
'selements': self._get_selements(graph, nodes, icon_ids),
|
||||
'links': self._get_links(nodes, edges),
|
||||
}
|
||||
return map_config
|
||||
|
||||
def _get_label_type_id(self, label_type):
|
||||
label_type_ids = {
|
||||
'label': 0,
|
||||
'ip': 1,
|
||||
'name': 2,
|
||||
'status': 3,
|
||||
'nothing': 4,
|
||||
'custom': 5,
|
||||
}
|
||||
try:
|
||||
label_type_id = label_type_ids[label_type]
|
||||
except Exception as e:
|
||||
self._module.fail_json(msg="Failed to find id for label type '%s': %s" % (label_type, e))
|
||||
return label_type_id
|
||||
|
||||
def _get_images_info(self, data, icon_ids):
|
||||
images = [
|
||||
{
|
||||
'dot_tag': 'zbx_image',
|
||||
'zbx_property': 'iconid_off',
|
||||
'mandatory': True
|
||||
},
|
||||
{
|
||||
'dot_tag': 'zbx_image_disabled',
|
||||
'zbx_property': 'iconid_disabled',
|
||||
'mandatory': False
|
||||
},
|
||||
{
|
||||
'dot_tag': 'zbx_image_maintenance',
|
||||
'zbx_property': 'iconid_maintenance',
|
||||
'mandatory': False
|
||||
},
|
||||
{
|
||||
'dot_tag': 'zbx_image_problem',
|
||||
'zbx_property': 'iconid_on',
|
||||
'mandatory': False
|
||||
}
|
||||
]
|
||||
images_info = {}
|
||||
default_image = self.default_image if self.default_image else sorted(icon_ids.items())[0][0]
|
||||
for image in images:
|
||||
image_name = data.get(image['dot_tag'], None)
|
||||
if not image_name:
|
||||
if image['mandatory']:
|
||||
image_name = default_image
|
||||
else:
|
||||
continue
|
||||
image_name = remove_quotes(image_name)
|
||||
if image_name in icon_ids:
|
||||
images_info[image['zbx_property']] = icon_ids[image_name]
|
||||
if not image['mandatory']:
|
||||
images_info['use_iconmap'] = 0
|
||||
else:
|
||||
self._module.fail_json(msg="Failed to find id for image '%s'" % image_name)
|
||||
return images_info
|
||||
|
||||
def _get_element_type(self, data):
|
||||
types = {
|
||||
'host': 0,
|
||||
'sysmap': 1,
|
||||
'trigger': 2,
|
||||
'group': 3,
|
||||
'image': 4
|
||||
}
|
||||
element_type = {
|
||||
'elementtype': types['image'],
|
||||
}
|
||||
if StrictVersion(self.api_version) < StrictVersion('3.4'):
|
||||
element_type.update({
|
||||
'elementid': "0",
|
||||
})
|
||||
for type_name, type_id in sorted(types.items()):
|
||||
field_name = 'zbx_' + type_name
|
||||
if field_name in data:
|
||||
method_name = '_get_' + type_name + '_id'
|
||||
element_name = remove_quotes(data[field_name])
|
||||
get_element_id = getattr(self, method_name, None)
|
||||
if get_element_id:
|
||||
elementid = get_element_id(element_name)
|
||||
if elementid and int(elementid) > 0:
|
||||
element_type.update({
|
||||
'elementtype': type_id,
|
||||
'label': element_name
|
||||
})
|
||||
if StrictVersion(self.api_version) < StrictVersion('3.4'):
|
||||
element_type.update({
|
||||
'elementid': elementid,
|
||||
})
|
||||
else:
|
||||
element_type.update({
|
||||
'elements': [{
|
||||
type_name + 'id': elementid,
|
||||
}],
|
||||
})
|
||||
break
|
||||
else:
|
||||
self._module.fail_json(msg="Failed to find id for %s '%s'" % (type_name, element_name))
|
||||
return element_type
|
||||
|
||||
# get list of map elements (nodes)
|
||||
def _get_selements(self, graph, nodes, icon_ids):
|
||||
selements = []
|
||||
icon_sizes = {}
|
||||
scales = self._get_scales(graph)
|
||||
for selementid, (node, data) in enumerate(nodes.items(), start=1):
|
||||
selement = {
|
||||
'selementid': selementid
|
||||
}
|
||||
data['selementid'] = selementid
|
||||
|
||||
images_info = self._get_images_info(data, icon_ids)
|
||||
selement.update(images_info)
|
||||
image_id = images_info['iconid_off']
|
||||
if image_id not in icon_sizes:
|
||||
icon_sizes[image_id] = self._get_icon_size(image_id)
|
||||
|
||||
pos = self._convert_coordinates(data['pos'], scales, icon_sizes[image_id])
|
||||
selement.update(pos)
|
||||
|
||||
selement['label'] = remove_quotes(node)
|
||||
element_type = self._get_element_type(data)
|
||||
selement.update(element_type)
|
||||
|
||||
label = self._get_label(data)
|
||||
if label:
|
||||
selement['label'] = label
|
||||
|
||||
urls = self._get_urls(data)
|
||||
if urls:
|
||||
selement['urls'] = urls
|
||||
|
||||
selements.append(selement)
|
||||
return selements
|
||||
|
||||
def _get_links(self, nodes, edges):
|
||||
links = {}
|
||||
for edge in edges:
|
||||
link_id = tuple(sorted(edge.obj_dict['points']))
|
||||
node1, node2 = link_id
|
||||
data = edge.obj_dict['attributes']
|
||||
|
||||
if "style" in data and data['style'] == "invis":
|
||||
continue
|
||||
|
||||
if link_id not in links:
|
||||
links[link_id] = {
|
||||
'selementid1': min(nodes[node1]['selementid'], nodes[node2]['selementid']),
|
||||
'selementid2': max(nodes[node1]['selementid'], nodes[node2]['selementid']),
|
||||
}
|
||||
link = links[link_id]
|
||||
|
||||
if "color" not in link:
|
||||
link['color'] = self._get_color_hex(remove_quotes(data.get('color', 'green')))
|
||||
|
||||
if "zbx_draw_style" not in link:
|
||||
link['drawtype'] = self._get_link_draw_style_id(remove_quotes(data.get('zbx_draw_style', 'line')))
|
||||
|
||||
label = self._get_label(data)
|
||||
if label and "label" not in link:
|
||||
link['label'] = label
|
||||
|
||||
triggers = self._get_triggers(data)
|
||||
if triggers:
|
||||
if "linktriggers" not in link:
|
||||
link['linktriggers'] = []
|
||||
link['linktriggers'] += triggers
|
||||
|
||||
return list(links.values())
|
||||
|
||||
def _get_urls(self, data):
|
||||
urls = []
|
||||
for url_raw in [remove_quotes(value) for key, value in data.items() if key.startswith("zbx_url")]:
|
||||
try:
|
||||
name, url = url_raw.split(':', 1)
|
||||
except Exception as e:
|
||||
self._module.fail_json(msg="Failed to parse zbx_url='%s': %s" % (url_raw, e))
|
||||
urls.append({
|
||||
'name': name,
|
||||
'url': url,
|
||||
})
|
||||
return urls
|
||||
|
||||
def _get_triggers(self, data):
|
||||
triggers = []
|
||||
for trigger_definition in [remove_quotes(value) for key, value in data.items() if key.startswith("zbx_trigger")]:
|
||||
triggerid = self._get_trigger_id(trigger_definition)
|
||||
if triggerid:
|
||||
triggers.append({
|
||||
'triggerid': triggerid,
|
||||
'color': self._get_color_hex(remove_quotes(data.get('zbx_trigger_color', 'red'))),
|
||||
'drawtype': self._get_link_draw_style_id(remove_quotes(data.get('zbx_trigger_draw_style', 'bold'))),
|
||||
})
|
||||
else:
|
||||
self._module.fail_json(msg="Failed to find trigger '%s'" % (trigger_definition))
|
||||
return triggers
|
||||
|
||||
@staticmethod
|
||||
def _get_label(data, default=None):
|
||||
if "zbx_label" in data:
|
||||
label = remove_quotes(data['zbx_label']).replace('\\n', '\n')
|
||||
elif "label" in data:
|
||||
label = remove_quotes(data['label'])
|
||||
else:
|
||||
label = default
|
||||
return label
|
||||
|
||||
def _get_sysmap_id(self, map_name):
|
||||
exist_map = self._zapi.map.get({'filter': {'name': map_name}})
|
||||
if exist_map:
|
||||
return exist_map[0]['sysmapid']
|
||||
return None
|
||||
|
||||
def _get_group_id(self, group_name):
|
||||
exist_group = self._zapi.hostgroup.get({'filter': {'name': group_name}})
|
||||
if exist_group:
|
||||
return exist_group[0]['groupid']
|
||||
return None
|
||||
|
||||
def map_exists(self):
|
||||
return bool(self.map_id)
|
||||
|
||||
def create_map(self, map_config):
|
||||
try:
|
||||
if self._module.check_mode:
|
||||
self._module.exit_json(changed=True)
|
||||
result = self._zapi.map.create(map_config)
|
||||
if result:
|
||||
return result
|
||||
except Exception as e:
|
||||
self._module.fail_json(msg="Failed to create map: %s" % e)
|
||||
|
||||
def update_map(self, map_config):
|
||||
if not self.map_id:
|
||||
self._module.fail_json(msg="Failed to update map: map_id is unknown. Try to create_map instead.")
|
||||
try:
|
||||
if self._module.check_mode:
|
||||
self._module.exit_json(changed=True)
|
||||
map_config['sysmapid'] = self.map_id
|
||||
result = self._zapi.map.update(map_config)
|
||||
if result:
|
||||
return result
|
||||
except Exception as e:
|
||||
self._module.fail_json(msg="Failed to update map: %s" % e)
|
||||
|
||||
def delete_map(self):
|
||||
if not self.map_id:
|
||||
self._module.fail_json(msg="Failed to delete map: map_id is unknown.")
|
||||
try:
|
||||
if self._module.check_mode:
|
||||
self._module.exit_json(changed=True)
|
||||
self._zapi.map.delete([self.map_id])
|
||||
except Exception as e:
|
||||
self._module.fail_json(msg="Failed to delete map, Exception: %s" % e)
|
||||
|
||||
def is_exist_map_correct(self, generated_map_config):
|
||||
exist_map_configs = self._zapi.map.get({
|
||||
'sysmapids': self.map_id,
|
||||
'selectLinks': 'extend',
|
||||
'selectSelements': 'extend'
|
||||
})
|
||||
exist_map_config = exist_map_configs[0]
|
||||
if not self._is_dicts_equal(generated_map_config, exist_map_config):
|
||||
return False
|
||||
if not self._is_selements_equal(generated_map_config['selements'], exist_map_config['selements']):
|
||||
return False
|
||||
self._update_ids(generated_map_config, exist_map_config)
|
||||
if not self._is_links_equal(generated_map_config['links'], exist_map_config['links']):
|
||||
return False
|
||||
return True
|
||||
|
||||
def _get_selements_sort_keys(self):
|
||||
keys_to_sort = ['label']
|
||||
if StrictVersion(self.api_version) < StrictVersion('3.4'):
|
||||
keys_to_sort.insert(0, 'elementid')
|
||||
return keys_to_sort
|
||||
|
||||
def _is_selements_equal(self, generated_selements, exist_selements):
|
||||
if len(generated_selements) != len(exist_selements):
|
||||
return False
|
||||
generated_selements_sorted = sorted(generated_selements, key=itemgetter(*self.selements_sort_keys))
|
||||
exist_selements_sorted = sorted(exist_selements, key=itemgetter(*self.selements_sort_keys))
|
||||
for (generated_selement, exist_selement) in zip(generated_selements_sorted, exist_selements_sorted):
|
||||
if StrictVersion(self.api_version) >= StrictVersion("3.4"):
|
||||
if not self._is_elements_equal(generated_selement.get('elements', []), exist_selement.get('elements', [])):
|
||||
return False
|
||||
if not self._is_dicts_equal(generated_selement, exist_selement, ['selementid']):
|
||||
return False
|
||||
if not self._is_urls_equal(generated_selement.get('urls', []), exist_selement.get('urls', [])):
|
||||
return False
|
||||
return True
|
||||
|
||||
def _is_urls_equal(self, generated_urls, exist_urls):
|
||||
if len(generated_urls) != len(exist_urls):
|
||||
return False
|
||||
generated_urls_sorted = sorted(generated_urls, key=itemgetter('name', 'url'))
|
||||
exist_urls_sorted = sorted(exist_urls, key=itemgetter('name', 'url'))
|
||||
for (generated_url, exist_url) in zip(generated_urls_sorted, exist_urls_sorted):
|
||||
if not self._is_dicts_equal(generated_url, exist_url, ['selementid']):
|
||||
return False
|
||||
return True
|
||||
|
||||
def _is_elements_equal(self, generated_elements, exist_elements):
|
||||
if len(generated_elements) != len(exist_elements):
|
||||
return False
|
||||
generated_elements_sorted = sorted(generated_elements, key=lambda k: k.values()[0])
|
||||
exist_elements_sorted = sorted(exist_elements, key=lambda k: k.values()[0])
|
||||
for (generated_element, exist_element) in zip(generated_elements_sorted, exist_elements_sorted):
|
||||
if not self._is_dicts_equal(generated_element, exist_element, ['selementid']):
|
||||
return False
|
||||
return True
|
||||
|
||||
# since generated IDs differ from real Zabbix ones, make real IDs match generated ones
|
||||
def _update_ids(self, generated_map_config, exist_map_config):
|
||||
generated_selements_sorted = sorted(generated_map_config['selements'], key=itemgetter(*self.selements_sort_keys))
|
||||
exist_selements_sorted = sorted(exist_map_config['selements'], key=itemgetter(*self.selements_sort_keys))
|
||||
id_mapping = {}
|
||||
for (generated_selement, exist_selement) in zip(generated_selements_sorted, exist_selements_sorted):
|
||||
id_mapping[exist_selement['selementid']] = generated_selement['selementid']
|
||||
for link in exist_map_config['links']:
|
||||
link['selementid1'] = id_mapping[link['selementid1']]
|
||||
link['selementid2'] = id_mapping[link['selementid2']]
|
||||
if link['selementid2'] < link['selementid1']:
|
||||
link['selementid1'], link['selementid2'] = link['selementid2'], link['selementid1']
|
||||
|
||||
def _is_links_equal(self, generated_links, exist_links):
|
||||
if len(generated_links) != len(exist_links):
|
||||
return False
|
||||
generated_links_sorted = sorted(generated_links, key=itemgetter('selementid1', 'selementid2', 'color', 'drawtype'))
|
||||
exist_links_sorted = sorted(exist_links, key=itemgetter('selementid1', 'selementid2', 'color', 'drawtype'))
|
||||
for (generated_link, exist_link) in zip(generated_links_sorted, exist_links_sorted):
|
||||
if not self._is_dicts_equal(generated_link, exist_link, ['selementid1', 'selementid2']):
|
||||
return False
|
||||
if not self._is_triggers_equal(generated_link.get('linktriggers', []), exist_link.get('linktriggers', [])):
|
||||
return False
|
||||
return True
|
||||
|
||||
def _is_triggers_equal(self, generated_triggers, exist_triggers):
|
||||
if len(generated_triggers) != len(exist_triggers):
|
||||
return False
|
||||
generated_triggers_sorted = sorted(generated_triggers, key=itemgetter('triggerid'))
|
||||
exist_triggers_sorted = sorted(exist_triggers, key=itemgetter('triggerid'))
|
||||
for (generated_trigger, exist_trigger) in zip(generated_triggers_sorted, exist_triggers_sorted):
|
||||
if not self._is_dicts_equal(generated_trigger, exist_trigger):
|
||||
return False
|
||||
return True
|
||||
|
||||
@staticmethod
|
||||
def _is_dicts_equal(d1, d2, exclude_keys=None):
|
||||
if exclude_keys is None:
|
||||
exclude_keys = []
|
||||
for key in d1.keys():
|
||||
if isinstance(d1[key], dict) or isinstance(d1[key], list):
|
||||
continue
|
||||
if key in exclude_keys:
|
||||
continue
|
||||
# compare as strings since Zabbix API returns everything as strings
|
||||
if key not in d2 or str(d2[key]) != str(d1[key]):
|
||||
return False
|
||||
return True
|
||||
|
||||
def _get_host_id(self, hostname):
|
||||
hostid = self._zapi.host.get({'filter': {'host': hostname}})
|
||||
if hostid:
|
||||
return str(hostid[0]['hostid'])
|
||||
|
||||
def _get_trigger_id(self, trigger_definition):
|
||||
try:
|
||||
host, trigger = trigger_definition.split(':', 1)
|
||||
except Exception as e:
|
||||
self._module.fail_json(msg="Failed to parse zbx_trigger='%s': %s" % (trigger_definition, e))
|
||||
triggerid = self._zapi.trigger.get({
|
||||
'host': host,
|
||||
'filter': {
|
||||
'description': trigger
|
||||
}
|
||||
})
|
||||
if triggerid:
|
||||
return str(triggerid[0]['triggerid'])
|
||||
|
||||
def _get_icon_ids(self):
|
||||
icons_list = self._zapi.image.get({})
|
||||
icon_ids = {}
|
||||
for icon in icons_list:
|
||||
icon_ids[icon['name']] = icon['imageid']
|
||||
return icon_ids
|
||||
|
||||
def _get_icon_size(self, icon_id):
|
||||
icons_list = self._zapi.image.get({
|
||||
'imageids': [
|
||||
icon_id
|
||||
],
|
||||
'select_image': True
|
||||
})
|
||||
if len(icons_list) > 0:
|
||||
icon_base64 = icons_list[0]['image']
|
||||
else:
|
||||
self._module.fail_json(msg="Failed to find image with id %s" % icon_id)
|
||||
image = Image.open(BytesIO(base64.b64decode(icon_base64)))
|
||||
icon_width, icon_height = image.size
|
||||
return icon_width, icon_height
|
||||
|
||||
@staticmethod
|
||||
def _get_node_attributes(node):
|
||||
attr = {}
|
||||
if "attributes" in node.obj_dict:
|
||||
attr.update(node.obj_dict['attributes'])
|
||||
pos = node.get_pos()
|
||||
if pos is not None:
|
||||
pos = remove_quotes(pos)
|
||||
xx, yy = pos.split(",")
|
||||
attr['pos'] = (float(xx), float(yy))
|
||||
return attr
|
||||
|
||||
def _get_graph_nodes(self, parent):
|
||||
nodes = {}
|
||||
for node in parent.get_nodes():
|
||||
node_name = node.get_name()
|
||||
if node_name in ('node', 'graph', 'edge'):
|
||||
continue
|
||||
nodes[node_name] = self._get_node_attributes(node)
|
||||
for subgraph in parent.get_subgraphs():
|
||||
nodes.update(self._get_graph_nodes(subgraph))
|
||||
return nodes
|
||||
|
||||
def _get_graph_edges(self, parent):
|
||||
edges = []
|
||||
for edge in parent.get_edges():
|
||||
edges.append(edge)
|
||||
for subgraph in parent.get_subgraphs():
|
||||
edges += self._get_graph_edges(subgraph)
|
||||
return edges
|
||||
|
||||
def _get_scales(self, graph):
|
||||
bb = remove_quotes(graph.get_bb())
|
||||
min_x, min_y, max_x, max_y = bb.split(",")
|
||||
scale_x = (self.width - self.margin * 2) / (float(max_x) - float(min_x)) if float(max_x) != float(min_x) else 0
|
||||
scale_y = (self.height - self.margin * 2) / (float(max_y) - float(min_y)) if float(max_y) != float(min_y) else 0
|
||||
return {
|
||||
'min_x': float(min_x),
|
||||
'min_y': float(min_y),
|
||||
'max_x': float(max_x),
|
||||
'max_y': float(max_y),
|
||||
'scale_x': float(scale_x),
|
||||
'scale_y': float(scale_y),
|
||||
}
|
||||
|
||||
# transform Graphviz coordinates to Zabbix's ones
|
||||
def _convert_coordinates(self, pos, scales, icon_size):
|
||||
return {
|
||||
'x': int((pos[0] - scales['min_x']) * scales['scale_x'] - icon_size[0] / 2 + self.margin),
|
||||
'y': int((scales['max_y'] - pos[1] + scales['min_y']) * scales['scale_y'] - icon_size[1] / 2 + self.margin),
|
||||
}
|
||||
|
||||
def _get_color_hex(self, color_name):
|
||||
if color_name.startswith('#'):
|
||||
color_hex = color_name
|
||||
else:
|
||||
try:
|
||||
color_hex = webcolors.name_to_hex(color_name)
|
||||
except Exception as e:
|
||||
self._module.fail_json(msg="Failed to get RGB hex for color '%s': %s" % (color_name, e))
|
||||
color_hex = color_hex.strip('#').upper()
|
||||
return color_hex
|
||||
|
||||
def _get_link_draw_style_id(self, draw_style):
|
||||
draw_style_ids = {
|
||||
'line': 0,
|
||||
'bold': 2,
|
||||
'dotted': 3,
|
||||
'dashed': 4
|
||||
}
|
||||
try:
|
||||
draw_style_id = draw_style_ids[draw_style]
|
||||
except Exception as e:
|
||||
self._module.fail_json(msg="Failed to find id for draw type '%s': %s" % (draw_style, e))
|
||||
return draw_style_id
|
||||
|
||||
|
||||
def remove_quotes(s):
    """Strip one pair of matching single or double quotes surrounding *s*.

    Graphviz attribute values are often returned quoted; any other string
    (unquoted, mismatched quotes, empty) is returned unchanged.
    """
    # len(s) > 1 guards against an IndexError on the empty string (the
    # original crashed on s[0]) and keeps a lone quote character intact.
    if len(s) > 1 and (s[0] == s[-1]) and s.startswith(("'", '"')):
        s = s[1:-1]
    return s
|
||||
|
||||
|
||||
def main():
    """Ansible entry point: create, update or delete a Zabbix map from DOT data."""
    module = AnsibleModule(
        argument_spec=dict(
            server_url=dict(type='str', required=True, aliases=['url']),
            login_user=dict(type='str', required=True),
            login_password=dict(type='str', required=True, no_log=True),
            http_login_user=dict(type='str', required=False, default=None),
            http_login_password=dict(type='str', required=False, default=None, no_log=True),
            timeout=dict(type='int', default=10),
            validate_certs=dict(type='bool', required=False, default=True),
            name=dict(type='str', required=True, aliases=['map_name']),
            data=dict(type='str', required=False, aliases=['dot_data']),
            width=dict(type='int', default=800),
            height=dict(type='int', default=600),
            state=dict(type='str', default="present", choices=['present', 'absent']),
            default_image=dict(type='str', required=False, aliases=['image']),
            margin=dict(type='int', default=40),
            expand_problem=dict(type='bool', default=True),
            highlight=dict(type='bool', default=True),
            label_type=dict(type='str', default='name', choices=['label', 'ip', 'name', 'status', 'nothing', 'custom']),
        ),
        supports_check_mode=True
    )

    # Fail early with a helpful message for each missing optional library;
    # the import errors were captured at module load time.
    if not HAS_ZABBIX_API:
        module.fail_json(msg=missing_required_lib('zabbix-api', url='https://pypi.org/project/zabbix-api/'), exception=ZBX_IMP_ERR)
    if not HAS_PYDOTPLUS:
        module.fail_json(msg=missing_required_lib('pydotplus', url='https://pypi.org/project/pydotplus/'), exception=PYDOT_IMP_ERR)
    if not HAS_WEBCOLORS:
        module.fail_json(msg=missing_required_lib('webcolors', url='https://pypi.org/project/webcolors/'), exception=WEBCOLORS_IMP_ERR)
    if not HAS_PIL:
        module.fail_json(msg=missing_required_lib('Pillow', url='https://pypi.org/project/Pillow/'), exception=PIL_IMP_ERR)

    server_url = module.params['server_url']
    login_user = module.params['login_user']
    login_password = module.params['login_password']
    http_login_user = module.params['http_login_user']
    http_login_password = module.params['http_login_password']
    timeout = module.params['timeout']
    validate_certs = module.params['validate_certs']

    zbx = None

    # login to zabbix; HTTP basic-auth credentials (user/passwd) are separate
    # from the API login credentials passed to zbx.login()
    try:
        zbx = ZabbixAPI(server_url, timeout=timeout, user=http_login_user, passwd=http_login_password,
                        validate_certs=validate_certs)
        zbx.login(login_user, login_password)
        # make sure the API session is closed however the module exits
        atexit.register(zbx.logout)
    except Exception as e:
        module.fail_json(msg="Failed to connect to Zabbix server: %s" % e)

    sysmap = Map(module, zbx)

    # State machine: delete when absent, otherwise create or (if different)
    # update; Map's methods handle check mode internally via exit_json.
    if sysmap.state == "absent":
        if sysmap.map_exists():
            sysmap.delete_map()
            module.exit_json(changed=True, result="Successfully deleted map: %s" % sysmap.map_name)
        else:
            module.exit_json(changed=False)
    else:
        map_config = sysmap.get_map_config()
        if sysmap.map_exists():
            if sysmap.is_exist_map_correct(map_config):
                # existing map already matches the generated config: idempotent
                module.exit_json(changed=False)
            else:
                sysmap.update_map(map_config)
                module.exit_json(changed=True, result="Successfully updated map: %s" % sysmap.map_name)
        else:
            sysmap.create_map(map_config)
            module.exit_json(changed=True, result="Successfully created map: %s" % sysmap.map_name)
|
||||
|
||||
|
||||
# Run the module entry point only when executed directly (not when imported).
if __name__ == '__main__':
    main()
|
||||
705
plugins/modules/monitoring/zabbix/zabbix_mediatype.py
Normal file
705
plugins/modules/monitoring/zabbix/zabbix_mediatype.py
Normal file
@@ -0,0 +1,705 @@
|
||||
#!/usr/bin/python
|
||||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||
|
||||
from __future__ import absolute_import, division, print_function
|
||||
__metaclass__ = type
|
||||
|
||||
|
||||
ANSIBLE_METADATA = {'metadata_version': '1.1',
|
||||
'status': ['preview'],
|
||||
'supported_by': 'community'}
|
||||
|
||||
|
||||
DOCUMENTATION = r'''
|
||||
---
|
||||
module: zabbix_mediatype
|
||||
short_description: Create/Update/Delete Zabbix media types
|
||||
description:
|
||||
- This module allows you to create, modify and delete Zabbix media types.
|
||||
author:
|
||||
- Ruben Tsirunyan (@rubentsirunyan)
|
||||
requirements:
|
||||
- "zabbix-api >= 0.5.4"
|
||||
|
||||
options:
|
||||
name:
|
||||
type: 'str'
|
||||
description:
|
||||
- Name of the media type.
|
||||
required: true
|
||||
state:
|
||||
type: 'str'
|
||||
description:
|
||||
- Desired state of the mediatype.
|
||||
- On C(present), it will create a mediatype if it does not exist or update the mediatype if the associated data is different.
|
||||
- On C(absent), it will remove the mediatype if it exists.
|
||||
choices:
|
||||
- present
|
||||
- absent
|
||||
default: 'present'
|
||||
type:
|
||||
type: 'str'
|
||||
description:
|
||||
- Type of the media type.
|
||||
- Media types I(jabber) and I(ez_texting) workable only with Zabbix 4.2 or less.
|
||||
choices:
|
||||
- email
|
||||
- script
|
||||
- sms
|
||||
- jabber
|
||||
- ez_texting
|
||||
required: true
|
||||
status:
|
||||
type: 'str'
|
||||
description:
|
||||
- Whether the media type is enabled or no.
|
||||
choices:
|
||||
- enabled
|
||||
- disabled
|
||||
default: 'enabled'
|
||||
max_sessions:
|
||||
type: 'int'
|
||||
description:
|
||||
- The maximum number of alerts that can be processed in parallel.
|
||||
- Possible value is 1 when I(type=sms) and 0-100 otherwise.
|
||||
default: 1
|
||||
max_attempts:
|
||||
type: 'int'
|
||||
description:
|
||||
- The maximum number of attempts to send an alert.
|
||||
- Possible range is 0-10
|
||||
default: 3
|
||||
attempt_interval:
|
||||
type: 'int'
|
||||
description:
|
||||
- The interval between retry attempts.
|
||||
- Possible range is 0-60
|
||||
default: 10
|
||||
script_name:
|
||||
type: 'str'
|
||||
description:
|
||||
- The name of the executed script.
|
||||
- Required when I(type=script).
|
||||
script_params:
|
||||
type: 'list'
|
||||
elements: str
|
||||
description:
|
||||
- List of script parameters.
|
||||
- Required when I(type=script).
|
||||
gsm_modem:
|
||||
type: 'str'
|
||||
description:
|
||||
- Serial device name of the gsm modem.
|
||||
- Required when I(type=sms).
|
||||
username:
|
||||
type: 'str'
|
||||
description:
|
||||
- Username or Jabber identifier.
|
||||
- Required when I(type=jabber) or I(type=ez_texting).
|
||||
- Required when I(type=email) and I(smtp_authentication=true).
|
||||
password:
|
||||
type: 'str'
|
||||
description:
|
||||
- Authentication password.
|
||||
- Required when I(type=jabber) or I(type=ez_texting).
|
||||
- Required when I(type=email) and I(smtp_authentication=true).
|
||||
smtp_server:
|
||||
type: 'str'
|
||||
description:
|
||||
- SMTP server host.
|
||||
- Required when I(type=email).
|
||||
default: 'localhost'
|
||||
smtp_server_port:
|
||||
type: 'int'
|
||||
description:
|
||||
- SMTP server port.
|
||||
- Required when I(type=email).
|
||||
default: 25
|
||||
smtp_helo:
|
||||
type: 'str'
|
||||
description:
|
||||
- SMTP HELO.
|
||||
- Required when I(type=email).
|
||||
default: 'localhost'
|
||||
smtp_email:
|
||||
type: 'str'
|
||||
description:
|
||||
- Email address from which notifications will be sent.
|
||||
- Required when I(type=email).
|
||||
smtp_authentication:
|
||||
type: 'bool'
|
||||
description:
|
||||
- Whether SMTP authentication with username and password should be enabled or not.
|
||||
- If set to C(true), C(username) and C(password) should be specified.
|
||||
default: false
|
||||
smtp_security:
|
||||
type: 'str'
|
||||
description:
|
||||
- SMTP connection security level to use.
|
||||
choices:
|
||||
- None
|
||||
- STARTTLS
|
||||
- SSL/TLS
|
||||
smtp_verify_host:
|
||||
type: 'bool'
|
||||
description:
|
||||
- SSL verify host for SMTP.
|
||||
- Can be specified when I(smtp_security=STARTTLS) or I(smtp_security=SSL/TLS)
|
||||
default: false
|
||||
smtp_verify_peer:
|
||||
type: 'bool'
|
||||
description:
|
||||
- SSL verify peer for SMTP.
|
||||
- Can be specified when I(smtp_security=STARTTLS) or I(smtp_security=SSL/TLS)
|
||||
default: false
|
||||
message_text_limit:
|
||||
type: 'str'
|
||||
description:
|
||||
- The message text limit.
|
||||
- Required when I(type=ez_texting).
|
||||
- 160 characters for USA and 136 characters for Canada.
|
||||
choices:
|
||||
- USA
|
||||
- Canada
|
||||
extends_documentation_fragment:
|
||||
- community.general.zabbix
|
||||
|
||||
|
||||
'''
|
||||
|
||||
RETURN = r''' # '''
|
||||
|
||||
EXAMPLES = r'''
|
||||
- name: 'Create an email mediatype with SMTP authentication'
|
||||
zabbix_mediatype:
|
||||
name: "Ops email"
|
||||
server_url: "http://example.com/zabbix/"
|
||||
login_user: Admin
|
||||
login_password: "zabbix"
|
||||
type: 'email'
|
||||
smtp_server: 'example.com'
|
||||
smtp_server_port: 2000
|
||||
smtp_email: 'ops@example.com'
|
||||
smtp_authentication: true
|
||||
username: 'smtp_user'
|
||||
password: 'smtp_pass'
|
||||
|
||||
- name: 'Create a script mediatype'
|
||||
zabbix_mediatype:
|
||||
name: "my script"
|
||||
server_url: "http://example.com/zabbix/"
|
||||
login_user: Admin
|
||||
login_password: "zabbix"
|
||||
type: 'script'
|
||||
script_name: 'my_script.py'
|
||||
script_params:
|
||||
- 'arg1'
|
||||
- 'arg2'
|
||||
|
||||
- name: 'Create a jabber mediatype'
|
||||
zabbix_mediatype:
|
||||
name: "My jabber"
|
||||
server_url: "http://example.com/zabbix/"
|
||||
login_user: Admin
|
||||
login_password: "zabbix"
|
||||
type: 'jabber'
|
||||
username: 'jabber_id'
|
||||
password: 'jabber_pass'
|
||||
|
||||
- name: 'Create an SMS mediatype'
|
||||
zabbix_mediatype:
|
||||
name: "My SMS Mediatype"
|
||||
server_url: "http://example.com/zabbix/"
|
||||
login_user: Admin
|
||||
login_password: "zabbix"
|
||||
type: 'sms'
|
||||
gsm_modem: '/dev/ttyS0'
|
||||
'''
|
||||
|
||||
|
||||
import atexit
|
||||
import traceback
|
||||
|
||||
|
||||
from ansible.module_utils.basic import AnsibleModule, missing_required_lib
|
||||
from distutils.version import LooseVersion
|
||||
|
||||
|
||||
try:
|
||||
from zabbix_api import ZabbixAPI
|
||||
HAS_ZABBIX_API = True
|
||||
except ImportError:
|
||||
ZBX_IMP_ERR = traceback.format_exc()
|
||||
HAS_ZABBIX_API = False
|
||||
|
||||
|
||||
def to_numeric_value(value, strs):
    """Translate a symbolic option *value* to its Zabbix numeric code.

    *strs* maps symbolic names to numeric-code strings; an unknown value
    yields None, mirroring dict.get().
    """
    if value in strs:
        return strs[value]
    return None
|
||||
|
||||
|
||||
def validate_params(module, params):
    """Enforce conditionally-required module arguments.

    Each entry of *params* has the shape
    ``['argument_key', 'argument_value', ['required_arg_1', 'required_arg_2']]``
    (the same layout as AnsibleModule's `required_if`).  Whenever
    ``module.params[argument_key] == argument_value`` and any of the listed
    required arguments is None, the module is failed with a message naming
    the triggering key/value pair and the full requirement list.

    Args:
        module: AnsibleModule object.
        params (list): conditional requirement specs as described above.
    """
    for key, value, required in params:
        if module.params[key] != value:
            continue
        if any(module.params[name] is None for name in required):
            module.fail_json(
                msg="Following arguments are required when {key} is {value}: {arguments}".format(
                    key=key,
                    value=value,
                    arguments=', '.join(required)
                )
            )
|
||||
|
||||
|
||||
# Numeric codes expected by the Zabbix API for the media type "type" field.
_TRANSPORT_TYPE_CODES = {
    'email': '0',
    'script': '1',
    'sms': '2',
    'jabber': '3',
    'ez_texting': '100',
}

# Numeric codes for the media type "status" field.
_STATUS_CODES = {'enabled': '0', 'disabled': '1'}

# Numeric codes for boolean SMTP flags (keys are str() of Python booleans).
_BOOL_CODES = {'False': '0', 'True': '1'}


def _common_parameters(**kwargs):
    """Return the Zabbix API parameters shared by every media type."""
    return dict(
        description=kwargs['name'],
        status=_STATUS_CODES.get(kwargs['status']),
        type=_TRANSPORT_TYPE_CODES.get(kwargs['transport_type']),
        maxsessions=str(kwargs['max_sessions']),
        maxattempts=str(kwargs['max_attempts']),
        attempt_interval=str(kwargs['attempt_interval']),
    )


def construct_parameters(**kwargs):
    """Translates data to a format suitable for Zabbix API and filters
    the ones that are related to the specified mediatype type.

    The common fields (name, status, type, retry settings) were previously
    duplicated in every transport-type branch; they are now built once by
    _common_parameters() and extended per transport type.

    Args:
        **kwargs: Arguments passed to the module.

    Returns:
        A dictionary of arguments that are related to kwargs['transport_type'],
        and are in a format that is understandable by Zabbix API.
        If the transport type is unsupported for the detected Zabbix version,
        a dictionary with 'unsupported_parameter' and 'zbx_api_version' keys
        is returned instead.
    """
    transport_type = kwargs['transport_type']
    parameters = _common_parameters(**kwargs)

    if transport_type == 'email':
        parameters.update(
            smtp_server=kwargs['smtp_server'],
            smtp_port=str(kwargs['smtp_server_port']),
            smtp_helo=kwargs['smtp_helo'],
            smtp_email=kwargs['smtp_email'],
            smtp_security={'None': '0', 'STARTTLS': '1', 'SSL/TLS': '2'}.get(str(kwargs['smtp_security'])),
            smtp_authentication=_BOOL_CODES.get(str(kwargs['smtp_authentication'])),
            smtp_verify_host=_BOOL_CODES.get(str(kwargs['smtp_verify_host'])),
            smtp_verify_peer=_BOOL_CODES.get(str(kwargs['smtp_verify_peer'])),
            username=kwargs['username'],
            passwd=kwargs['password'],
        )
        return parameters

    if transport_type == 'script':
        if kwargs['script_params'] is None:
            _script_params = ''  # ZBX-15706
        else:
            _script_params = '\n'.join(str(i) for i in kwargs['script_params']) + '\n'
        parameters.update(
            exec_path=kwargs['script_name'],
            exec_params=_script_params,
        )
        return parameters

    if transport_type == 'sms':
        parameters.update(gsm_modem=kwargs['gsm_modem'])
        return parameters

    # 'jabber' and 'ez_texting' were removed from the Zabbix API after 4.2.
    if transport_type == 'jabber' and LooseVersion(kwargs['zbx_api_version']) <= LooseVersion('4.2'):
        parameters.update(
            username=kwargs['username'],
            passwd=kwargs['password'],
        )
        return parameters

    if transport_type == 'ez_texting' and LooseVersion(kwargs['zbx_api_version']) <= LooseVersion('4.2'):
        parameters.update(
            username=kwargs['username'],
            passwd=kwargs['password'],
            exec_path={'USA': '0', 'Canada': '1'}.get(kwargs['message_text_limit']),
        )
        return parameters

    return {'unsupported_parameter': transport_type, 'zbx_api_version': kwargs['zbx_api_version']}
|
||||
|
||||
|
||||
def check_if_mediatype_exists(module, zbx, name, zbx_api_version):
    """Checks if mediatype exists.

    Args:
        module: AnsibleModule object
        zbx: ZabbixAPI object
        name: Zabbix mediatype name
        zbx_api_version (str): Zabbix API version string, e.g. '4.4'

    Returns:
        Tuple of (True, `id of the mediatype`) if mediatype exists, (False, None) otherwise.
        On API errors the module fails via fail_json() instead of returning.
    """
    filter_key_name = 'description'
    if LooseVersion(zbx_api_version) >= LooseVersion('4.4'):
        # description key changed to name key from zabbix 4.4
        filter_key_name = 'name'

    try:
        mediatype_list = zbx.mediatype.get({
            'output': 'extend',
            'filter': {filter_key_name: [name]}
        })
        if len(mediatype_list) < 1:
            return False, None
        else:
            # If several mediatypes match, only the first one's ID is used.
            return True, mediatype_list[0]['mediatypeid']
    except Exception as e:
        module.fail_json(msg="Failed to get ID of the mediatype '{name}': {e}".format(name=name, e=e))
|
||||
|
||||
|
||||
def diff(existing, new):
    """Constructs the diff for Ansible's --diff option.

    Args:
        existing (dict): Existing mediatype data.
        new (dict): New mediatype data.

    Returns:
        A dictionary like {'before': existing, 'after': new}
        with filtered empty values.
    """
    # Only the keys present in the new data participate in the diff;
    # None values are rendered as empty strings on the 'after' side.
    before = {key: existing[key] for key in new}
    after = {key: ('' if value is None else value) for key, value in new.items()}
    return {'before': before, 'after': after}
|
||||
|
||||
|
||||
def get_update_params(module, zbx, mediatype_id, **kwargs):
    """Filters only the parameters that are different and need to be updated.

    Args:
        module: AnsibleModule object.
        zbx: ZabbixAPI object.
        mediatype_id (int): ID of the mediatype to be updated.
        **kwargs: Parameters for the new mediatype.

    Returns:
        A tuple where the first element is a dictionary of parameters
        that need to be updated and the second one is a dictionary
        returned by diff() function with
        existing mediatype data and new params passed to it.
    """
    existing_mediatype = zbx.mediatype.get({
        'output': 'extend',
        'mediatypeids': [mediatype_id]
    })[0]

    # A changed transport type invalidates all existing fields,
    # so everything is resent.
    if existing_mediatype['type'] != kwargs['type']:
        return kwargs, diff(existing_mediatype, kwargs)

    params_to_update = {}
    for key, value in kwargs.items():
        # A None value matching an existing empty string is not a change.
        unchanged_empty = value is None and existing_mediatype[key] == ''
        if not unchanged_empty and value != existing_mediatype[key]:
            params_to_update[key] = value
    return params_to_update, diff(existing_mediatype, kwargs)
|
||||
|
||||
|
||||
def delete_mediatype(module, zbx, mediatype_id):
    """Delete the mediatype with the given ID.

    Fails the module on any Zabbix API error; otherwise returns the
    API response of mediatype.delete.
    """
    try:
        result = zbx.mediatype.delete([mediatype_id])
    except Exception as e:
        module.fail_json(msg="Failed to delete mediatype '{_id}': {e}".format(_id=mediatype_id, e=e))
    else:
        return result
|
||||
|
||||
|
||||
def update_mediatype(module, zbx, **kwargs):
    """Update an existing mediatype and return its ID.

    The caller assigns the return value back to mediatype_id and reports
    it in the module's exit message; previously nothing was returned, so
    the message showed 'ID: None'.

    Args:
        module: AnsibleModule object.
        zbx: ZabbixAPI object.
        **kwargs: New mediatype parameters; must include 'mediatypeid'.

    Returns:
        The ID of the updated mediatype.
    """
    try:
        zbx.mediatype.update(kwargs)
    except Exception as e:
        module.fail_json(msg="Failed to update mediatype '{_id}': {e}".format(_id=kwargs['mediatypeid'], e=e))
    else:
        return kwargs['mediatypeid']
|
||||
|
||||
|
||||
def create_mediatype(module, zbx, **kwargs):
    """Create a new mediatype and return its ID.

    Fixes two defects: the created ID was never returned (the caller
    assigns it and reports 'ID: None'), and the failure message read
    kwargs['description'] which raises KeyError on Zabbix >= 4.4 where
    main() renames that key to 'name' before calling this function.

    Args:
        module: AnsibleModule object.
        zbx: ZabbixAPI object.
        **kwargs: Parameters of the mediatype to create.

    Returns:
        The ID of the created mediatype, taken from the 'mediatypeids'
        list of the Zabbix API response.
    """
    try:
        result = zbx.mediatype.create(kwargs)
    except Exception as e:
        # 'description' exists up to Zabbix 4.2; 'name' from 4.4 onwards.
        name = kwargs.get('description', kwargs.get('name'))
        module.fail_json(msg="Failed to create mediatype '{name}': {e}".format(name=name, e=e))
    else:
        return result['mediatypeids'][0]
|
||||
|
||||
|
||||
def main():
    """Module entry point: create, update or delete a Zabbix mediatype."""
    argument_spec = dict(
        server_url=dict(type='str', required=True, aliases=['url']),
        login_user=dict(type='str', required=True),
        login_password=dict(type='str', required=True, no_log=True),
        http_login_user=dict(type='str', required=False, default=None),
        http_login_password=dict(type='str', required=False, default=None, no_log=True),
        validate_certs=dict(type='bool', required=False, default=True), timeout=dict(type='int', default=10),
        name=dict(type='str', required=True),
        state=dict(type='str', default='present', choices=['present', 'absent']),
        type=dict(type='str', choices=['email', 'script', 'sms', 'jabber', 'ez_texting'], required=True),
        status=dict(type='str', default='enabled', choices=['enabled', 'disabled'], required=False),
        max_sessions=dict(type='int', default=1, required=False),
        max_attempts=dict(type='int', default=3, required=False),
        attempt_interval=dict(type='int', default=10, required=False),
        # Script
        script_name=dict(type='str', required=False),
        script_params=dict(type='list', required=False),
        # SMS
        gsm_modem=dict(type='str', required=False),
        # Jabber
        username=dict(type='str', required=False),
        password=dict(type='str', required=False, no_log=True),
        # Email
        smtp_server=dict(type='str', default='localhost', required=False),
        smtp_server_port=dict(type='int', default=25, required=False),
        smtp_helo=dict(type='str', default='localhost', required=False),
        smtp_email=dict(type='str', required=False),
        smtp_security=dict(type='str', required=False, choices=['None', 'STARTTLS', 'SSL/TLS']),
        smtp_authentication=dict(type='bool', default=False, required=False),
        smtp_verify_host=dict(type='bool', default=False, required=False),
        smtp_verify_peer=dict(type='bool', default=False, required=False),
        # EZ Text
        message_text_limit=dict(type='str', required=False, choices=['USA', 'Canada'])
    )

    # Per-transport-type requirements; same layout as AnsibleModule's
    # `required_if`, enforced by validate_params() only when state=present.
    required_params = [
        ['type', 'email', ['smtp_email']],
        ['type', 'script', ['script_name']],
        ['type', 'sms', ['gsm_modem']],
        ['type', 'jabber', ['username', 'password']],
        ['type', 'ez_texting', ['username', 'password', 'message_text_limit']],
        ['smtp_authentication', True, ['username', 'password']]
    ]

    module = AnsibleModule(
        argument_spec=argument_spec,
        supports_check_mode=True
    )

    # Deleting a mediatype only needs its name, so the per-type
    # requirements are validated for state=present only.
    if module.params['state'] == 'present':
        validate_params(module, required_params)

    if not HAS_ZABBIX_API:
        module.fail_json(msg=missing_required_lib('zabbix-api', url='https://pypi.org/project/zabbix-api/'), exception=ZBX_IMP_ERR)

    server_url = module.params['server_url']
    login_user = module.params['login_user']
    login_password = module.params['login_password']
    http_login_user = module.params['http_login_user']
    http_login_password = module.params['http_login_password']
    validate_certs = module.params['validate_certs']
    state = module.params['state']
    timeout = module.params['timeout']
    name = module.params['name']
    transport_type = module.params['type']
    status = module.params['status']
    max_sessions = module.params['max_sessions']
    max_attempts = module.params['max_attempts']
    attempt_interval = module.params['attempt_interval']
    # Script
    script_name = module.params['script_name']
    script_params = module.params['script_params']
    # SMS
    gsm_modem = module.params['gsm_modem']
    # Jabber
    username = module.params['username']
    password = module.params['password']
    # Email
    smtp_server = module.params['smtp_server']
    smtp_server_port = module.params['smtp_server_port']
    smtp_helo = module.params['smtp_helo']
    smtp_email = module.params['smtp_email']
    smtp_security = module.params['smtp_security']
    smtp_authentication = module.params['smtp_authentication']
    smtp_verify_host = module.params['smtp_verify_host']
    smtp_verify_peer = module.params['smtp_verify_peer']
    # EZ Text
    message_text_limit = module.params['message_text_limit']

    zbx = None

    # login to zabbix
    try:
        zbx = ZabbixAPI(server_url, timeout=timeout, user=http_login_user, passwd=http_login_password,
                        validate_certs=validate_certs)
        zbx.login(login_user, login_password)
        # Log out when the module process exits, whatever the outcome.
        atexit.register(zbx.logout)
    except Exception as e:
        module.fail_json(msg="Failed to connect to Zabbix server: %s" % e)

    # Only the major.minor part (e.g. '4.4') is used for version checks.
    zbx_api_version = zbx.api_version()[:3]
    mediatype_exists, mediatype_id = check_if_mediatype_exists(module, zbx, name, zbx_api_version)

    parameters = construct_parameters(
        name=name,
        transport_type=transport_type,
        status=status,
        max_sessions=max_sessions,
        max_attempts=max_attempts,
        attempt_interval=attempt_interval,
        script_name=script_name,
        script_params=script_params,
        gsm_modem=gsm_modem,
        username=username,
        password=password,
        smtp_server=smtp_server,
        smtp_server_port=smtp_server_port,
        smtp_helo=smtp_helo,
        smtp_email=smtp_email,
        smtp_security=smtp_security,
        smtp_authentication=smtp_authentication,
        smtp_verify_host=smtp_verify_host,
        smtp_verify_peer=smtp_verify_peer,
        message_text_limit=message_text_limit,
        zbx_api_version=zbx_api_version
    )

    # construct_parameters() signals an unsupported transport type for the
    # detected Zabbix version with this sentinel key.
    if 'unsupported_parameter' in parameters:
        module.fail_json(msg="%s is unsupported for Zabbix version %s" % (parameters['unsupported_parameter'], parameters['zbx_api_version']))

    if LooseVersion(zbx_api_version) >= LooseVersion('4.4'):
        # description key changed to name key from zabbix 4.4
        parameters['name'] = parameters.pop('description')

    if mediatype_exists:
        if state == 'absent':
            if module.check_mode:
                module.exit_json(
                    changed=True,
                    msg="Mediatype would have been deleted. Name: {name}, ID: {_id}".format(
                        name=name,
                        _id=mediatype_id
                    )
                )
            mediatype_id = delete_mediatype(module, zbx, mediatype_id)
            module.exit_json(
                changed=True,
                msg="Mediatype deleted. Name: {name}, ID: {_id}".format(
                    name=name,
                    _id=mediatype_id
                )
            )
        else:
            # NOTE(review): the local name 'diff' shadows the module-level
            # diff() function from here on.
            params_to_update, diff = get_update_params(module, zbx, mediatype_id, **parameters)
            if params_to_update == {}:
                module.exit_json(
                    changed=False,
                    msg="Mediatype is up to date: {name}".format(name=name)
                )
            else:
                if module.check_mode:
                    module.exit_json(
                        changed=True,
                        diff=diff,
                        msg="Mediatype would have been updated. Name: {name}, ID: {_id}".format(
                            name=name,
                            _id=mediatype_id
                        )
                    )
                mediatype_id = update_mediatype(
                    module, zbx,
                    mediatypeid=mediatype_id,
                    **params_to_update
                )
                module.exit_json(
                    changed=True,
                    diff=diff,
                    msg="Mediatype updated. Name: {name}, ID: {_id}".format(
                        name=name,
                        _id=mediatype_id
                    )
                )
    else:
        if state == "absent":
            # Nothing to delete.
            module.exit_json(changed=False)
        else:
            if module.check_mode:
                module.exit_json(
                    changed=True,
                    msg="Mediatype would have been created. Name: {name}, ID: {_id}".format(
                        name=name,
                        _id=mediatype_id
                    )
                )
            mediatype_id = create_mediatype(module, zbx, **parameters)
            module.exit_json(
                changed=True,
                msg="Mediatype created: {name}, ID: {_id}".format(
                    name=name,
                    _id=mediatype_id
                )
            )
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
main()
|
||||
471
plugins/modules/monitoring/zabbix/zabbix_proxy.py
Normal file
471
plugins/modules/monitoring/zabbix/zabbix_proxy.py
Normal file
@@ -0,0 +1,471 @@
|
||||
#!/usr/bin/python
|
||||
# -*- coding: utf-8 -*-
|
||||
# (c) 2017, Alen Komic
|
||||
#
|
||||
# This file is part of Ansible
|
||||
#
|
||||
# Ansible is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation, either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# Ansible is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
|
||||
#
|
||||
from __future__ import (absolute_import, division, print_function)
|
||||
__metaclass__ = type
|
||||
|
||||
ANSIBLE_METADATA = {'metadata_version': '1.1',
|
||||
'status': ['preview'],
|
||||
'supported_by': 'community'}
|
||||
|
||||
|
||||
DOCUMENTATION = r'''
|
||||
---
|
||||
module: zabbix_proxy
|
||||
short_description: Create/delete/get/update Zabbix proxies
|
||||
description:
|
||||
- This module allows you to create, modify, get and delete Zabbix proxy entries.
|
||||
author:
|
||||
- "Alen Komic (@akomic)"
|
||||
requirements:
|
||||
- "python >= 2.6"
|
||||
- "zabbix-api >= 0.5.4"
|
||||
options:
|
||||
proxy_name:
|
||||
description:
|
||||
- Name of the proxy in Zabbix.
|
||||
required: true
|
||||
type: str
|
||||
proxy_address:
|
||||
description:
|
||||
- Comma-delimited list of IP/CIDR addresses or DNS names to accept active proxy requests from.
|
||||
- Requires I(status=active).
|
||||
- Works only with >= Zabbix 4.0. ( remove option for <= 4.0 )
|
||||
required: false
|
||||
type: str
|
||||
description:
|
||||
description:
|
||||
- Description of the proxy.
|
||||
required: false
|
||||
type: str
|
||||
status:
|
||||
description:
|
||||
- Type of proxy. (4 - active, 5 - passive)
|
||||
required: false
|
||||
choices: ['active', 'passive']
|
||||
default: "active"
|
||||
type: str
|
||||
tls_connect:
|
||||
description:
|
||||
- Connections to proxy.
|
||||
required: false
|
||||
choices: ['no_encryption','PSK','certificate']
|
||||
default: 'no_encryption'
|
||||
type: str
|
||||
tls_accept:
|
||||
description:
|
||||
- Connections from proxy.
|
||||
required: false
|
||||
choices: ['no_encryption','PSK','certificate']
|
||||
default: 'no_encryption'
|
||||
type: str
|
||||
ca_cert:
|
||||
description:
|
||||
- Certificate issuer.
|
||||
required: false
|
||||
aliases: [ tls_issuer ]
|
||||
type: str
|
||||
tls_subject:
|
||||
description:
|
||||
- Certificate subject.
|
||||
required: false
|
||||
type: str
|
||||
tls_psk_identity:
|
||||
description:
|
||||
- PSK identity. Required if either I(tls_connect) or I(tls_accept) has PSK enabled.
|
||||
required: false
|
||||
type: str
|
||||
tls_psk:
|
||||
description:
|
||||
- The preshared key, at least 32 hex digits. Required if either I(tls_connect) or I(tls_accept) has PSK enabled.
|
||||
required: false
|
||||
type: str
|
||||
state:
|
||||
description:
|
||||
- State of the proxy.
|
||||
- On C(present), it will create if proxy does not exist or update the proxy if the associated data is different.
|
||||
- On C(absent) will remove a proxy if it exists.
|
||||
required: false
|
||||
choices: ['present', 'absent']
|
||||
default: "present"
|
||||
type: str
|
||||
interface:
|
||||
description:
|
||||
- Dictionary with params for the interface when proxy is in passive mode.
|
||||
- For more information, review proxy interface documentation at
|
||||
- U(https://www.zabbix.com/documentation/4.0/manual/api/reference/proxy/object#proxy_interface).
|
||||
required: false
|
||||
suboptions:
|
||||
useip:
|
||||
type: int
|
||||
description:
|
||||
- Connect to proxy interface with IP address instead of DNS name.
|
||||
- 0 (don't use ip), 1 (use ip).
|
||||
default: 0
|
||||
choices: [0, 1]
|
||||
ip:
|
||||
type: str
|
||||
description:
|
||||
- IP address used by proxy interface.
|
||||
- Required if I(useip=1).
|
||||
default: ''
|
||||
dns:
|
||||
type: str
|
||||
description:
|
||||
- DNS name of the proxy interface.
|
||||
- Required if I(useip=0).
|
||||
default: ''
|
||||
port:
|
||||
type: str
|
||||
description:
|
||||
- Port used by proxy interface.
|
||||
default: '10051'
|
||||
type:
|
||||
type: int
|
||||
description:
|
||||
- Interface type to add.
|
||||
- This suboption is currently ignored for Zabbix proxy.
|
||||
- This suboption is deprecated since Ansible 2.10 and will eventually be removed in 2.14.
|
||||
required: false
|
||||
default: 0
|
||||
main:
|
||||
type: int
|
||||
description:
|
||||
- Whether the interface is used as default.
|
||||
- This suboption is currently ignored for Zabbix proxy.
|
||||
- This suboption is deprecated since Ansible 2.10 and will eventually be removed in 2.14.
|
||||
required: false
|
||||
default: 0
|
||||
default: {}
|
||||
type: dict
|
||||
|
||||
extends_documentation_fragment:
|
||||
- community.general.zabbix
|
||||
|
||||
'''
|
||||
|
||||
EXAMPLES = r'''
|
||||
- name: Create or update a proxy with proxy type active
|
||||
local_action:
|
||||
module: zabbix_proxy
|
||||
server_url: http://monitor.example.com
|
||||
login_user: username
|
||||
login_password: password
|
||||
proxy_name: ExampleProxy
|
||||
description: ExampleProxy
|
||||
status: active
|
||||
state: present
|
||||
proxy_address: ExampleProxy.local
|
||||
|
||||
- name: Create a new passive proxy using only it's IP
|
||||
local_action:
|
||||
module: zabbix_proxy
|
||||
server_url: http://monitor.example.com
|
||||
login_user: username
|
||||
login_password: password
|
||||
proxy_name: ExampleProxy
|
||||
description: ExampleProxy
|
||||
status: passive
|
||||
state: present
|
||||
interface:
|
||||
useip: 1
|
||||
ip: 10.1.1.2
|
||||
port: 10051
|
||||
|
||||
- name: Create a new passive proxy using only it's DNS
|
||||
local_action:
|
||||
module: zabbix_proxy
|
||||
server_url: http://monitor.example.com
|
||||
login_user: username
|
||||
login_password: password
|
||||
proxy_name: ExampleProxy
|
||||
description: ExampleProxy
|
||||
status: passive
|
||||
state: present
|
||||
interface:
|
||||
dns: proxy.example.com
|
||||
port: 10051
|
||||
'''
|
||||
|
||||
RETURN = r''' # '''
|
||||
|
||||
|
||||
import traceback
|
||||
import atexit
|
||||
|
||||
from ansible.module_utils.basic import AnsibleModule, missing_required_lib
|
||||
try:
|
||||
from zabbix_api import ZabbixAPI
|
||||
|
||||
HAS_ZABBIX_API = True
|
||||
except ImportError:
|
||||
ZBX_IMP_ERR = traceback.format_exc()
|
||||
HAS_ZABBIX_API = False
|
||||
|
||||
|
||||
class Proxy(object):
    """Wrapper around the Zabbix proxy API: lookup, create, update, delete.

    All mutating methods terminate the Ansible module via exit_json() /
    fail_json() rather than returning to the caller.
    """

    def __init__(self, module, zbx):
        # module: AnsibleModule instance (used for check_mode and exits).
        # zbx: authenticated ZabbixAPI client.
        self._module = module
        self._zapi = zbx
        # Filled by proxy_exists() with the matching proxy's data; read
        # later by compile_interface_params() and update_proxy().
        self.existing_data = None

    def proxy_exists(self, proxy_name):
        """Look up a proxy by host name.

        Returns the proxy ID (and caches its data in self.existing_data)
        when found, otherwise the empty API result list (falsy).
        """
        result = self._zapi.proxy.get({
            'output': 'extend', 'selectInterface': 'extend',
            'filter': {'host': proxy_name}})

        if len(result) > 0 and 'proxyid' in result[0]:
            self.existing_data = result[0]
            return result[0]['proxyid']
        else:
            return result

    def add_proxy(self, data):
        """Create a proxy from *data* and exit the module with the result."""
        try:
            if self._module.check_mode:
                self._module.exit_json(changed=True)

            # Send only the fields that were actually set (truthy values).
            parameters = {}
            for item in data:
                if data[item]:
                    parameters[item] = data[item]

            # proxy_address only applies to active proxies (status '5').
            if 'proxy_address' in data and data['status'] != '5':
                parameters.pop('proxy_address', False)

            # interface only applies to passive proxies (status '6').
            if 'interface' in data and data['status'] != '6':
                parameters.pop('interface', False)

            proxy_ids_list = self._zapi.proxy.create(parameters)
            self._module.exit_json(changed=True,
                                   result="Successfully added proxy %s (%s)" %
                                          (data['host'], data['status']))
            # NOTE(review): exit_json() raises SystemExit, so the return
            # below is unreachable and callers never receive the new id.
            if len(proxy_ids_list) >= 1:
                return proxy_ids_list['proxyids'][0]
        except Exception as e:
            self._module.fail_json(msg="Failed to create proxy %s: %s" %
                                       (data['host'], e))

    def delete_proxy(self, proxy_id, proxy_name):
        """Delete the proxy with *proxy_id* and exit the module."""
        try:
            if self._module.check_mode:
                self._module.exit_json(changed=True)
            self._zapi.proxy.delete([proxy_id])
            self._module.exit_json(changed=True,
                                   result="Successfully deleted"
                                          + " proxy %s" % proxy_name)
        except Exception as e:
            self._module.fail_json(msg="Failed to delete proxy %s: %s" %
                                       (proxy_name, str(e)))

    def compile_interface_params(self, new_interface):
        """Merge *new_interface* onto the proxy's stored interface.

        Returns the merged interface (all values stringified) when it
        differs from the stored one, otherwise an empty dict meaning
        "no interface change needed".  Mutates *new_interface* by
        removing the ignored 'type' and 'main' suboptions.
        """
        old_interface = {}
        if 'interface' in self.existing_data and \
                len(self.existing_data['interface']) > 0:
            old_interface = self.existing_data['interface']

        # 'type' and 'main' are ignored for proxy interfaces (deprecated
        # suboptions, see DOCUMENTATION).
        for item in ['type', 'main']:
            new_interface.pop(item, False)

        final_interface = old_interface.copy()
        final_interface.update(new_interface)
        # The API returns interface values as strings; normalise for the
        # comparison below.
        final_interface = dict((k, str(v)) for k, v in final_interface.items())

        if final_interface != old_interface:
            return final_interface
        else:
            return {}

    def update_proxy(self, proxy_id, data):
        """Update only the changed fields of an existing proxy, then exit."""
        try:
            if self._module.check_mode:
                self._module.exit_json(changed=True)
            parameters = {'proxyid': proxy_id}

            # Keep only set values that differ from the existing data.
            for item in data:
                if data[item] and item in self.existing_data and \
                        self.existing_data[item] != data[item]:
                    parameters[item] = data[item]

            # The interface needs the deep comparison done below; drop the
            # shallow != result.
            if 'interface' in parameters:
                parameters.pop('interface')

            # proxy_address only applies to active proxies (status '5').
            if 'proxy_address' in data and data['status'] != '5':
                parameters.pop('proxy_address', False)

            # interface only applies to passive proxies (status '6').
            if 'interface' in data and data['status'] != '6':
                parameters.pop('interface', False)

            if 'interface' in data and data['status'] == '6':
                new_interface = self.compile_interface_params(data['interface'])
                if len(new_interface) > 0:
                    parameters['interface'] = new_interface

            # More than just 'proxyid' means something actually changed.
            if len(parameters) > 1:
                self._zapi.proxy.update(parameters)
                self._module.exit_json(
                    changed=True,
                    result="Successfully updated proxy %s (%s)" %
                           (data['host'], proxy_id)
                )
            else:
                self._module.exit_json(changed=False)
        except Exception as e:
            self._module.fail_json(msg="Failed to update proxy %s: %s" %
                                       (data['host'], e))
|
||||
|
||||
|
||||
def main():
    """Module entry point: create, update or delete a Zabbix proxy."""
    module = AnsibleModule(
        argument_spec=dict(
            server_url=dict(type='str', required=True, aliases=['url']),
            login_user=dict(type='str', required=True),
            login_password=dict(type='str', required=True, no_log=True),
            proxy_name=dict(type='str', required=True),
            proxy_address=dict(type='str', required=False),
            http_login_user=dict(type='str', required=False, default=None),
            http_login_password=dict(type='str', required=False,
                                     default=None, no_log=True),
            validate_certs=dict(type='bool', required=False, default=True),
            status=dict(type='str', default="active", choices=['active', 'passive']),
            state=dict(type='str', default="present", choices=['present', 'absent']),
            description=dict(type='str', required=False),
            tls_connect=dict(type='str', default='no_encryption',
                             choices=['no_encryption', 'PSK', 'certificate']),
            tls_accept=dict(type='str', default='no_encryption',
                            choices=['no_encryption', 'PSK', 'certificate']),
            ca_cert=dict(type='str', required=False, default=None, aliases=['tls_issuer']),
            tls_subject=dict(type='str', required=False, default=None),
            tls_psk_identity=dict(type='str', required=False, default=None),
            tls_psk=dict(type='str', required=False, default=None),
            timeout=dict(type='int', default=10),
            interface=dict(
                type='dict',
                required=False,
                default={},
                options=dict(
                    useip=dict(type='int', choices=[0, 1], default=0),
                    ip=dict(type='str', default=''),
                    dns=dict(type='str', default=''),
                    port=dict(type='str', default='10051'),
                    # 'type' and 'main' are deprecated and ignored for
                    # proxy interfaces (see DOCUMENTATION).
                    type=dict(type='int', default=0, removed_in_version='2.14'),
                    main=dict(type='int', default=0, removed_in_version='2.14')
                ),
            )
        ),
        supports_check_mode=True
    )

    if not HAS_ZABBIX_API:
        module.fail_json(msg=missing_required_lib('zabbix-api', url='https://pypi.org/project/zabbix-api/'), exception=ZBX_IMP_ERR)

    server_url = module.params['server_url']
    login_user = module.params['login_user']
    login_password = module.params['login_password']
    http_login_user = module.params['http_login_user']
    http_login_password = module.params['http_login_password']
    validate_certs = module.params['validate_certs']
    proxy_name = module.params['proxy_name']
    proxy_address = module.params['proxy_address']
    description = module.params['description']
    status = module.params['status']
    tls_connect = module.params['tls_connect']
    tls_accept = module.params['tls_accept']
    tls_issuer = module.params['ca_cert']
    tls_subject = module.params['tls_subject']
    tls_psk_identity = module.params['tls_psk_identity']
    tls_psk = module.params['tls_psk']
    state = module.params['state']
    timeout = module.params['timeout']
    interface = module.params['interface']

    # Convert status to its Zabbix numeric code: passive -> 6, active -> 5.
    status = 6 if status == "passive" else 5

    # TLS mode numeric codes: 1 = no encryption, 2 = PSK, 4 = certificate.
    if tls_connect == 'certificate':
        tls_connect = 4
    elif tls_connect == 'PSK':
        tls_connect = 2
    else:
        tls_connect = 1

    if tls_accept == 'certificate':
        tls_accept = 4
    elif tls_accept == 'PSK':
        tls_accept = 2
    else:
        tls_accept = 1

    zbx = None
    # login to zabbix
    try:
        zbx = ZabbixAPI(server_url, timeout=timeout,
                        user=http_login_user,
                        passwd=http_login_password,
                        validate_certs=validate_certs)
        zbx.login(login_user, login_password)
        # Log out when the module process exits, whatever the outcome.
        atexit.register(zbx.logout)
    except Exception as e:
        module.fail_json(msg="Failed to connect to Zabbix server: %s" % e)

    proxy = Proxy(module, zbx)

    # check if proxy already exists
    proxy_id = proxy.proxy_exists(proxy_name)

    if proxy_id:
        if state == "absent":
            # remove proxy
            proxy.delete_proxy(proxy_id, proxy_name)
        else:
            proxy.update_proxy(proxy_id, {
                'host': proxy_name,
                'description': description,
                'status': str(status),
                'tls_connect': str(tls_connect),
                'tls_accept': str(tls_accept),
                'tls_issuer': tls_issuer,
                'tls_subject': tls_subject,
                'tls_psk_identity': tls_psk_identity,
                'tls_psk': tls_psk,
                'interface': interface,
                'proxy_address': proxy_address
            })
    else:
        if state == "absent":
            # the proxy is already deleted.
            module.exit_json(changed=False)

        proxy_id = proxy.add_proxy(data={
            'host': proxy_name,
            'description': description,
            'status': str(status),
            'tls_connect': str(tls_connect),
            'tls_accept': str(tls_accept),
            'tls_issuer': tls_issuer,
            'tls_subject': tls_subject,
            'tls_psk_identity': tls_psk_identity,
            'tls_psk': tls_psk,
            'interface': interface,
            'proxy_address': proxy_address
        })
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
main()
|
||||
471
plugins/modules/monitoring/zabbix/zabbix_screen.py
Normal file
471
plugins/modules/monitoring/zabbix/zabbix_screen.py
Normal file
@@ -0,0 +1,471 @@
|
||||
#!/usr/bin/python
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
# (c) 2013-2014, Epic Games, Inc.
|
||||
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||
|
||||
from __future__ import absolute_import, division, print_function
|
||||
__metaclass__ = type
|
||||
|
||||
|
||||
ANSIBLE_METADATA = {'metadata_version': '1.1',
|
||||
'status': ['preview'],
|
||||
'supported_by': 'community'}
|
||||
|
||||
|
||||
DOCUMENTATION = r'''
|
||||
---
|
||||
module: zabbix_screen
|
||||
short_description: Create/update/delete Zabbix screens
|
||||
description:
|
||||
- This module allows you to create, modify and delete Zabbix screens and associated graph data.
|
||||
author:
|
||||
- "Cove (@cove)"
|
||||
- "Tony Minfei Ding (!UNKNOWN)"
|
||||
- "Harrison Gu (@harrisongu)"
|
||||
requirements:
|
||||
- "python >= 2.6"
|
||||
- "zabbix-api >= 0.5.4"
|
||||
options:
|
||||
screens:
|
||||
description:
|
||||
- List of screens to be created/updated/deleted (see example).
|
||||
type: list
|
||||
elements: dict
|
||||
required: true
|
||||
suboptions:
|
||||
screen_name:
|
||||
description:
|
||||
- Screen name will be used.
|
||||
- If a screen has already been added, the screen name won't be updated.
|
||||
type: str
|
||||
required: true
|
||||
host_group:
|
||||
description:
|
||||
- Host group will be used for searching hosts.
|
||||
- Required if I(state=present).
|
||||
type: str
|
||||
state:
|
||||
description:
|
||||
- I(present) - Create a screen if it doesn't exist. If the screen already exists, the screen will be updated as needed.
|
||||
- I(absent) - If a screen exists, the screen will be deleted.
|
||||
type: str
|
||||
default: present
|
||||
choices:
|
||||
- absent
|
||||
- present
|
||||
graph_names:
|
||||
description:
|
||||
- Graph names will be added to a screen. Case insensitive.
|
||||
- Required if I(state=present).
|
||||
type: list
|
||||
elements: str
|
||||
graph_width:
|
||||
description:
|
||||
- Graph width will be set in graph settings.
|
||||
type: int
|
||||
graph_height:
|
||||
description:
|
||||
- Graph height will be set in graph settings.
|
||||
type: int
|
||||
graphs_in_row:
|
||||
description:
|
||||
- Limit columns of a screen and make multiple rows.
|
||||
type: int
|
||||
default: 3
|
||||
sort:
|
||||
description:
|
||||
- Sort hosts alphabetically.
|
||||
- If there are numbers in hostnames, leading zero should be used.
|
||||
type: bool
|
||||
default: no
|
||||
|
||||
extends_documentation_fragment:
|
||||
- community.general.zabbix
|
||||
|
||||
|
||||
notes:
|
||||
- Too many concurrent updates to the same screen may cause Zabbix to return errors, see examples for a workaround if needed.
|
||||
'''
|
||||
|
||||
EXAMPLES = r'''
|
||||
# Create/update a screen.
|
||||
- name: Create a new screen or update an existing screen's items 5 in a row
|
||||
local_action:
|
||||
module: zabbix_screen
|
||||
server_url: http://monitor.example.com
|
||||
login_user: username
|
||||
login_password: password
|
||||
screens:
|
||||
- screen_name: ExampleScreen1
|
||||
host_group: Example group1
|
||||
state: present
|
||||
graph_names:
|
||||
- Example graph1
|
||||
- Example graph2
|
||||
graph_width: 200
|
||||
graph_height: 100
|
||||
graphs_in_row: 5
|
||||
|
||||
# Create/update multi-screen
|
||||
- name: Create two of new screens or update the existing screens' items
|
||||
local_action:
|
||||
module: zabbix_screen
|
||||
server_url: http://monitor.example.com
|
||||
login_user: username
|
||||
login_password: password
|
||||
screens:
|
||||
- screen_name: ExampleScreen1
|
||||
host_group: Example group1
|
||||
state: present
|
||||
graph_names:
|
||||
- Example graph1
|
||||
- Example graph2
|
||||
graph_width: 200
|
||||
graph_height: 100
|
||||
- screen_name: ExampleScreen2
|
||||
host_group: Example group2
|
||||
state: present
|
||||
graph_names:
|
||||
- Example graph1
|
||||
- Example graph2
|
||||
graph_width: 200
|
||||
graph_height: 100
|
||||
|
||||
# Limit the Zabbix screen creations to one host since Zabbix can return an error when doing concurrent updates
|
||||
- name: Create a new screen or update an existing screen's items
|
||||
local_action:
|
||||
module: zabbix_screen
|
||||
server_url: http://monitor.example.com
|
||||
login_user: username
|
||||
login_password: password
|
||||
state: present
|
||||
screens:
|
||||
- screen_name: ExampleScreen
|
||||
host_group: Example group
|
||||
state: present
|
||||
graph_names:
|
||||
- Example graph1
|
||||
- Example graph2
|
||||
graph_width: 200
|
||||
graph_height: 100
|
||||
when: inventory_hostname==groups['group_name'][0]
|
||||
'''
|
||||
|
||||
|
||||
import atexit
|
||||
import traceback
|
||||
|
||||
try:
|
||||
from zabbix_api import ZabbixAPI
|
||||
from zabbix_api import ZabbixAPIException
|
||||
from zabbix_api import Already_Exists
|
||||
|
||||
HAS_ZABBIX_API = True
|
||||
except ImportError:
|
||||
ZBX_IMP_ERR = traceback.format_exc()
|
||||
HAS_ZABBIX_API = False
|
||||
|
||||
from ansible.module_utils.basic import AnsibleModule, missing_required_lib
|
||||
|
||||
|
||||
class Screen(object):
|
||||
def __init__(self, module, zbx):
|
||||
self._module = module
|
||||
self._zapi = zbx
|
||||
|
||||
# get group id by group name
|
||||
def get_host_group_id(self, group_name):
|
||||
if group_name == "":
|
||||
self._module.fail_json(msg="group_name is required")
|
||||
hostGroup_list = self._zapi.hostgroup.get({'output': 'extend', 'filter': {'name': group_name}})
|
||||
if len(hostGroup_list) < 1:
|
||||
self._module.fail_json(msg="Host group not found: %s" % group_name)
|
||||
else:
|
||||
hostGroup_id = hostGroup_list[0]['groupid']
|
||||
return hostGroup_id
|
||||
|
||||
# get monitored host_id by host_group_id
|
||||
def get_host_ids_by_group_id(self, group_id, sort):
|
||||
host_list = self._zapi.host.get({'output': 'extend', 'groupids': group_id, 'monitored_hosts': 1})
|
||||
if len(host_list) < 1:
|
||||
self._module.fail_json(msg="No host in the group.")
|
||||
else:
|
||||
if sort:
|
||||
host_list = sorted(host_list, key=lambda name: name['name'])
|
||||
host_ids = []
|
||||
for i in host_list:
|
||||
host_id = i['hostid']
|
||||
host_ids.append(host_id)
|
||||
return host_ids
|
||||
|
||||
# get screen
|
||||
def get_screen_id(self, screen_name):
|
||||
if screen_name == "":
|
||||
self._module.fail_json(msg="screen_name is required")
|
||||
try:
|
||||
screen_id_list = self._zapi.screen.get({'output': 'extend', 'search': {"name": screen_name}})
|
||||
if len(screen_id_list) >= 1:
|
||||
screen_id = screen_id_list[0]['screenid']
|
||||
return screen_id
|
||||
return None
|
||||
except Exception as e:
|
||||
self._module.fail_json(msg="Failed to get screen %s from Zabbix: %s" % (screen_name, e))
|
||||
|
||||
# create screen
|
||||
def create_screen(self, screen_name, h_size, v_size):
|
||||
try:
|
||||
if self._module.check_mode:
|
||||
self._module.exit_json(changed=True)
|
||||
screen = self._zapi.screen.create({'name': screen_name, 'hsize': h_size, 'vsize': v_size})
|
||||
return screen['screenids'][0]
|
||||
except Exception as e:
|
||||
self._module.fail_json(msg="Failed to create screen %s: %s" % (screen_name, e))
|
||||
|
||||
# update screen
|
||||
def update_screen(self, screen_id, screen_name, h_size, v_size):
|
||||
try:
|
||||
if self._module.check_mode:
|
||||
self._module.exit_json(changed=True)
|
||||
self._zapi.screen.update({'screenid': screen_id, 'hsize': h_size, 'vsize': v_size})
|
||||
except Exception as e:
|
||||
self._module.fail_json(msg="Failed to update screen %s: %s" % (screen_name, e))
|
||||
|
||||
# delete screen
|
||||
def delete_screen(self, screen_id, screen_name):
|
||||
try:
|
||||
if self._module.check_mode:
|
||||
self._module.exit_json(changed=True)
|
||||
self._zapi.screen.delete([screen_id])
|
||||
except Exception as e:
|
||||
self._module.fail_json(msg="Failed to delete screen %s: %s" % (screen_name, e))
|
||||
|
||||
# get graph ids
|
||||
def get_graph_ids(self, hosts, graph_name_list):
|
||||
graph_id_lists = []
|
||||
vsize = 1
|
||||
for host in hosts:
|
||||
graph_id_list = self.get_graphs_by_host_id(graph_name_list, host)
|
||||
size = len(graph_id_list)
|
||||
if size > 0:
|
||||
graph_id_lists.extend(graph_id_list)
|
||||
if vsize < size:
|
||||
vsize = size
|
||||
return graph_id_lists, vsize
|
||||
|
||||
# getGraphs
|
||||
def get_graphs_by_host_id(self, graph_name_list, host_id):
|
||||
graph_ids = []
|
||||
for graph_name in graph_name_list:
|
||||
graphs_list = self._zapi.graph.get({'output': 'extend', 'search': {'name': graph_name}, 'hostids': host_id})
|
||||
graph_id_list = []
|
||||
if len(graphs_list) > 0:
|
||||
for graph in graphs_list:
|
||||
graph_id = graph['graphid']
|
||||
graph_id_list.append(graph_id)
|
||||
if len(graph_id_list) > 0:
|
||||
graph_ids.extend(graph_id_list)
|
||||
return graph_ids
|
||||
|
||||
# get screen items
|
||||
def get_screen_items(self, screen_id):
|
||||
screen_item_list = self._zapi.screenitem.get({'output': 'extend', 'screenids': screen_id})
|
||||
return screen_item_list
|
||||
|
||||
# delete screen items
|
||||
def delete_screen_items(self, screen_id, screen_item_id_list):
|
||||
try:
|
||||
if len(screen_item_id_list) == 0:
|
||||
return True
|
||||
screen_item_list = self.get_screen_items(screen_id)
|
||||
if len(screen_item_list) > 0:
|
||||
if self._module.check_mode:
|
||||
self._module.exit_json(changed=True)
|
||||
self._zapi.screenitem.delete(screen_item_id_list)
|
||||
return True
|
||||
return False
|
||||
except ZabbixAPIException:
|
||||
pass
|
||||
|
||||
# get screen's hsize and vsize
|
||||
def get_hsize_vsize(self, hosts, v_size, graphs_in_row):
|
||||
h_size = len(hosts)
|
||||
# when there is only one host, put all graphs in a row
|
||||
if h_size == 1:
|
||||
if v_size <= graphs_in_row:
|
||||
h_size = v_size
|
||||
else:
|
||||
h_size = graphs_in_row
|
||||
v_size = (v_size - 1) // h_size + 1
|
||||
# when len(hosts) is more then graphs_in_row
|
||||
elif len(hosts) > graphs_in_row:
|
||||
h_size = graphs_in_row
|
||||
v_size = (len(hosts) // graphs_in_row + 1) * v_size
|
||||
|
||||
return h_size, v_size
|
||||
|
||||
# create screen_items
|
||||
def create_screen_items(self, screen_id, hosts, graph_name_list, width, height, h_size, graphs_in_row):
|
||||
if len(hosts) < 4:
|
||||
if width is None or width < 0:
|
||||
width = 500
|
||||
else:
|
||||
if width is None or width < 0:
|
||||
width = 200
|
||||
if height is None or height < 0:
|
||||
height = 100
|
||||
|
||||
try:
|
||||
# when there're only one host, only one row is not good.
|
||||
if len(hosts) == 1:
|
||||
graph_id_list = self.get_graphs_by_host_id(graph_name_list, hosts[0])
|
||||
for i, graph_id in enumerate(graph_id_list):
|
||||
if graph_id is not None:
|
||||
self._zapi.screenitem.create({'screenid': screen_id, 'resourcetype': 0, 'resourceid': graph_id,
|
||||
'width': width, 'height': height,
|
||||
'x': i % h_size, 'y': i // h_size, 'colspan': 1, 'rowspan': 1,
|
||||
'elements': 0, 'valign': 0, 'halign': 0,
|
||||
'style': 0, 'dynamic': 0, 'sort_triggers': 0})
|
||||
else:
|
||||
for i, host in enumerate(hosts):
|
||||
graph_id_list = self.get_graphs_by_host_id(graph_name_list, host)
|
||||
for j, graph_id in enumerate(graph_id_list):
|
||||
if graph_id is not None:
|
||||
self._zapi.screenitem.create({'screenid': screen_id, 'resourcetype': 0, 'resourceid': graph_id,
|
||||
'width': width, 'height': height,
|
||||
'x': i % graphs_in_row, 'y': len(graph_id_list) * (i // graphs_in_row) + j,
|
||||
'colspan': 1, 'rowspan': 1,
|
||||
'elements': 0, 'valign': 0, 'halign': 0,
|
||||
'style': 0, 'dynamic': 0, 'sort_triggers': 0})
|
||||
except Already_Exists:
|
||||
pass
|
||||
|
||||
|
||||
def main():
|
||||
module = AnsibleModule(
|
||||
argument_spec=dict(
|
||||
server_url=dict(type='str', required=True, aliases=['url']),
|
||||
login_user=dict(type='str', required=True),
|
||||
login_password=dict(type='str', required=True, no_log=True),
|
||||
http_login_user=dict(type='str', required=False, default=None),
|
||||
http_login_password=dict(type='str', required=False, default=None, no_log=True),
|
||||
validate_certs=dict(type='bool', required=False, default=True),
|
||||
timeout=dict(type='int', default=10),
|
||||
screens=dict(
|
||||
type='list',
|
||||
elements='dict',
|
||||
required=True,
|
||||
options=dict(
|
||||
screen_name=dict(type='str', required=True),
|
||||
host_group=dict(type='str'),
|
||||
state=dict(type='str', default='present', choices=['absent', 'present']),
|
||||
graph_names=dict(type='list', elements='str'),
|
||||
graph_width=dict(type='int', default=None),
|
||||
graph_height=dict(type='int', default=None),
|
||||
graphs_in_row=dict(type='int', default=3),
|
||||
sort=dict(default=False, type='bool'),
|
||||
),
|
||||
required_if=[
|
||||
['state', 'present', ['host_group']]
|
||||
]
|
||||
)
|
||||
),
|
||||
supports_check_mode=True
|
||||
)
|
||||
|
||||
if not HAS_ZABBIX_API:
|
||||
module.fail_json(msg=missing_required_lib('zabbix-api', url='https://pypi.org/project/zabbix-api/'), exception=ZBX_IMP_ERR)
|
||||
|
||||
server_url = module.params['server_url']
|
||||
login_user = module.params['login_user']
|
||||
login_password = module.params['login_password']
|
||||
http_login_user = module.params['http_login_user']
|
||||
http_login_password = module.params['http_login_password']
|
||||
validate_certs = module.params['validate_certs']
|
||||
timeout = module.params['timeout']
|
||||
screens = module.params['screens']
|
||||
|
||||
zbx = None
|
||||
# login to zabbix
|
||||
try:
|
||||
zbx = ZabbixAPI(server_url, timeout=timeout, user=http_login_user, passwd=http_login_password,
|
||||
validate_certs=validate_certs)
|
||||
zbx.login(login_user, login_password)
|
||||
atexit.register(zbx.logout)
|
||||
except Exception as e:
|
||||
module.fail_json(msg="Failed to connect to Zabbix server: %s" % e)
|
||||
|
||||
screen = Screen(module, zbx)
|
||||
created_screens = []
|
||||
changed_screens = []
|
||||
deleted_screens = []
|
||||
|
||||
for zabbix_screen in screens:
|
||||
screen_name = zabbix_screen['screen_name']
|
||||
screen_id = screen.get_screen_id(screen_name)
|
||||
state = zabbix_screen['state']
|
||||
sort = zabbix_screen['sort']
|
||||
|
||||
if state == "absent":
|
||||
if screen_id:
|
||||
screen_item_list = screen.get_screen_items(screen_id)
|
||||
screen_item_id_list = []
|
||||
for screen_item in screen_item_list:
|
||||
screen_item_id = screen_item['screenitemid']
|
||||
screen_item_id_list.append(screen_item_id)
|
||||
screen.delete_screen_items(screen_id, screen_item_id_list)
|
||||
screen.delete_screen(screen_id, screen_name)
|
||||
|
||||
deleted_screens.append(screen_name)
|
||||
else:
|
||||
host_group = zabbix_screen['host_group']
|
||||
graph_names = zabbix_screen['graph_names']
|
||||
graphs_in_row = zabbix_screen['graphs_in_row']
|
||||
graph_width = zabbix_screen['graph_width']
|
||||
graph_height = zabbix_screen['graph_height']
|
||||
host_group_id = screen.get_host_group_id(host_group)
|
||||
hosts = screen.get_host_ids_by_group_id(host_group_id, sort)
|
||||
|
||||
screen_item_id_list = []
|
||||
resource_id_list = []
|
||||
|
||||
graph_ids, v_size = screen.get_graph_ids(hosts, graph_names)
|
||||
h_size, v_size = screen.get_hsize_vsize(hosts, v_size, graphs_in_row)
|
||||
|
||||
if not screen_id:
|
||||
# create screen
|
||||
screen_id = screen.create_screen(screen_name, h_size, v_size)
|
||||
screen.create_screen_items(screen_id, hosts, graph_names, graph_width, graph_height, h_size, graphs_in_row)
|
||||
created_screens.append(screen_name)
|
||||
else:
|
||||
screen_item_list = screen.get_screen_items(screen_id)
|
||||
|
||||
for screen_item in screen_item_list:
|
||||
screen_item_id = screen_item['screenitemid']
|
||||
resource_id = screen_item['resourceid']
|
||||
screen_item_id_list.append(screen_item_id)
|
||||
resource_id_list.append(resource_id)
|
||||
|
||||
# when the screen items changed, then update
|
||||
if graph_ids != resource_id_list:
|
||||
deleted = screen.delete_screen_items(screen_id, screen_item_id_list)
|
||||
if deleted:
|
||||
screen.update_screen(screen_id, screen_name, h_size, v_size)
|
||||
screen.create_screen_items(screen_id, hosts, graph_names, graph_width, graph_height, h_size, graphs_in_row)
|
||||
changed_screens.append(screen_name)
|
||||
|
||||
if created_screens and changed_screens:
|
||||
module.exit_json(changed=True, result="Successfully created screen(s): %s, and updated screen(s): %s" % (",".join(created_screens),
|
||||
",".join(changed_screens)))
|
||||
elif created_screens:
|
||||
module.exit_json(changed=True, result="Successfully created screen(s): %s" % ",".join(created_screens))
|
||||
elif changed_screens:
|
||||
module.exit_json(changed=True, result="Successfully updated screen(s): %s" % ",".join(changed_screens))
|
||||
elif deleted_screens:
|
||||
module.exit_json(changed=True, result="Successfully deleted screen(s): %s" % ",".join(deleted_screens))
|
||||
else:
|
||||
module.exit_json(changed=False)
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
main()
|
||||
291
plugins/modules/monitoring/zabbix/zabbix_service.py
Normal file
291
plugins/modules/monitoring/zabbix/zabbix_service.py
Normal file
@@ -0,0 +1,291 @@
|
||||
#!/usr/bin/python
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
# (c) 2019, OVH SAS
|
||||
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||
|
||||
from __future__ import absolute_import, division, print_function
|
||||
__metaclass__ = type
|
||||
|
||||
|
||||
ANSIBLE_METADATA = {'metadata_version': '1.1',
|
||||
'status': ['preview'],
|
||||
'supported_by': 'community'}
|
||||
|
||||
|
||||
DOCUMENTATION = '''
|
||||
---
|
||||
module: zabbix_service
|
||||
short_description: Create/update/delete Zabbix service
|
||||
description:
|
||||
- Create/update/delete Zabbix service.
|
||||
author:
|
||||
- "Emmanuel Riviere (@emriver)"
|
||||
requirements:
|
||||
- "python >= 2.7"
|
||||
- "zabbix-api >= 0.5.4"
|
||||
options:
|
||||
name:
|
||||
description:
|
||||
- Name of Zabbix service
|
||||
required: true
|
||||
type: str
|
||||
parent:
|
||||
description:
|
||||
- Name of Zabbix service parent
|
||||
required: false
|
||||
type: str
|
||||
sla:
|
||||
description:
|
||||
- Sla value (i.e 99.99), goodsla in Zabbix API
|
||||
required: false
|
||||
type: float
|
||||
calculate_sla:
|
||||
description:
|
||||
- If yes, calculate the SLA value for this service, showsla in Zabbix API
|
||||
required: false
|
||||
type: bool
|
||||
algorithm:
|
||||
description:
|
||||
- Algorithm used to calculate the sla
|
||||
- C(no), sla is not calculated
|
||||
- C(one_child), problem if at least one child has a problem
|
||||
- C(all_children), problem if all children have problems
|
||||
required: false
|
||||
type: str
|
||||
choices: ["no", "one_child", "all_children"]
|
||||
default: one_child
|
||||
trigger_name:
|
||||
description:
|
||||
- Name of trigger linked to the service
|
||||
required: false
|
||||
type: str
|
||||
trigger_host:
|
||||
description:
|
||||
- Name of host linked to the service
|
||||
required: false
|
||||
type: str
|
||||
state:
|
||||
description:
|
||||
- 'State: present - create/update service; absent - delete service'
|
||||
required: false
|
||||
choices: [present, absent]
|
||||
default: "present"
|
||||
type: str
|
||||
|
||||
extends_documentation_fragment:
|
||||
- community.general.zabbix
|
||||
|
||||
'''
|
||||
|
||||
EXAMPLES = '''
|
||||
---
|
||||
# Creates a new Zabbix service
|
||||
- name: Manage services
|
||||
local_action:
|
||||
module: zabbix_service
|
||||
server_url: "https://192.168.1.1"
|
||||
login_user: username
|
||||
login_password: password
|
||||
name: apache2 service
|
||||
sla: 99.99
|
||||
calculate_sla: yes
|
||||
algorithm: one_child
|
||||
trigger_name: apache2 service status
|
||||
trigger_host: webserver01
|
||||
state: present
|
||||
'''
|
||||
|
||||
RETURN = '''
|
||||
---
|
||||
'''
|
||||
|
||||
import atexit
|
||||
import traceback
|
||||
|
||||
from ansible.module_utils.basic import AnsibleModule, missing_required_lib
|
||||
|
||||
|
||||
try:
|
||||
from zabbix_api import ZabbixAPI, ZabbixAPIException
|
||||
HAS_ZABBIX_API = True
|
||||
except ImportError:
|
||||
ZBX_IMP_ERR = traceback.format_exc()
|
||||
HAS_ZABBIX_API = False
|
||||
|
||||
|
||||
class Service(object):
|
||||
def __init__(self, module, zbx):
|
||||
self._module = module
|
||||
self._zapi = zbx
|
||||
|
||||
def get_service_ids(self, service_name):
|
||||
service_ids = []
|
||||
services = self._zapi.service.get({'filter': {'name': service_name}})
|
||||
for service in services:
|
||||
service_ids.append(service['serviceid'])
|
||||
return service_ids
|
||||
|
||||
def delete_service(self, service_ids):
|
||||
if self._module.check_mode:
|
||||
self._module.exit_json(changed=True)
|
||||
self._zapi.service.delete(service_ids)
|
||||
|
||||
def dump_services(self, service_ids):
|
||||
services = self._zapi.service.get({'output': 'extend', 'filter': {'serviceid': service_ids}, 'selectParent': '1'})
|
||||
return services
|
||||
|
||||
def generate_service_config(self, name, parent, sla, calculate_sla, trigger_name, trigger_host, algorithm):
|
||||
algorithms = {'no': '0', 'one_child': '1', 'all_children': '2'}
|
||||
algorithm = algorithms[algorithm]
|
||||
|
||||
if calculate_sla:
|
||||
calculate_sla = 1
|
||||
else:
|
||||
calculate_sla = 0
|
||||
|
||||
# Zabbix api return when no trigger
|
||||
trigger_id = 0
|
||||
if trigger_host and trigger_name:
|
||||
# Retrieving the host to get the trigger
|
||||
hosts = self._zapi.host.get({'filter': {'host': trigger_host}})
|
||||
if not hosts:
|
||||
self._module.fail_json(msg="Target host %s not found" % trigger_host)
|
||||
host_id = hosts[0]['hostid']
|
||||
|
||||
triggers = self._zapi.trigger.get({'filter': {'description': trigger_name}, 'hostids': [host_id]})
|
||||
if not triggers:
|
||||
self._module.fail_json(msg="Trigger %s not found on host %s" % (trigger_name, trigger_host))
|
||||
trigger_id = triggers[0]['triggerid']
|
||||
|
||||
request = {
|
||||
'name': name,
|
||||
'algorithm': algorithm,
|
||||
'showsla': calculate_sla,
|
||||
'sortorder': 1,
|
||||
'goodsla': format(sla, '.4f'), # Sla has 4 decimals
|
||||
'triggerid': trigger_id
|
||||
}
|
||||
|
||||
if parent:
|
||||
parent_ids = self.get_service_ids(parent)
|
||||
if not parent_ids:
|
||||
self._module.fail_json(msg="Parent %s not found" % parent)
|
||||
request['parentid'] = parent_ids[0]
|
||||
return request
|
||||
|
||||
def create_service(self, name, parent, sla, calculate_sla, trigger_name, trigger_host, algorithm):
|
||||
if self._module.check_mode:
|
||||
self._module.exit_json(changed=True)
|
||||
self._zapi.service.create(self.generate_service_config(name, parent, sla, calculate_sla, trigger_name, trigger_host, algorithm))
|
||||
|
||||
def update_service(self, service_id, name, parent, sla, calculate_sla, trigger_name, trigger_host, algorithm):
|
||||
generated_config = self.generate_service_config(name, parent, sla, calculate_sla, trigger_name, trigger_host, algorithm)
|
||||
live_config = self.dump_services(service_id)[0]
|
||||
|
||||
item_to_check = ['name', 'showsla', 'algorithm', 'triggerid', 'sortorder', 'goodsla']
|
||||
change = False
|
||||
for item in item_to_check:
|
||||
if str(generated_config[item]) != str(live_config[item]):
|
||||
change = True
|
||||
|
||||
# In Zabbix 4.0
|
||||
# No parent returns : "parent": []
|
||||
# A parent returns : "parent": { "serviceid": 12 }
|
||||
if 'parentid' in generated_config:
|
||||
if 'serviceid' in live_config['parent']:
|
||||
if generated_config['parentid'] != live_config['parent']['serviceid']:
|
||||
change = True
|
||||
else:
|
||||
change = True
|
||||
elif 'serviceid' in live_config['parent']:
|
||||
change = True
|
||||
|
||||
if not change:
|
||||
self._module.exit_json(changed=False, msg="Service %s up to date" % name)
|
||||
|
||||
if self._module.check_mode:
|
||||
self._module.exit_json(changed=True)
|
||||
generated_config['serviceid'] = service_id
|
||||
self._zapi.service.update(generated_config)
|
||||
self._module.exit_json(changed=True, msg="Service %s updated" % name)
|
||||
|
||||
|
||||
def main():
|
||||
module = AnsibleModule(
|
||||
argument_spec=dict(
|
||||
server_url=dict(type='str', required=True, aliases=['url']),
|
||||
login_user=dict(type='str', required=True),
|
||||
login_password=dict(type='str', required=True, no_log=True),
|
||||
http_login_user=dict(type='str', required=False, default=None),
|
||||
http_login_password=dict(type='str', required=False, default=None, no_log=True),
|
||||
validate_certs=dict(type='bool', required=False, default=True),
|
||||
name=dict(type='str', required=True),
|
||||
parent=dict(type='str', required=False),
|
||||
sla=dict(type='float', required=False),
|
||||
calculate_sla=dict(type='bool', required=False, default=False),
|
||||
algorithm=dict(default='one_child', required=False, choices=['no', 'one_child', 'all_children']),
|
||||
trigger_name=dict(type='str', required=False),
|
||||
trigger_host=dict(type='str', required=False),
|
||||
state=dict(default="present", choices=['present', 'absent']),
|
||||
timeout=dict(type='int', default=10)
|
||||
),
|
||||
supports_check_mode=True
|
||||
)
|
||||
|
||||
if not HAS_ZABBIX_API:
|
||||
module.fail_json(msg=missing_required_lib('zabbix-api', url='https://pypi.org/project/zabbix-api/'), exception=ZBX_IMP_ERR)
|
||||
|
||||
server_url = module.params['server_url']
|
||||
login_user = module.params['login_user']
|
||||
login_password = module.params['login_password']
|
||||
http_login_user = module.params['http_login_user']
|
||||
http_login_password = module.params['http_login_password']
|
||||
validate_certs = module.params['validate_certs']
|
||||
name = module.params['name']
|
||||
parent = module.params['parent']
|
||||
sla = module.params['sla']
|
||||
calculate_sla = module.params['calculate_sla']
|
||||
algorithm = module.params['algorithm']
|
||||
trigger_name = module.params['trigger_name']
|
||||
trigger_host = module.params['trigger_host']
|
||||
state = module.params['state']
|
||||
timeout = module.params['timeout']
|
||||
|
||||
zbx = None
|
||||
|
||||
# Login to zabbix
|
||||
try:
|
||||
zbx = ZabbixAPI(server_url, timeout=timeout, user=http_login_user, passwd=http_login_password, validate_certs=validate_certs)
|
||||
zbx.login(login_user, login_password)
|
||||
atexit.register(zbx.logout)
|
||||
except ZabbixAPIException as error:
|
||||
module.fail_json(msg="Failed to connect to Zabbix server: %s" % error)
|
||||
|
||||
# Load service module
|
||||
service = Service(module, zbx)
|
||||
service_ids = service.get_service_ids(name)
|
||||
if service_ids:
|
||||
service_json = service.dump_services(service_ids)
|
||||
|
||||
# Delete service
|
||||
if state == "absent":
|
||||
if not service_ids:
|
||||
module.exit_json(changed=False, msg="Service not found, no change: %s" % name)
|
||||
service.delete_service(service_ids)
|
||||
module.exit_json(changed=True, result="Successfully deleted service(s) %s" % name)
|
||||
|
||||
elif state == "present":
|
||||
if (trigger_name and not trigger_host) or (trigger_host and not trigger_name):
|
||||
module.fail_json(msg="Specify either both trigger_host and trigger_name or none to create or update a service")
|
||||
# Does not exists going to create it
|
||||
if not service_ids:
|
||||
service.create_service(name, parent, sla, calculate_sla, trigger_name, trigger_host, algorithm)
|
||||
module.exit_json(changed=True, msg="Service %s created" % name)
|
||||
# Else we update it if needed
|
||||
else:
|
||||
service.update_service(service_ids[0], name, parent, sla, calculate_sla, trigger_name, trigger_host, algorithm)
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
main()
|
||||
795
plugins/modules/monitoring/zabbix/zabbix_template.py
Normal file
795
plugins/modules/monitoring/zabbix/zabbix_template.py
Normal file
@@ -0,0 +1,795 @@
|
||||
#!/usr/bin/python
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
# (c) 2017, sookido
|
||||
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||
|
||||
from __future__ import absolute_import, division, print_function
|
||||
__metaclass__ = type
|
||||
|
||||
|
||||
ANSIBLE_METADATA = {'metadata_version': '1.1',
|
||||
'status': ['preview'],
|
||||
'supported_by': 'community'}
|
||||
|
||||
|
||||
DOCUMENTATION = r'''
|
||||
---
|
||||
module: zabbix_template
|
||||
short_description: Create/update/delete/dump Zabbix template
|
||||
description:
|
||||
- This module allows you to create, modify, delete and dump Zabbix templates.
|
||||
- Multiple templates can be created or modified at once if passing JSON or XML to module.
|
||||
author:
|
||||
- "sookido (@sookido)"
|
||||
- "Logan Vig (@logan2211)"
|
||||
- "Dusan Matejka (@D3DeFi)"
|
||||
requirements:
|
||||
- "python >= 2.6"
|
||||
- "zabbix-api >= 0.5.4"
|
||||
options:
|
||||
template_name:
|
||||
description:
|
||||
- Name of Zabbix template.
|
||||
- Required when I(template_json) or I(template_xml) are not used.
|
||||
- Mutually exclusive with I(template_json) and I(template_xml).
|
||||
required: false
|
||||
type: str
|
||||
template_json:
|
||||
description:
|
||||
- JSON dump of templates to import.
|
||||
- Multiple templates can be imported this way.
|
||||
- Mutually exclusive with I(template_name) and I(template_xml).
|
||||
required: false
|
||||
type: json
|
||||
template_xml:
|
||||
description:
|
||||
- XML dump of templates to import.
|
||||
- Multiple templates can be imported this way.
|
||||
- You are advised to pass XML structure matching the structure used by your version of Zabbix server.
|
||||
- Custom XML structure can be imported as long as it is valid, but may not yield consistent idempotent
|
||||
results on subsequent runs.
|
||||
- Mutually exclusive with I(template_name) and I(template_json).
|
||||
required: false
|
||||
type: str
|
||||
template_groups:
|
||||
description:
|
||||
- List of host groups to add template to when template is created.
|
||||
- Replaces the current host groups the template belongs to if the template is already present.
|
||||
- Required when creating a new template with C(state=present) and I(template_name) is used.
|
||||
Not required when updating an existing template.
|
||||
required: false
|
||||
type: list
|
||||
elements: str
|
||||
link_templates:
|
||||
description:
|
||||
- List of template names to be linked to the template.
|
||||
- Templates that are not specified and are linked to the existing template will be only unlinked and not
|
||||
cleared from the template.
|
||||
required: false
|
||||
type: list
|
||||
elements: str
|
||||
clear_templates:
|
||||
description:
|
||||
- List of template names to be unlinked and cleared from the template.
|
||||
- This option is ignored if template is being created for the first time.
|
||||
required: false
|
||||
type: list
|
||||
elements: str
|
||||
macros:
|
||||
description:
|
||||
- List of user macros to create for the template.
|
||||
- Macros that are not specified and are present on the existing template will be replaced.
|
||||
- See examples on how to pass macros.
|
||||
required: false
|
||||
type: list
|
||||
elements: dict
|
||||
suboptions:
|
||||
name:
|
||||
description:
|
||||
- Name of the macro.
|
||||
- Must be specified in {$NAME} format.
|
||||
type: str
|
||||
value:
|
||||
description:
|
||||
- Value of the macro.
|
||||
type: str
|
||||
dump_format:
|
||||
description:
|
||||
- Format to use when dumping template with C(state=dump).
|
||||
- This option is deprecated and will eventually be removed in 2.14.
|
||||
required: false
|
||||
choices: [json, xml]
|
||||
default: "json"
|
||||
type: str
|
||||
omit_date:
|
||||
description:
|
||||
- Removes the date field for the exported/dumped template
|
||||
- Requires C(state=dump)
|
||||
required: false
|
||||
type: bool
|
||||
default: false
|
||||
state:
|
||||
description:
|
||||
- Required state of the template.
|
||||
- On C(state=present) template will be created/imported or updated depending if it is already present.
|
||||
- On C(state=dump) template content will get dumped into required format specified in I(dump_format).
|
||||
- On C(state=absent) template will be deleted.
|
||||
- The C(state=dump) is deprecated and will eventually be removed in 2.14. The M(zabbix_template_info) module should be used instead.
|
||||
required: false
|
||||
choices: [present, absent, dump]
|
||||
default: "present"
|
||||
type: str
|
||||
|
||||
extends_documentation_fragment:
|
||||
- community.general.zabbix
|
||||
|
||||
'''
|
||||
|
||||
EXAMPLES = r'''
|
||||
---
|
||||
- name: Create a new Zabbix template linked to groups, macros and templates
|
||||
local_action:
|
||||
module: zabbix_template
|
||||
server_url: http://127.0.0.1
|
||||
login_user: username
|
||||
login_password: password
|
||||
template_name: ExampleHost
|
||||
template_groups:
|
||||
- Role
|
||||
- Role2
|
||||
link_templates:
|
||||
- Example template1
|
||||
- Example template2
|
||||
macros:
|
||||
- macro: '{$EXAMPLE_MACRO1}'
|
||||
value: 30000
|
||||
- macro: '{$EXAMPLE_MACRO2}'
|
||||
value: 3
|
||||
- macro: '{$EXAMPLE_MACRO3}'
|
||||
value: 'Example'
|
||||
state: present
|
||||
|
||||
- name: Unlink and clear templates from the existing Zabbix template
|
||||
local_action:
|
||||
module: zabbix_template
|
||||
server_url: http://127.0.0.1
|
||||
login_user: username
|
||||
login_password: password
|
||||
template_name: ExampleHost
|
||||
clear_templates:
|
||||
- Example template3
|
||||
- Example template4
|
||||
state: present
|
||||
|
||||
- name: Import Zabbix templates from JSON
|
||||
local_action:
|
||||
module: zabbix_template
|
||||
server_url: http://127.0.0.1
|
||||
login_user: username
|
||||
login_password: password
|
||||
template_json: "{{ lookup('file', 'zabbix_apache2.json') }}"
|
||||
state: present
|
||||
|
||||
- name: Import Zabbix templates from XML
|
||||
local_action:
|
||||
module: zabbix_template
|
||||
server_url: http://127.0.0.1
|
||||
login_user: username
|
||||
login_password: password
|
||||
    template_xml: "{{ lookup('file', 'zabbix_apache2.xml') }}"
|
||||
state: present
|
||||
|
||||
- name: Import Zabbix template from Ansible dict variable
|
||||
zabbix_template:
|
||||
login_user: username
|
||||
login_password: password
|
||||
server_url: http://127.0.0.1
|
||||
template_json:
|
||||
zabbix_export:
|
||||
version: '3.2'
|
||||
templates:
|
||||
- name: Template for Testing
|
||||
description: 'Testing template import'
|
||||
template: Test Template
|
||||
groups:
|
||||
- name: Templates
|
||||
applications:
|
||||
- name: Test Application
|
||||
state: present
|
||||
|
||||
- name: Configure macros on the existing Zabbix template
|
||||
local_action:
|
||||
module: zabbix_template
|
||||
server_url: http://127.0.0.1
|
||||
login_user: username
|
||||
login_password: password
|
||||
template_name: Template
|
||||
macros:
|
||||
- macro: '{$TEST_MACRO}'
|
||||
value: 'Example'
|
||||
state: present
|
||||
|
||||
- name: Delete Zabbix template
|
||||
local_action:
|
||||
module: zabbix_template
|
||||
server_url: http://127.0.0.1
|
||||
login_user: username
|
||||
login_password: password
|
||||
template_name: Template
|
||||
state: absent
|
||||
|
||||
- name: Dump Zabbix template as JSON
|
||||
local_action:
|
||||
module: zabbix_template
|
||||
server_url: http://127.0.0.1
|
||||
login_user: username
|
||||
login_password: password
|
||||
template_name: Template
|
||||
omit_date: yes
|
||||
state: dump
|
||||
register: template_dump
|
||||
|
||||
- name: Dump Zabbix template as XML
|
||||
local_action:
|
||||
module: zabbix_template
|
||||
server_url: http://127.0.0.1
|
||||
login_user: username
|
||||
login_password: password
|
||||
template_name: Template
|
||||
dump_format: xml
|
||||
omit_date: false
|
||||
state: dump
|
||||
register: template_dump
|
||||
'''
|
||||
|
||||
RETURN = r'''
|
||||
---
|
||||
template_json:
|
||||
description: The JSON dump of the template
|
||||
returned: when state is dump and omit_date is no
|
||||
type: str
|
||||
sample: {
|
||||
"zabbix_export":{
|
||||
"date":"2017-11-29T16:37:24Z",
|
||||
"templates":[{
|
||||
"templates":[],
|
||||
"description":"",
|
||||
"httptests":[],
|
||||
"screens":[],
|
||||
"applications":[],
|
||||
"discovery_rules":[],
|
||||
"groups":[{"name":"Templates"}],
|
||||
"name":"Test Template",
|
||||
"items":[],
|
||||
"macros":[],
|
||||
"template":"test"
|
||||
}],
|
||||
"version":"3.2",
|
||||
"groups":[{
|
||||
"name":"Templates"
|
||||
}]
|
||||
}
|
||||
}
|
||||
|
||||
template_xml:
|
||||
description: dump of the template in XML representation
|
||||
returned: when state is dump, dump_format is xml and omit_date is yes
|
||||
type: str
|
||||
sample: |-
|
||||
<?xml version="1.0" ?>
|
||||
<zabbix_export>
|
||||
<version>4.2</version>
|
||||
<groups>
|
||||
<group>
|
||||
<name>Templates</name>
|
||||
</group>
|
||||
</groups>
|
||||
<templates>
|
||||
<template>
|
||||
<template>test</template>
|
||||
<name>Test Template</name>
|
||||
<description/>
|
||||
<groups>
|
||||
<group>
|
||||
<name>Templates</name>
|
||||
</group>
|
||||
</groups>
|
||||
<applications/>
|
||||
<items/>
|
||||
<discovery_rules/>
|
||||
<httptests/>
|
||||
<macros/>
|
||||
<templates/>
|
||||
<screens/>
|
||||
<tags/>
|
||||
</template>
|
||||
</templates>
|
||||
</zabbix_export>
|
||||
'''
|
||||
|
||||
|
||||
import atexit
|
||||
import json
|
||||
import traceback
|
||||
import xml.etree.ElementTree as ET
|
||||
|
||||
from distutils.version import LooseVersion
|
||||
from ansible.module_utils.basic import AnsibleModule, missing_required_lib
|
||||
from ansible.module_utils._text import to_native
|
||||
|
||||
try:
|
||||
from zabbix_api import ZabbixAPI, ZabbixAPIException
|
||||
|
||||
HAS_ZABBIX_API = True
|
||||
except ImportError:
|
||||
ZBX_IMP_ERR = traceback.format_exc()
|
||||
HAS_ZABBIX_API = False
|
||||
|
||||
|
||||
class Template(object):
    """Zabbix template management helper.

    Wraps the subset of the Zabbix API used by this module: resolving host
    group and template names to IDs, creating/updating/deleting templates,
    and dumping, comparing and importing template configuration in JSON or
    XML form.
    """

    def __init__(self, module, zbx):
        self._module = module  # AnsibleModule, used for fail_json/exit_json
        self._zapi = zbx  # authenticated ZabbixAPI connection

    # check if host group exists
    def check_host_group_exist(self, group_names):
        """Fail the module if any host group in group_names does not exist."""
        for group_name in group_names:
            result = self._zapi.hostgroup.get({'filter': {'name': group_name}})
            if not result:
                self._module.fail_json(msg="Hostgroup not found: %s" %
                                       group_name)
        return True

    # get group ids by group names
    def get_group_ids_by_group_names(self, group_names):
        """Resolve host group names to a list of {'groupid': id} dicts."""
        group_ids = []
        if group_names is None or len(group_names) == 0:
            return group_ids
        if self.check_host_group_exist(group_names):
            group_list = self._zapi.hostgroup.get(
                {'output': 'extend',
                 'filter': {'name': group_names}})
            for group in group_list:
                group_id = group['groupid']
                group_ids.append({'groupid': group_id})
        return group_ids

    def get_template_ids(self, template_list):
        """Resolve template names to template IDs.

        Names that do not match an existing template are silently skipped.
        """
        template_ids = []
        if template_list is None or len(template_list) == 0:
            return template_ids
        for template in template_list:
            # Use a dedicated name for the API result; the original code
            # rebound the 'template_list' parameter here, which only worked
            # because the loop iterator had already been created.
            matching_templates = self._zapi.template.get(
                {'output': 'extend',
                 'filter': {'host': template}})
            if len(matching_templates) < 1:
                continue
            else:
                template_id = matching_templates[0]['templateid']
                template_ids.append(template_id)
        return template_ids

    def add_template(self, template_name, group_ids, link_template_ids, macros):
        """Create a new template; in check mode only report a change."""
        if self._module.check_mode:
            self._module.exit_json(changed=True)

        self._zapi.template.create({'host': template_name, 'groups': group_ids, 'templates': link_template_ids,
                                    'macros': macros})

    def check_template_changed(self, template_ids, template_groups, link_templates, clear_templates,
                               template_macros, template_content, template_type):
        """Compares template parameters to already existing values if any are found.

        template_json - JSON structures are compared as deep sorted dictionaries,
        template_xml - XML structures are compared as strings, but filtered and formatted first,
        If none above is used, all the other arguments are compared to their existing counterparts
        retrieved from Zabbix API."""
        changed = False
        # Compare filtered and formatted XMLs strings for any changes. It is expected that provided
        # XML has same structure as Zabbix uses (e.g. it was optimally exported via Zabbix GUI or API)
        if template_content is not None and template_type == 'xml':
            existing_template = self.dump_template(template_ids, template_type='xml')

            if self.filter_xml_template(template_content) != self.filter_xml_template(existing_template):
                changed = True

            return changed

        existing_template = self.dump_template(template_ids, template_type='json')
        # Compare JSON objects as deep sorted python dictionaries
        if template_content is not None and template_type == 'json':
            parsed_template_json = self.load_json_template(template_content)
            if self.diff_template(parsed_template_json, existing_template):
                changed = True

            return changed

        # If neither template_json or template_xml were used, user provided all parameters via module options
        if template_groups is not None:
            existing_groups = [g['name'] for g in existing_template['zabbix_export']['groups']]

            if set(template_groups) != set(existing_groups):
                changed = True

        if 'templates' not in existing_template['zabbix_export']['templates'][0]:
            existing_template['zabbix_export']['templates'][0]['templates'] = []

        # Check if any new templates would be linked or any existing would be unlinked
        exist_child_templates = [t['name'] for t in existing_template['zabbix_export']['templates'][0]['templates']]
        if link_templates is not None:
            if set(link_templates) != set(exist_child_templates):
                changed = True
        else:
            # No link_templates provided means "no linked templates" is desired
            if set([]) != set(exist_child_templates):
                changed = True

        # Mark that there will be changes when at least one existing template will be unlinked
        if clear_templates is not None:
            for t in clear_templates:
                if t in exist_child_templates:
                    changed = True
                    break

        if 'macros' not in existing_template['zabbix_export']['templates'][0]:
            existing_template['zabbix_export']['templates'][0]['macros'] = []

        if template_macros is not None:
            existing_macros = existing_template['zabbix_export']['templates'][0]['macros']
            if template_macros != existing_macros:
                changed = True

        return changed

    def update_template(self, template_ids, group_ids, link_template_ids, clear_template_ids, template_macros):
        """Push the provided subelements to an existing template via template.update."""
        template_changes = {}
        if group_ids is not None:
            template_changes.update({'groups': group_ids})

        if link_template_ids is not None:
            template_changes.update({'templates': link_template_ids})
        else:
            # An empty list unlinks all currently linked templates
            template_changes.update({'templates': []})

        if clear_template_ids is not None:
            template_changes.update({'templates_clear': clear_template_ids})

        if template_macros is not None:
            template_changes.update({'macros': template_macros})

        if template_changes:
            # If we got here we know that only one template was provided via template_name
            template_changes.update({'templateid': template_ids[0]})
            self._zapi.template.update(template_changes)

    def delete_template(self, templateids):
        """Delete the given template IDs; in check mode only report a change."""
        if self._module.check_mode:
            self._module.exit_json(changed=True)
        self._zapi.template.delete(templateids)

    def ordered_json(self, obj):
        """Deep sort JSON-like structures so they can be compared for equality."""
        # Deep sort json dicts for comparison
        if isinstance(obj, dict):
            return sorted((k, self.ordered_json(v)) for k, v in obj.items())
        if isinstance(obj, list):
            return sorted(self.ordered_json(x) for x in obj)
        else:
            return obj

    def dump_template(self, template_ids, template_type='json', omit_date=False):
        """Export templates via configuration.export.

        Returns an XML string for template_type='xml', otherwise a parsed
        JSON dict. The export date field is stripped when omit_date is True.
        """
        if self._module.check_mode:
            self._module.exit_json(changed=True)

        try:
            dump = self._zapi.configuration.export({'format': template_type, 'options': {'templates': template_ids}})
            if template_type == 'xml':
                xmlroot = ET.fromstring(dump.encode('utf-8'))
                # remove date field if requested
                if omit_date:
                    date = xmlroot.find(".date")
                    if date is not None:
                        xmlroot.remove(date)
                return str(ET.tostring(xmlroot, encoding='utf-8').decode('utf-8'))
            else:
                return self.load_json_template(dump, omit_date=omit_date)

        except ZabbixAPIException as e:
            self._module.fail_json(msg='Unable to export template: %s' % e)

    def diff_template(self, template_json_a, template_json_b):
        """Compare 2 zabbix templates and return True if they differ."""
        template_json_a = self.filter_template(template_json_a)
        template_json_b = self.filter_template(template_json_b)
        if self.ordered_json(template_json_a) == self.ordered_json(template_json_b):
            return False
        return True

    def filter_template(self, template_json):
        """Filter the template json to contain only the keys we will update."""
        keep_keys = set(['graphs', 'templates', 'triggers', 'value_maps'])
        unwanted_keys = set(template_json['zabbix_export']) - keep_keys
        for unwanted_key in unwanted_keys:
            del template_json['zabbix_export'][unwanted_key]

        # Versions older than 2.4 do not support description field within template
        desc_not_supported = False
        if LooseVersion(self._zapi.api_version()).version[:2] < LooseVersion('2.4').version:
            desc_not_supported = True

        # Filter empty attributes from template object to allow accurate comparison
        for template in template_json['zabbix_export']['templates']:
            for key in list(template.keys()):
                if not template[key] or (key == 'description' and desc_not_supported):
                    template.pop(key)

        return template_json

    def filter_xml_template(self, template_xml):
        """Filters out keys from XML template that may wary between exports (e.g date or version) and
        keys that are not imported via this module.

        It is advised that provided XML template exactly matches XML structure used by Zabbix"""
        # Strip last new line and convert string to ElementTree
        parsed_xml_root = self.load_xml_template(template_xml.strip())
        keep_keys = ['graphs', 'templates', 'triggers', 'value_maps']

        # Remove unwanted XML nodes
        for node in list(parsed_xml_root):
            if node.tag not in keep_keys:
                parsed_xml_root.remove(node)

        # Filter empty attributes from template objects to allow accurate comparison
        for template in list(parsed_xml_root.find('templates')):
            for element in list(template):
                if element.text is None and len(list(element)) == 0:
                    template.remove(element)

        # Filter new lines and indentation
        xml_root_text = list(line.strip() for line in ET.tostring(parsed_xml_root, encoding='utf8', method='xml').decode().split('\n'))
        return ''.join(xml_root_text)

    def load_json_template(self, template_json, omit_date=False):
        """Parse a JSON template string; optionally drop the export date field."""
        try:
            jsondoc = json.loads(template_json)
            if omit_date and 'date' in jsondoc['zabbix_export']:
                del jsondoc['zabbix_export']['date']
            return jsondoc
        except ValueError as e:
            self._module.fail_json(msg='Invalid JSON provided', details=to_native(e), exception=traceback.format_exc())

    def load_xml_template(self, template_xml):
        """Parse an XML template string into an ElementTree root element."""
        try:
            return ET.fromstring(template_xml)
        except ET.ParseError as e:
            self._module.fail_json(msg='Invalid XML provided', details=to_native(e), exception=traceback.format_exc())

    def import_template(self, template_content, template_type='json'):
        """Import a JSON/XML template dump via configuration.import.

        Adjusts the import rules for older Zabbix API versions before
        submitting.
        """
        # rules schema latest version
        update_rules = {
            'applications': {
                'createMissing': True,
                'deleteMissing': True
            },
            'discoveryRules': {
                'createMissing': True,
                'updateExisting': True,
                'deleteMissing': True
            },
            'graphs': {
                'createMissing': True,
                'updateExisting': True,
                'deleteMissing': True
            },
            'groups': {
                'createMissing': True
            },
            'httptests': {
                'createMissing': True,
                'updateExisting': True,
                'deleteMissing': True
            },
            'items': {
                'createMissing': True,
                'updateExisting': True,
                'deleteMissing': True
            },
            'templates': {
                'createMissing': True,
                'updateExisting': True
            },
            'templateLinkage': {
                'createMissing': True
            },
            'templateScreens': {
                'createMissing': True,
                'updateExisting': True,
                'deleteMissing': True
            },
            'triggers': {
                'createMissing': True,
                'updateExisting': True,
                'deleteMissing': True
            },
            'valueMaps': {
                'createMissing': True,
                'updateExisting': True
            }
        }

        try:
            # old api version support here
            api_version = self._zapi.api_version()
            # updateExisting for application removed from zabbix api after 3.2
            if LooseVersion(api_version).version[:2] <= LooseVersion('3.2').version:
                update_rules['applications']['updateExisting'] = True

            # templateLinkage.deleteMissing only available in 4.0 branch higher .16 and higher 4.4.4
            # it's not available in 4.2 branches or lower 4.0.16
            if LooseVersion(api_version).version[:2] == LooseVersion('4.0').version and \
               LooseVersion(api_version).version[:3] >= LooseVersion('4.0.16').version:
                update_rules['templateLinkage']['deleteMissing'] = True
            if LooseVersion(api_version).version[:3] >= LooseVersion('4.4.4').version:
                update_rules['templateLinkage']['deleteMissing'] = True

            import_data = {'format': template_type, 'source': template_content, 'rules': update_rules}
            self._zapi.configuration.import_(import_data)
        except ZabbixAPIException as e:
            self._module.fail_json(msg='Unable to import template', details=to_native(e),
                                   exception=traceback.format_exc())
|
||||
|
||||
|
||||
def main():
    """Module entry point: create, update, delete or dump a Zabbix template.

    Exactly one of template_name / template_json / template_xml identifies
    the template(s) to act on; the 'state' option selects the action.
    """
    module = AnsibleModule(
        argument_spec=dict(
            server_url=dict(type='str', required=True, aliases=['url']),
            login_user=dict(type='str', required=True),
            login_password=dict(type='str', required=True, no_log=True),
            http_login_user=dict(type='str', required=False, default=None),
            http_login_password=dict(type='str', required=False, default=None, no_log=True),
            validate_certs=dict(type='bool', required=False, default=True),
            template_name=dict(type='str', required=False),
            template_json=dict(type='json', required=False),
            template_xml=dict(type='str', required=False),
            template_groups=dict(type='list', required=False),
            link_templates=dict(type='list', required=False),
            clear_templates=dict(type='list', required=False),
            macros=dict(type='list', required=False),
            omit_date=dict(type='bool', required=False, default=False),
            dump_format=dict(type='str', required=False, default='json', choices=['json', 'xml']),
            state=dict(type='str', default="present", choices=['present', 'absent', 'dump']),
            timeout=dict(type='int', default=10)
        ),
        # Exactly one template source must be supplied, and only one.
        required_one_of=[
            ['template_name', 'template_json', 'template_xml']
        ],
        mutually_exclusive=[
            ['template_name', 'template_json', 'template_xml']
        ],
        required_if=[
            ['state', 'absent', ['template_name']],
            ['state', 'dump', ['template_name']]
        ],
        supports_check_mode=True
    )

    if not HAS_ZABBIX_API:
        module.fail_json(msg=missing_required_lib('zabbix-api', url='https://pypi.org/project/zabbix-api/'), exception=ZBX_IMP_ERR)

    server_url = module.params['server_url']
    login_user = module.params['login_user']
    login_password = module.params['login_password']
    http_login_user = module.params['http_login_user']
    http_login_password = module.params['http_login_password']
    validate_certs = module.params['validate_certs']
    template_name = module.params['template_name']
    template_json = module.params['template_json']
    template_xml = module.params['template_xml']
    template_groups = module.params['template_groups']
    link_templates = module.params['link_templates']
    clear_templates = module.params['clear_templates']
    template_macros = module.params['macros']
    omit_date = module.params['omit_date']
    dump_format = module.params['dump_format']
    state = module.params['state']
    timeout = module.params['timeout']

    zbx = None
    try:
        zbx = ZabbixAPI(server_url, timeout=timeout, user=http_login_user, passwd=http_login_password,
                        validate_certs=validate_certs)
        zbx.login(login_user, login_password)
        # Close the API session when the module process exits.
        atexit.register(zbx.logout)
    except ZabbixAPIException as e:
        module.fail_json(msg="Failed to connect to Zabbix server: %s" % e)

    template = Template(module, zbx)

    # Identify template names for IDs retrieval
    # Template names are expected to reside in ['zabbix_export']['templates'][*]['template'] for both data types
    template_content, template_type = None, None
    if template_json is not None:
        template_type = 'json'
        template_content = template_json
        json_parsed = template.load_json_template(template_content)
        template_names = list(t['template'] for t in json_parsed['zabbix_export']['templates'])

    elif template_xml is not None:
        template_type = 'xml'
        template_content = template_xml
        xml_parsed = template.load_xml_template(template_content)
        template_names = list(t.find('template').text for t in list(xml_parsed.find('templates')))

    else:
        template_names = [template_name]

    template_ids = template.get_template_ids(template_names)

    if state == "absent":
        # Deleting a template that does not exist is a no-op, not a failure.
        if not template_ids:
            module.exit_json(changed=False, msg="Template not found. No changed: %s" % template_name)

        template.delete_template(template_ids)
        module.exit_json(changed=True, result="Successfully deleted template %s" % template_name)

    elif state == "dump":
        module.deprecate("The 'dump' state has been deprecated and will be removed, use 'zabbix_template_info' module instead.", version='2.14')
        if not template_ids:
            module.fail_json(msg='Template not found: %s' % template_name)

        if dump_format == 'json':
            module.exit_json(changed=False, template_json=template.dump_template(template_ids, template_type='json', omit_date=omit_date))
        elif dump_format == 'xml':
            module.exit_json(changed=False, template_xml=template.dump_template(template_ids, template_type='xml', omit_date=omit_date))

    elif state == "present":
        # Load all subelements for template that were provided by user
        group_ids = None
        if template_groups is not None:
            group_ids = template.get_group_ids_by_group_names(template_groups)

        link_template_ids = None
        if link_templates is not None:
            link_template_ids = template.get_template_ids(link_templates)

        clear_template_ids = None
        if clear_templates is not None:
            clear_template_ids = template.get_template_ids(clear_templates)

        if template_macros is not None:
            # Zabbix configuration.export does not differentiate python types (numbers are returned as strings)
            for macroitem in template_macros:
                for key in macroitem:
                    macroitem[key] = str(macroitem[key])

        if not template_ids:
            # Assume new templates are being added when no ID's were found
            if template_content is not None:
                template.import_template(template_content, template_type)
                module.exit_json(changed=True, result="Template import successful")

            else:
                if group_ids is None:
                    module.fail_json(msg='template_groups are required when creating a new Zabbix template')

                template.add_template(template_name, group_ids, link_template_ids, template_macros)
                module.exit_json(changed=True, result="Successfully added template: %s" % template_name)

        else:
            changed = template.check_template_changed(template_ids, template_groups, link_templates, clear_templates,
                                                      template_macros, template_content, template_type)

            if module.check_mode:
                module.exit_json(changed=changed)

            if changed:
                # Raw dumps are re-imported as a whole; option-driven updates
                # go through template.update.
                if template_type is not None:
                    template.import_template(template_content, template_type)
                else:
                    template.update_template(template_ids, group_ids, link_template_ids, clear_template_ids,
                                             template_macros)

            module.exit_json(changed=changed, result="Template successfully updated")


if __name__ == '__main__':
    main()
|
||||
273
plugins/modules/monitoring/zabbix/zabbix_template_info.py
Normal file
273
plugins/modules/monitoring/zabbix/zabbix_template_info.py
Normal file
@@ -0,0 +1,273 @@
|
||||
#!/usr/bin/python
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
# Copyright: (c) 2019, sky-joker
|
||||
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||
|
||||
from __future__ import absolute_import, division, print_function
|
||||
__metaclass__ = type
|
||||
|
||||
ANSIBLE_METADATA = {'metadata_version': '1.1',
|
||||
'status': ['preview'],
|
||||
'supported_by': 'community'}
|
||||
|
||||
DOCUMENTATION = '''
|
||||
module: zabbix_template_info
|
||||
short_description: Gather information about Zabbix template
|
||||
author:
|
||||
- sky-joker (@sky-joker)
|
||||
description:
|
||||
- This module allows you to search for Zabbix template.
|
||||
requirements:
|
||||
- "python >= 2.6"
|
||||
- "zabbix-api >= 0.5.4"
|
||||
options:
|
||||
template_name:
|
||||
description:
|
||||
- Name of the template in Zabbix.
|
||||
required: true
|
||||
type: str
|
||||
format:
|
||||
description:
|
||||
- Format to use when dumping template.
|
||||
choices: ['json', 'xml']
|
||||
default: json
|
||||
type: str
|
||||
omit_date:
|
||||
description:
|
||||
- Removes the date field for the dumped template
|
||||
required: false
|
||||
type: bool
|
||||
default: false
|
||||
extends_documentation_fragment:
|
||||
- community.general.zabbix
|
||||
|
||||
'''
|
||||
|
||||
EXAMPLES = '''
|
||||
- name: Get Zabbix template as JSON
|
||||
zabbix_template_info:
|
||||
server_url: "http://zabbix.example.com/zabbix/"
|
||||
login_user: admin
|
||||
login_password: secret
|
||||
template_name: Template
|
||||
format: json
|
||||
omit_date: yes
|
||||
register: template_json
|
||||
|
||||
- name: Get Zabbix template as XML
|
||||
zabbix_template_info:
|
||||
server_url: "http://zabbix.example.com/zabbix/"
|
||||
login_user: admin
|
||||
login_password: secret
|
||||
template_name: Template
|
||||
format: xml
|
||||
omit_date: no
|
||||
    register: template_xml
|
||||
'''
|
||||
|
||||
RETURN = '''
|
||||
---
|
||||
template_json:
|
||||
description: The JSON of the template
|
||||
returned: when format is json and omit_date is true
|
||||
type: str
|
||||
sample: {
|
||||
"zabbix_export": {
|
||||
"version": "4.0",
|
||||
"groups": [
|
||||
{
|
||||
"name": "Templates"
|
||||
}
|
||||
],
|
||||
"templates": [
|
||||
{
|
||||
"template": "Test Template",
|
||||
"name": "Template for Testing",
|
||||
"description": "Testing template import",
|
||||
"groups": [
|
||||
{
|
||||
"name": "Templates"
|
||||
}
|
||||
],
|
||||
"applications": [
|
||||
{
|
||||
"name": "Test Application"
|
||||
}
|
||||
],
|
||||
"items": [],
|
||||
"discovery_rules": [],
|
||||
"httptests": [],
|
||||
"macros": [],
|
||||
"templates": [],
|
||||
"screens": []
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
|
||||
template_xml:
|
||||
description: The XML of the template
|
||||
returned: when format is xml and omit_date is false
|
||||
type: str
|
||||
sample: >-
|
||||
<zabbix_export>
|
||||
<version>4.0</version>
|
||||
<date>2019-10-27T14:49:57Z</date>
|
||||
<groups>
|
||||
<group>
|
||||
<name>Templates</name>
|
||||
</group>
|
||||
</groups>
|
||||
<templates>
|
||||
<template>
|
||||
<template>Test Template</template>
|
||||
<name>Template for Testing</name>
|
||||
<description>Testing template import</description>
|
||||
<groups>
|
||||
<group>
|
||||
<name>Templates</name>
|
||||
</group>
|
||||
</groups>
|
||||
<applications>
|
||||
<application>
|
||||
<name>Test Application</name>
|
||||
</application>
|
||||
</applications>
|
||||
<items />
|
||||
<discovery_rules />
|
||||
<httptests />
|
||||
<macros />
|
||||
<templates />
|
||||
<screens />
|
||||
</template>
|
||||
</templates>
|
||||
</zabbix_export>
|
||||
'''
|
||||
|
||||
import atexit
|
||||
import traceback
|
||||
import json
|
||||
import xml.etree.ElementTree as ET
|
||||
|
||||
try:
|
||||
from zabbix_api import ZabbixAPI
|
||||
from zabbix_api import Already_Exists
|
||||
from zabbix_api import ZabbixAPIException
|
||||
|
||||
HAS_ZABBIX_API = True
|
||||
except ImportError:
|
||||
ZBX_IMP_ERR = traceback.format_exc()
|
||||
HAS_ZABBIX_API = False
|
||||
|
||||
from ansible.module_utils.basic import AnsibleModule, missing_required_lib
|
||||
from ansible.module_utils._text import to_native
|
||||
|
||||
|
||||
class TemplateInfo(object):
    """Read-only helper for looking up and exporting a Zabbix template."""

    def __init__(self, module, zbx):
        self._module = module
        self._zapi = zbx

    def get_template_id(self, template_name):
        """Return a single-element list with the template's ID, or [] if absent."""
        found = []
        try:
            matches = self._zapi.template.get(
                {
                    'output': 'extend',
                    'filter': {
                        'host': template_name
                    }
                }
            )
        except ZabbixAPIException as e:
            self._module.fail_json(msg='Failed to get template: %s' % e)

        if matches:
            found.append(matches[0]['templateid'])

        return found

    def load_json_template(self, template_json, omit_date=False):
        """Parse a JSON export string; drop the export date when requested."""
        try:
            parsed = json.loads(template_json)
        except ValueError as e:
            self._module.fail_json(msg='Invalid JSON provided', details=to_native(e), exception=traceback.format_exc())
        else:
            # remove date field if requested
            if omit_date and 'date' in parsed['zabbix_export']:
                del parsed['zabbix_export']['date']
            return parsed

    def dump_template(self, template_id, template_type='json', omit_date=False):
        """Export the template: an XML string for 'xml', a parsed dict otherwise."""
        try:
            exported = self._zapi.configuration.export({'format': template_type, 'options': {'templates': template_id}})
            if template_type != 'xml':
                return self.load_json_template(exported, omit_date)
            root = ET.fromstring(exported.encode('utf-8'))
            # remove date field if requested
            if omit_date:
                date_node = root.find(".date")
                if date_node is not None:
                    root.remove(date_node)
            return str(ET.tostring(root, encoding='utf-8').decode('utf-8'))
        except ZabbixAPIException as e:
            self._module.fail_json(msg='Unable to export template: %s' % e)
|
||||
|
||||
|
||||
def main():
    """Module entry point: look up the named template and return its dump.

    Fails when the template does not exist; otherwise exits with either
    template_json or template_xml depending on the 'format' option.
    """
    module = AnsibleModule(
        argument_spec=dict(
            server_url=dict(type='str', required=True, aliases=['url']),
            login_user=dict(type='str', required=True),
            login_password=dict(type='str', required=True, no_log=True),
            http_login_user=dict(type='str', required=False, default=None),
            http_login_password=dict(type='str', required=False, default=None, no_log=True),
            validate_certs=dict(type='bool', required=False, default=True),
            timeout=dict(type='int', default=10),
            template_name=dict(type='str', required=True),
            omit_date=dict(type='bool', required=False, default=False),
            format=dict(type='str', choices=['json', 'xml'], default='json')
        ),
        supports_check_mode=False
    )

    if not HAS_ZABBIX_API:
        module.fail_json(msg=missing_required_lib('zabbix-api', url='https://pypi.org/project/zabbix-api/'),
                         exception=ZBX_IMP_ERR)

    server_url = module.params['server_url']
    login_user = module.params['login_user']
    login_password = module.params['login_password']
    http_login_user = module.params['http_login_user']
    http_login_password = module.params['http_login_password']
    validate_certs = module.params['validate_certs']
    timeout = module.params['timeout']
    template_name = module.params['template_name']
    omit_date = module.params['omit_date']
    # Bind the 'format' module option to a local name that does not shadow
    # the 'format' builtin.
    dump_format = module.params['format']

    try:
        zbx = ZabbixAPI(server_url, timeout=timeout, user=http_login_user, passwd=http_login_password,
                        validate_certs=validate_certs)
        zbx.login(login_user, login_password)
        # Close the API session when the module process exits.
        atexit.register(zbx.logout)
    except Exception as e:
        module.fail_json(msg="Failed to connect to Zabbix server: %s" % e)

    template_info = TemplateInfo(module, zbx)

    template_id = template_info.get_template_id(template_name)

    if not template_id:
        module.fail_json(msg='Template not found: %s' % template_name)

    if dump_format == 'json':
        module.exit_json(changed=False, template_json=template_info.dump_template(template_id, template_type='json', omit_date=omit_date))
    elif dump_format == 'xml':
        module.exit_json(changed=False, template_xml=template_info.dump_template(template_id, template_type='xml', omit_date=omit_date))


if __name__ == "__main__":
    main()
|
||||
663
plugins/modules/monitoring/zabbix/zabbix_user.py
Normal file
663
plugins/modules/monitoring/zabbix/zabbix_user.py
Normal file
@@ -0,0 +1,663 @@
|
||||
#!/usr/bin/python
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
# Copyright: (c) 2019, sky-joker
|
||||
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||
|
||||
|
||||
from __future__ import absolute_import, division, print_function
|
||||
__metaclass__ = type
|
||||
|
||||
ANSIBLE_METADATA = {'metadata_version': '1.1',
|
||||
'status': ['preview'],
|
||||
'supported_by': 'community'}
|
||||
|
||||
DOCUMENTATION = r'''
|
||||
module: zabbix_user
|
||||
short_description: Create/update/delete Zabbix users
|
||||
author:
|
||||
- sky-joker (@sky-joker)
|
||||
description:
|
||||
- This module allows you to create, modify and delete Zabbix users.
|
||||
requirements:
|
||||
- "python >= 2.6"
|
||||
- "zabbix-api >= 0.5.4"
|
||||
options:
|
||||
alias:
|
||||
description:
|
||||
- Name of the user alias in Zabbix.
|
||||
- alias is the unique identifier used and cannot be updated using this module.
|
||||
required: true
|
||||
type: str
|
||||
name:
|
||||
description:
|
||||
- Name of the user.
|
||||
default: ''
|
||||
type: str
|
||||
surname:
|
||||
description:
|
||||
- Surname of the user.
|
||||
default: ''
|
||||
type: str
|
||||
usrgrps:
|
||||
description:
|
||||
- User groups to add the user to.
|
||||
required: true
|
||||
type: list
|
||||
elements: str
|
||||
passwd:
|
||||
description:
|
||||
- User's password.
|
||||
required: true
|
||||
type: str
|
||||
override_passwd:
|
||||
description:
|
||||
- Override password.
|
||||
default: no
|
||||
type: bool
|
||||
lang:
|
||||
description:
|
||||
- Language code of the user's language.
|
||||
default: 'en_GB'
|
||||
choices:
|
||||
- 'en_GB'
|
||||
- 'en_US'
|
||||
- 'zh_CN'
|
||||
- 'cs_CZ'
|
||||
- 'fr_FR'
|
||||
- 'he_IL'
|
||||
- 'it_IT'
|
||||
- 'ko_KR'
|
||||
- 'ja_JP'
|
||||
- 'nb_NO'
|
||||
- 'pl_PL'
|
||||
- 'pt_BR'
|
||||
- 'pt_PT'
|
||||
- 'ru_RU'
|
||||
- 'sk_SK'
|
||||
- 'tr_TR'
|
||||
- 'uk_UA'
|
||||
type: str
|
||||
theme:
|
||||
description:
|
||||
- User's theme.
|
||||
default: 'default'
|
||||
choices:
|
||||
- 'default'
|
||||
- 'blue-theme'
|
||||
- 'dark-theme'
|
||||
type: str
|
||||
autologin:
|
||||
description:
|
||||
- Whether to enable auto-login.
|
||||
- If enable autologin, cannot enable autologout.
|
||||
default: false
|
||||
type: bool
|
||||
autologout:
|
||||
description:
|
||||
- User session life time in seconds. If set to 0, the session will never expire.
|
||||
- If enable autologout, cannot enable autologin.
|
||||
default: '0'
|
||||
type: str
|
||||
refresh:
|
||||
description:
|
||||
- Automatic refresh period in seconds.
|
||||
default: '30'
|
||||
type: str
|
||||
rows_per_page:
|
||||
description:
|
||||
- Amount of object rows to show per page.
|
||||
default: '50'
|
||||
type: str
|
||||
after_login_url:
|
||||
description:
|
||||
- URL of the page to redirect the user to after logging in.
|
||||
default: ''
|
||||
type: str
|
||||
user_medias:
|
||||
description:
|
||||
- Set the user's media.
|
||||
default: []
|
||||
suboptions:
|
||||
mediatype:
|
||||
description:
|
||||
- Media type name to set.
|
||||
default: 'Email'
|
||||
type: str
|
||||
sendto:
|
||||
description:
|
||||
- Address, user name or other identifier of the recipient.
|
||||
required: true
|
||||
type: str
|
||||
period:
|
||||
description:
|
||||
- Time when the notifications can be sent as a time period or user macros separated by a semicolon.
|
||||
- Please review the documentation for more information on the supported time period.
|
||||
- https://www.zabbix.com/documentation/4.0/manual/appendix/time_period
|
||||
default: '1-7,00:00-24:00'
|
||||
type: str
|
||||
severity:
|
||||
description:
|
||||
- Trigger severities to send notifications about.
|
||||
suboptions:
|
||||
not_classified:
|
||||
description:
|
||||
- severity not_classified enable/disable.
|
||||
default: True
|
||||
type: bool
|
||||
information:
|
||||
description:
|
||||
- severity information enable/disable.
|
||||
default: True
|
||||
type: bool
|
||||
warning:
|
||||
description:
|
||||
- severity warning enable/disable.
|
||||
default: True
|
||||
type: bool
|
||||
average:
|
||||
description:
|
||||
- severity average enable/disable.
|
||||
default: True
|
||||
type: bool
|
||||
high:
|
||||
description:
|
||||
- severity high enable/disable.
|
||||
default: True
|
||||
type: bool
|
||||
disaster:
|
||||
description:
|
||||
- severity disaster enable/disable.
|
||||
default: True
|
||||
type: bool
|
||||
default:
|
||||
not_classified: True
|
||||
information: True
|
||||
warning: True
|
||||
average: True
|
||||
high: True
|
||||
disaster: True
|
||||
type: dict
|
||||
active:
|
||||
description:
|
||||
- Whether the media is enabled.
|
||||
default: true
|
||||
type: bool
|
||||
type: list
|
||||
elements: dict
|
||||
type:
|
||||
description:
|
||||
- Type of the user.
|
||||
default: 'Zabbix user'
|
||||
choices:
|
||||
- 'Zabbix user'
|
||||
- 'Zabbix admin'
|
||||
- 'Zabbix super admin'
|
||||
type: str
|
||||
state:
|
||||
description:
|
||||
- State of the user.
|
||||
- On C(present), it will create if user does not exist or update the user if the associated data is different.
|
||||
- On C(absent) will remove a user if it exists.
|
||||
default: 'present'
|
||||
choices: ['present', 'absent']
|
||||
type: str
|
||||
extends_documentation_fragment:
|
||||
- community.general.zabbix
|
||||
|
||||
'''
|
||||
|
||||
EXAMPLES = r'''
|
||||
- name: create of zabbix user.
|
||||
zabbix_user:
|
||||
server_url: "http://zabbix.example.com/zabbix/"
|
||||
login_user: Admin
|
||||
login_password: secret
|
||||
alias: example
|
||||
name: user name
|
||||
surname: user surname
|
||||
usrgrps:
|
||||
- Guests
|
||||
- Disabled
|
||||
passwd: password
|
||||
lang: en_GB
|
||||
theme: blue-theme
|
||||
autologin: no
|
||||
autologout: '0'
|
||||
refresh: '30'
|
||||
rows_per_page: '200'
|
||||
after_login_url: ''
|
||||
user_medias:
|
||||
- mediatype: Email
|
||||
sendto: example@example.com
|
||||
period: 1-7,00:00-24:00
|
||||
severity:
|
||||
not_classified: no
|
||||
information: yes
|
||||
warning: yes
|
||||
average: yes
|
||||
high: yes
|
||||
disaster: yes
|
||||
active: no
|
||||
type: Zabbix super admin
|
||||
state: present
|
||||
|
||||
- name: delete of zabbix user.
|
||||
zabbix_user:
|
||||
server_url: "http://zabbix.example.com/zabbix/"
|
||||
login_user: admin
|
||||
login_password: secret
|
||||
alias: example
|
||||
usrgrps:
|
||||
- Guests
|
||||
passwd: password
|
||||
user_medias:
|
||||
- sendto: example@example.com
|
||||
state: absent
|
||||
'''
|
||||
|
||||
RETURN = r'''
|
||||
user_ids:
|
||||
description: User id created or changed
|
||||
returned: success
|
||||
type: dict
|
||||
sample: { "userids": [ "5" ] }
|
||||
'''
|
||||
|
||||
import atexit
|
||||
import traceback
|
||||
|
||||
try:
|
||||
from zabbix_api import ZabbixAPI
|
||||
from zabbix_api import Already_Exists
|
||||
|
||||
HAS_ZABBIX_API = True
|
||||
except ImportError:
|
||||
ZBX_IMP_ERR = traceback.format_exc()
|
||||
HAS_ZABBIX_API = False
|
||||
|
||||
from distutils.version import LooseVersion
|
||||
from ansible.module_utils.basic import AnsibleModule, missing_required_lib
|
||||
import copy
|
||||
|
||||
|
||||
class User(object):
    """Wraps the Zabbix API calls needed to create, compare, update and
    delete a Zabbix user on behalf of the zabbix_user module."""

    def __init__(self, module, zbx):
        # AnsibleModule instance, used for fail_json and check_mode.
        self._module = module
        # Authenticated ZabbixAPI client.
        self._zapi = zbx
        # First three characters of the server version, e.g. '4.0' — used
        # below to pick version-dependent API behavior.
        self._zbx_api_version = zbx.api_version()[:3]

    def get_usergroupid_by_user_group_name(self, usrgrps):
        """Resolve user group names into [{'usrgrpid': ...}] dicts.

        Fails the module if any name does not match an existing group.
        """
        user_group_ids = []
        for user_group_name in usrgrps:
            user_group = self._zapi.usergroup.get({'output': 'extend', 'filter': {'name': user_group_name}})
            if user_group:
                user_group_ids.append({'usrgrpid': user_group[0]['usrgrpid']})
            else:
                self._module.fail_json(msg="User group not found: %s" % user_group_name)
        return user_group_ids

    def check_user_exist(self, alias):
        """Return the API result list for the user with this alias.

        Empty list means the user does not exist; otherwise element 0 holds
        the user with its medias and user groups expanded.
        """
        zbx_user = self._zapi.user.get({'output': 'extend', 'filter': {'alias': alias},
                                        'getAccess': True, 'selectMedias': 'extend',
                                        'selectUsrgrps': 'extend'})

        return zbx_user

    def convert_user_medias_parameter_types(self, user_medias):
        """Convert module-style media dicts into the API representation.

        Works on a deep copy, so the caller's list is left untouched.
        Replaces the 'mediatype' name with a 'mediatypeid', folds the
        per-severity booleans into a decimal bitmask string, and converts
        the 'active' boolean to the API's string flag.
        """
        copy_user_medias = copy.deepcopy(user_medias)
        for user_media in copy_user_medias:
            media_types = self._zapi.mediatype.get({'output': 'extend'})
            for media_type in media_types:
                # Before Zabbix 4.4 the media type name lives in the
                # 'description' field; from 4.4 on it is 'name'.
                if LooseVersion(self._zbx_api_version) < LooseVersion('4.4'):
                    if media_type['description'] == user_media['mediatype']:
                        user_media['mediatypeid'] = media_type['mediatypeid']
                        break
                else:
                    if media_type['name'] == user_media['mediatype']:
                        user_media['mediatypeid'] = media_type['mediatypeid']
                        break

            if 'mediatypeid' not in user_media:
                self._module.fail_json(msg="Media type not found: %s" % user_media['mediatype'])
            else:
                del user_media['mediatype']

            # Build a bitmask string: 'disaster' becomes the most significant
            # bit, 'not_classified' the least, then render it as a decimal.
            severity_binary_number = ''
            for severity_key in 'disaster', 'high', 'average', 'warning', 'information', 'not_classified':
                if user_media['severity'][severity_key]:
                    severity_binary_number = severity_binary_number + '1'
                else:
                    severity_binary_number = severity_binary_number + '0'
            user_media['severity'] = str(int(severity_binary_number, 2))

            # The API flag is inverted relative to the boolean: '0' enables
            # the media, '1' disables it.
            if user_media['active']:
                user_media['active'] = '0'
            else:
                user_media['active'] = '1'

        return copy_user_medias

    def user_parameter_difference_check(self, zbx_user, alias, name, surname, user_group_ids, passwd, lang, theme,
                                        autologin, autologout, refresh, rows_per_page, url, user_medias, user_type,
                                        override_passwd):
        """Compare the existing user record with the requested parameters.

        Returns (changed, diff_params) where changed is True when the two
        differ and diff_params is a {'before': ..., 'after': ...} dict
        suitable for Ansible's --diff output.
        """

        user_medias = self.convert_user_medias_parameter_types(user_medias)

        # existing data — normalized copy of the server-side record so it can
        # be compared field-for-field with the request below.
        existing_data = copy.deepcopy(zbx_user[0])
        usrgrpids = []
        for usrgrp in existing_data['usrgrps']:
            usrgrpids.append({'usrgrpid': usrgrp['usrgrpid']})

        existing_data['usrgrps'] = sorted(usrgrpids, key=lambda x: x['usrgrpid'])

        # Processing for zabbix 4.0 and above.
        # In zabbix 4.0 and above, Email sendto is of type list.
        # This module, one media supports only one Email sendto.
        # Therefore following processing extract one Email from list.
        if LooseVersion(self._zbx_api_version) >= LooseVersion('4.0'):
            for media in existing_data['medias']:
                if isinstance(media['sendto'], list):
                    media['sendto'] = media['sendto'][0]

        existing_data['user_medias'] = sorted(existing_data['medias'], key=lambda x: x['sendto'])
        # Drop server-managed fields that the request never contains, so the
        # equality check below compares only user-settable data.
        for del_key in ['medias', 'attempt_clock', 'attempt_failed', 'attempt_ip', 'debug_mode', 'users_status',
                        'gui_access']:
            del existing_data[del_key]

        for user_media in existing_data['user_medias']:
            for del_key in ['mediaid', 'userid']:
                del user_media[del_key]

        # request data — the state we want the user to have.
        request_data = {
            'userid': zbx_user[0]['userid'],
            'alias': alias,
            'name': name,
            'surname': surname,
            'usrgrps': sorted(user_group_ids, key=lambda x: x['usrgrpid']),
            'lang': lang,
            'theme': theme,
            'autologin': autologin,
            'autologout': autologout,
            'refresh': refresh,
            'rows_per_page': rows_per_page,
            'url': url,
            'user_medias': sorted(user_medias, key=lambda x: x['sendto']),
            'type': user_type
        }

        if override_passwd:
            request_data['passwd'] = passwd

        user_parameter_difference_check_result = True
        if existing_data == request_data:
            user_parameter_difference_check_result = False

        diff_params = {
            "before": existing_data,
            "after": request_data
        }

        return user_parameter_difference_check_result, diff_params

    def add_user(self, alias, name, surname, user_group_ids, passwd, lang, theme, autologin, autologout, refresh,
                 rows_per_page, url, user_medias, user_type):
        """Create a new user.

        In check mode nothing is sent to the API; instead a diff dict is
        built.  Returns (user_ids, diff_params).
        """

        user_medias = self.convert_user_medias_parameter_types(user_medias)

        user_ids = {}

        request_data = {
            'alias': alias,
            'name': name,
            'surname': surname,
            'usrgrps': user_group_ids,
            'passwd': passwd,
            'lang': lang,
            'theme': theme,
            'autologin': autologin,
            'autologout': autologout,
            'refresh': refresh,
            'rows_per_page': rows_per_page,
            'url': url,
            'user_medias': user_medias,
            'type': user_type
        }

        diff_params = {}
        if not self._module.check_mode:
            try:
                user_ids = self._zapi.user.create(request_data)
            except Exception as e:
                self._module.fail_json(msg="Failed to create user %s: %s" % (alias, e))
        else:
            diff_params = {
                "before": "",
                "after": request_data
            }

        return user_ids, diff_params

    def update_user(self, zbx_user, alias, name, surname, user_group_ids, passwd, lang, theme, autologin, autologout,
                    refresh, rows_per_page, url, user_medias, user_type, override_passwd):
        """Update an existing user, choosing the API call by server version.

        Returns the API result of the last update call.
        NOTE(review): a server reporting version 3.3 matches neither the
        '<= 3.2' nor the '>= 3.4' branch below, so no update would be sent —
        confirm whether 3.3 needs handling.
        """

        user_medias = self.convert_user_medias_parameter_types(user_medias)

        user_ids = {}

        request_data = {
            'userid': zbx_user[0]['userid'],
            'alias': alias,
            'name': name,
            'surname': surname,
            'usrgrps': user_group_ids,
            'lang': lang,
            'theme': theme,
            'autologin': autologin,
            'autologout': autologout,
            'refresh': refresh,
            'rows_per_page': rows_per_page,
            'url': url,
            'type': user_type
        }

        if override_passwd:
            request_data['passwd'] = passwd

        # In the case of zabbix 3.2 or less, it is necessary to use updatemedia method to update media.
        if LooseVersion(self._zbx_api_version) <= LooseVersion('3.2'):
            try:
                user_ids = self._zapi.user.update(request_data)
            except Exception as e:
                self._module.fail_json(msg="Failed to update user %s: %s" % (alias, e))

            try:
                user_ids = self._zapi.user.updatemedia({
                    'users': [{'userid': zbx_user[0]['userid']}],
                    'medias': user_medias
                })
            except Exception as e:
                self._module.fail_json(msg="Failed to update user medias %s: %s" % (alias, e))

        # From 3.4 on, medias can be passed directly to user.update.
        if LooseVersion(self._zbx_api_version) >= LooseVersion('3.4'):
            try:
                request_data['user_medias'] = user_medias
                user_ids = self._zapi.user.update(request_data)
            except Exception as e:
                self._module.fail_json(msg="Failed to update user %s: %s" % (alias, e))

        return user_ids

    def delete_user(self, zbx_user, alias):
        """Delete the user; in check mode only build the diff.

        Returns (user_ids, diff_params).
        """
        user_ids = {}
        diff_params = {}

        if not self._module.check_mode:
            try:
                user_ids = self._zapi.user.delete([zbx_user[0]['userid']])
            except Exception as e:
                self._module.fail_json(msg="Failed to delete user %s: %s" % (alias, e))
        else:
            diff_params = {
                "before": zbx_user[0],
                "after": ""
            }

        return user_ids, diff_params
|
||||
|
||||
|
||||
def main():
    """Entry point of the zabbix_user module.

    Ensures the user identified by 'alias' is present (created or updated)
    or absent, honoring check mode and producing --diff output.
    """
    module = AnsibleModule(
        argument_spec=dict(
            server_url=dict(type='str', required=True, aliases=['url']),
            login_user=dict(type='str', required=True),
            login_password=dict(type='str', required=True, no_log=True),
            http_login_user=dict(type='str', required=False, default=None),
            http_login_password=dict(type='str', required=False, default=None, no_log=True),
            validate_certs=dict(type='bool', required=False, default=True),
            alias=dict(type='str', required=True),
            name=dict(type='str', default=''),
            surname=dict(type='str', default=''),
            usrgrps=dict(type='list', required=True),
            passwd=dict(type='str', required=True, no_log=True),
            override_passwd=dict(type='bool', required=False, default=False),
            lang=dict(type='str', default='en_GB', choices=['en_GB', 'en_US', 'zh_CN', 'cs_CZ', 'fr_FR',
                                                            'he_IL', 'it_IT', 'ko_KR', 'ja_JP', 'nb_NO',
                                                            'pl_PL', 'pt_BR', 'pt_PT', 'ru_RU', 'sk_SK',
                                                            'tr_TR', 'uk_UA']),
            theme=dict(type='str', default='default', choices=['default', 'blue-theme', 'dark-theme']),
            autologin=dict(type='bool', default=False),
            autologout=dict(type='str', default='0'),
            refresh=dict(type='str', default='30'),
            rows_per_page=dict(type='str', default='50'),
            after_login_url=dict(type='str', default=''),
            user_medias=dict(type='list', default=[],
                             elements='dict',
                             options=dict(
                                 mediatype=dict(type='str', default='Email'),
                                 sendto=dict(type='str', required=True),
                                 period=dict(type='str', default='1-7,00:00-24:00'),
                                 severity=dict(type='dict',
                                               options=dict(
                                                   not_classified=dict(type='bool', default=True),
                                                   information=dict(type='bool', default=True),
                                                   warning=dict(type='bool', default=True),
                                                   average=dict(type='bool', default=True),
                                                   high=dict(type='bool', default=True),
                                                   disaster=dict(type='bool', default=True)),
                                               default=dict(
                                                   not_classified=True,
                                                   information=True,
                                                   warning=True,
                                                   average=True,
                                                   high=True,
                                                   disaster=True
                                               )),
                                 active=dict(type='bool', default=True)
                             )),
            type=dict(type='str', default='Zabbix user', choices=['Zabbix user', 'Zabbix admin', 'Zabbix super admin']),
            state=dict(type='str', default="present", choices=['present', 'absent']),
            timeout=dict(type='int', default=10)
        ),
        supports_check_mode=True
    )

    if not HAS_ZABBIX_API:
        module.fail_json(msg=missing_required_lib('zabbix-api', url='https://pypi.org/project/zabbix-api/'),
                         exception=ZBX_IMP_ERR)

    server_url = module.params['server_url']
    login_user = module.params['login_user']
    login_password = module.params['login_password']
    http_login_user = module.params['http_login_user']
    http_login_password = module.params['http_login_password']
    validate_certs = module.params['validate_certs']
    alias = module.params['alias']
    name = module.params['name']
    surname = module.params['surname']
    usrgrps = module.params['usrgrps']
    passwd = module.params['passwd']
    override_passwd = module.params['override_passwd']
    lang = module.params['lang']
    theme = module.params['theme']
    autologin = module.params['autologin']
    autologout = module.params['autologout']
    refresh = module.params['refresh']
    rows_per_page = module.params['rows_per_page']
    after_login_url = module.params['after_login_url']
    user_medias = module.params['user_medias']
    user_type = module.params['type']
    state = module.params['state']
    timeout = module.params['timeout']

    # The API expects string flags, not booleans.
    if autologin:
        autologin = '1'
    else:
        autologin = '0'

    # Map the human-readable role name to the API's numeric user type.
    user_type_dict = {
        'Zabbix user': '1',
        'Zabbix admin': '2',
        'Zabbix super admin': '3'
    }
    user_type = user_type_dict[user_type]

    zbx = None

    # login to zabbix
    try:
        zbx = ZabbixAPI(server_url, timeout=timeout, user=http_login_user, passwd=http_login_password,
                        validate_certs=validate_certs)
        zbx.login(login_user, login_password)
        # Close the session however the module exits.
        atexit.register(zbx.logout)
    except Exception as e:
        module.fail_json(msg="Failed to connect to Zabbix server: %s" % e)

    user = User(module, zbx)

    user_ids = {}
    zbx_user = user.check_user_exist(alias)
    if state == 'present':
        user_group_ids = user.get_usergroupid_by_user_group_name(usrgrps)
        if zbx_user:
            # User exists: update only when the stored record actually differs.
            diff_check_result, diff_params = user.user_parameter_difference_check(zbx_user, alias, name, surname,
                                                                                  user_group_ids, passwd, lang, theme,
                                                                                  autologin, autologout, refresh,
                                                                                  rows_per_page, after_login_url,
                                                                                  user_medias, user_type,
                                                                                  override_passwd)

            if not module.check_mode and diff_check_result:
                user_ids = user.update_user(zbx_user, alias, name, surname, user_group_ids, passwd, lang,
                                            theme, autologin, autologout, refresh, rows_per_page, after_login_url,
                                            user_medias, user_type, override_passwd)
        else:
            # User does not exist yet; add_user handles check mode itself.
            diff_check_result = True
            user_ids, diff_params = user.add_user(alias, name, surname, user_group_ids, passwd, lang, theme, autologin,
                                                  autologout, refresh, rows_per_page, after_login_url, user_medias,
                                                  user_type)

    if state == 'absent':
        if zbx_user:
            diff_check_result = True
            user_ids, diff_params = user.delete_user(zbx_user, alias)
        else:
            # Nothing to delete: no change, empty diff.
            diff_check_result = False
            diff_params = {}

    # Normal runs report change based on whether the API returned IDs;
    # check-mode runs report based on the computed difference plus the diff.
    if not module.check_mode:
        if user_ids:
            module.exit_json(changed=True, user_ids=user_ids)
        else:
            module.exit_json(changed=False)
    else:
        if diff_check_result:
            module.exit_json(changed=True, diff=diff_params)
        else:
            module.exit_json(changed=False, diff=diff_params)


if __name__ == "__main__":
    main()
|
||||
175
plugins/modules/monitoring/zabbix/zabbix_user_info.py
Normal file
175
plugins/modules/monitoring/zabbix/zabbix_user_info.py
Normal file
@@ -0,0 +1,175 @@
|
||||
#!/usr/bin/python
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
# Copyright: (c) 2019, sky-joker
|
||||
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||
|
||||
|
||||
from __future__ import absolute_import, division, print_function
|
||||
__metaclass__ = type
|
||||
|
||||
ANSIBLE_METADATA = {'metadata_version': '1.1',
|
||||
'status': ['preview'],
|
||||
'supported_by': 'community'}
|
||||
|
||||
DOCUMENTATION = '''
|
||||
module: zabbix_user_info
|
||||
short_description: Gather information about Zabbix user
|
||||
author:
|
||||
- sky-joker (@sky-joker)
|
||||
description:
|
||||
- This module allows you to search for Zabbix user entries.
|
||||
requirements:
|
||||
- "python >= 2.6"
|
||||
- "zabbix-api >= 0.5.4"
|
||||
options:
|
||||
alias:
|
||||
description:
|
||||
- Name of the user alias in Zabbix.
|
||||
required: true
|
||||
type: str
|
||||
extends_documentation_fragment:
|
||||
- community.general.zabbix
|
||||
|
||||
'''
|
||||
|
||||
EXAMPLES = '''
|
||||
- name: Get zabbix user info
|
||||
zabbix_user_info:
|
||||
server_url: "http://zabbix.example.com/zabbix/"
|
||||
login_user: admin
|
||||
login_password: secret
|
||||
alias: example
|
||||
'''
|
||||
|
||||
RETURN = '''
|
||||
zabbix_user:
|
||||
description: example
|
||||
returned: always
|
||||
type: dict
|
||||
sample: {
|
||||
"alias": "example",
|
||||
"attempt_clock": "0",
|
||||
"attempt_failed": "0",
|
||||
"attempt_ip": "",
|
||||
"autologin": "0",
|
||||
"autologout": "0",
|
||||
"debug_mode": "0",
|
||||
"gui_access": "0",
|
||||
"lang": "en_GB",
|
||||
"medias": [
|
||||
{
|
||||
"active": "0",
|
||||
"mediaid": "668",
|
||||
"mediatypeid": "1",
|
||||
"period": "1-7,00:00-24:00",
|
||||
"sendto": "example@example.com",
|
||||
"severity": "63",
|
||||
"userid": "660"
|
||||
}
|
||||
],
|
||||
"name": "user",
|
||||
"refresh": "30s",
|
||||
"rows_per_page": "50",
|
||||
"surname": "example",
|
||||
"theme": "default",
|
||||
"type": "1",
|
||||
"url": "",
|
||||
"userid": "660",
|
||||
"users_status": "0",
|
||||
"usrgrps": [
|
||||
{
|
||||
"debug_mode": "0",
|
||||
"gui_access": "0",
|
||||
"name": "Guests",
|
||||
"users_status": "0",
|
||||
"usrgrpid": "8"
|
||||
}
|
||||
]
|
||||
}
|
||||
'''
|
||||
|
||||
import atexit
|
||||
import traceback
|
||||
|
||||
try:
|
||||
from zabbix_api import ZabbixAPI
|
||||
|
||||
HAS_ZABBIX_API = True
|
||||
except ImportError:
|
||||
ZBX_IMP_ERR = traceback.format_exc()
|
||||
HAS_ZABBIX_API = False
|
||||
|
||||
from ansible.module_utils.basic import AnsibleModule, missing_required_lib
|
||||
|
||||
|
||||
class User(object):
    """Thin read-only wrapper around the Zabbix user API."""

    def __init__(self, module, zbx):
        self._module = module
        self._zapi = zbx

    def get_user_by_user_alias(self, alias):
        """Look up a single user by alias.

        :param alias: user alias to search for.
        :return: the user's dict (with medias and user groups expanded),
            or an empty dict when no user matches.
        """
        query = {
            'output': 'extend',
            'filter': {'alias': alias},
            'getAccess': True,
            'selectMedias': 'extend',
            'selectUsrgrps': 'extend',
        }
        result = ""
        try:
            result = self._zapi.user.get(query)
        except Exception as e:
            # Close the API session before aborting the module run.
            self._zapi.logout()
            self._module.fail_json(msg="Failed to get user information: %s" % e)

        return result[0] if result else {}
|
||||
|
||||
|
||||
def main():
    """Entry point of the zabbix_user_info module.

    Looks up one user by alias and exits with the record (or {} when the
    user does not exist).  Read-only: never reports a change.
    """
    module = AnsibleModule(
        argument_spec=dict(
            server_url=dict(type='str', required=True, aliases=['url']),
            login_user=dict(type='str', required=True),
            login_password=dict(type='str', required=True, no_log=True),
            http_login_user=dict(type='str', required=False, default=None),
            http_login_password=dict(type='str', required=False, default=None, no_log=True),
            validate_certs=dict(type='bool', required=False, default=True),
            alias=dict(type='str', required=True),
            timeout=dict(type='int', default=10)
        ),
        supports_check_mode=True
    )

    if not HAS_ZABBIX_API:
        module.fail_json(msg=missing_required_lib('zabbix-api', url='https://pypi.org/project/zabbix-api/'),
                         exception=ZBX_IMP_ERR)

    server_url = module.params['server_url']
    login_user = module.params['login_user']
    login_password = module.params['login_password']
    http_login_user = module.params['http_login_user']
    http_login_password = module.params['http_login_password']
    validate_certs = module.params['validate_certs']
    alias = module.params['alias']
    timeout = module.params['timeout']

    zbx = None

    # login to zabbix
    try:
        zbx = ZabbixAPI(server_url, timeout=timeout, user=http_login_user, passwd=http_login_password,
                        validate_certs=validate_certs)
        zbx.login(login_user, login_password)
        zbx.logout
        # NOTE(review): logout is both registered with atexit here and called
        # explicitly below, so it may run twice on a normal exit — confirm the
        # second call is harmless against the zabbix-api client.
        atexit.register(zbx.logout)
    except Exception as e:
        module.fail_json(msg="Failed to connect to Zabbix server: %s" % e)

    user = User(module, zbx)
    zabbix_user = user.get_user_by_user_alias(alias)
    zbx.logout()
    module.exit_json(changed=False, zabbix_user=zabbix_user)


if __name__ == "__main__":
    main()
|
||||
339
plugins/modules/monitoring/zabbix/zabbix_valuemap.py
Normal file
339
plugins/modules/monitoring/zabbix/zabbix_valuemap.py
Normal file
@@ -0,0 +1,339 @@
|
||||
#!/usr/bin/python
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
# (c) 2019, Ruben Tsirunyan <rubentsirunyan@gmail.com>
|
||||
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||
|
||||
from __future__ import absolute_import, division, print_function
|
||||
__metaclass__ = type
|
||||
|
||||
|
||||
ANSIBLE_METADATA = {'metadata_version': '1.1',
|
||||
'status': ['preview'],
|
||||
'supported_by': 'community'}
|
||||
|
||||
|
||||
DOCUMENTATION = r'''
|
||||
---
|
||||
module: zabbix_valuemap
|
||||
short_description: Create/update/delete Zabbix value maps
|
||||
description:
|
||||
- This module allows you to create, modify and delete Zabbix value maps.
|
||||
author:
|
||||
- "Ruben Tsirunyan (@rubentsirunyan)"
|
||||
requirements:
|
||||
- "zabbix-api >= 0.5.4"
|
||||
options:
|
||||
name:
|
||||
type: 'str'
|
||||
description:
|
||||
- Name of the value map.
|
||||
required: true
|
||||
state:
|
||||
type: 'str'
|
||||
description:
|
||||
- State of the value map.
|
||||
- On C(present), it will create a value map if it does not exist or update the value map if the associated data is different.
|
||||
- On C(absent), it will remove the value map if it exists.
|
||||
choices: ['present', 'absent']
|
||||
default: 'present'
|
||||
mappings:
|
||||
type: 'list'
|
||||
elements: dict
|
||||
description:
|
||||
- List of value mappings for the value map.
|
||||
- Required when I(state=present).
|
||||
suboptions:
|
||||
value:
|
||||
type: 'str'
|
||||
description: Original value.
|
||||
required: true
|
||||
map_to:
|
||||
type: 'str'
|
||||
description: Value to which the original value is mapped to.
|
||||
required: true
|
||||
|
||||
extends_documentation_fragment:
|
||||
- community.general.zabbix
|
||||
|
||||
'''
|
||||
|
||||
RETURN = r'''
|
||||
'''
|
||||
|
||||
EXAMPLES = r'''
|
||||
- name: Create a value map
|
||||
local_action:
|
||||
module: zabbix_valuemap
|
||||
server_url: http://zabbix.example.com
|
||||
login_user: username
|
||||
login_password: password
|
||||
name: Numbers
|
||||
mappings:
|
||||
- value: 1
|
||||
map_to: one
|
||||
- value: 2
|
||||
map_to: two
|
||||
state: present
|
||||
'''
|
||||
|
||||
|
||||
import atexit
|
||||
import traceback
|
||||
|
||||
try:
|
||||
from zabbix_api import ZabbixAPI
|
||||
HAS_ZABBIX_API = True
|
||||
except ImportError:
|
||||
ZBX_IMP_ERR = traceback.format_exc()
|
||||
HAS_ZABBIX_API = False
|
||||
|
||||
from ansible.module_utils.basic import AnsibleModule, missing_required_lib
|
||||
|
||||
|
||||
def construct_parameters(**kwargs):
    """Translate module arguments into the shape expected by the Zabbix API.

    Args:
        **kwargs: Arguments passed to the module ('name' and 'mappings').

    Returns:
        A dictionary of arguments understood by the Zabbix API.  The
        'mappings' key is omitted entirely when no mappings were supplied.
    """
    valuemap_name = kwargs['name']
    mappings = kwargs['mappings']
    if mappings is None:
        return {'name': valuemap_name}
    api_mappings = [
        {'value': entry['value'], 'newvalue': entry['map_to']}
        for entry in mappings
    ]
    return {'name': valuemap_name, 'mappings': api_mappings}
|
||||
|
||||
|
||||
def check_if_valuemap_exists(module, zbx, name):
    """Check whether a value map with the given name exists.

    Args:
        module: AnsibleModule object
        zbx: ZabbixAPI object
        name: Zabbix valuemap name

    Returns:
        tuple: (True, valuemap dict) when the valuemap exists,
        (False, None) otherwise.
    """
    query = {
        'output': 'extend',
        'selectMappings': 'extend',
        'filter': {'name': [name]},
    }
    try:
        matches = zbx.valuemap.get(query)
    except Exception as e:
        module.fail_json(msg="Failed to get ID of the valuemap '{name}': {e}".format(name=name, e=e))
    if matches:
        return True, matches[0]
    return False, None
|
||||
|
||||
|
||||
def diff(existing, new):
    """Construct the diff for Ansible's --diff option.

    Args:
        existing (dict): Existing valuemap data.
        new (dict): New valuemap data.

    Returns:
        A dictionary like {'before': existing, 'after': new} restricted to
        the keys present in ``new``; None values in ``new`` are rendered as
        empty strings on the 'after' side.
    """
    before = {key: existing[key] for key in new}
    after = {key: ('' if value is None else value) for key, value in new.items()}
    return {'before': before, 'after': after}
|
||||
|
||||
|
||||
def get_update_params(module, zbx, existing_valuemap, **kwargs):
    """Filter out only the parameters that differ and need to be updated.

    Args:
        module: AnsibleModule object.
        zbx: ZabbixAPI object.
        existing_valuemap (dict): Existing valuemap.
        **kwargs: Parameters for the new valuemap.

    Returns:
        A tuple where the first element is a dictionary of parameters that
        need to be updated and the second one is the dictionary returned by
        diff() for the existing valuemap data versus the new parameters.
    """
    params_to_update = {}

    # Mappings are unordered server-side; sort both sides by 'value'
    # before comparing so ordering alone never triggers an update.
    by_value = lambda entry: entry['value']
    current = sorted(existing_valuemap['mappings'], key=by_value)
    desired = sorted(kwargs['mappings'], key=by_value)
    if current != desired:
        params_to_update['mappings'] = kwargs['mappings']

    return params_to_update, diff(existing_valuemap, kwargs)
|
||||
|
||||
|
||||
def delete_valuemap(module, zbx, valuemap_id):
    """Delete the value map with the given ID, failing the module on API errors."""
    try:
        result = zbx.valuemap.delete([valuemap_id])
    except Exception as e:
        module.fail_json(msg="Failed to delete valuemap '{_id}': {e}".format(_id=valuemap_id, e=e))
    else:
        return result
|
||||
|
||||
|
||||
def update_valuemap(module, zbx, **kwargs):
    """Update an existing value map, failing the module on API errors.

    Args:
        module: AnsibleModule object.
        zbx: ZabbixAPI object.
        **kwargs: Parameters for valuemap.update; must include 'valuemapid'
            identifying the value map to change.

    Returns:
        The Zabbix API response from valuemap.update.
    """
    try:
        # Bug fix: the result was previously assigned to a local and
        # discarded, so callers doing `x = update_valuemap(...)` got None.
        return zbx.valuemap.update(kwargs)
    except Exception as e:
        module.fail_json(msg="Failed to update valuemap '{_id}': {e}".format(_id=kwargs['valuemapid'], e=e))
|
||||
|
||||
|
||||
def create_valuemap(module, zbx, **kwargs):
    """Create a new value map, failing the module on API errors.

    Args:
        module: AnsibleModule object.
        zbx: ZabbixAPI object.
        **kwargs: Parameters for valuemap.create ('name' and 'mappings',
            as produced by construct_parameters()).

    Returns:
        The Zabbix API response from valuemap.create.
    """
    try:
        # Bug fix: the result was previously assigned to a local and
        # discarded, so callers doing `x = create_valuemap(...)` got None.
        return zbx.valuemap.create(kwargs)
    except Exception as e:
        # Bug fix: the handler read the non-existent kwargs['description']
        # (construct_parameters only sets 'name'/'mappings'), so a failed
        # create raised KeyError and masked the real API error.
        module.fail_json(msg="Failed to create valuemap '{name}': {e}".format(name=kwargs['name'], e=e))
|
||||
|
||||
|
||||
def main():
    """Module entry point: ensure a Zabbix value map is present or absent.

    Connects to the Zabbix server, compares the requested value map against
    the server's state, and creates/updates/deletes it as needed, honoring
    check mode throughout. Exits via module.exit_json / module.fail_json.
    """
    module = AnsibleModule(
        argument_spec=dict(
            server_url=dict(type='str', required=True, aliases=['url']),
            login_user=dict(type='str', required=True),
            login_password=dict(type='str', required=True, no_log=True),
            http_login_user=dict(type='str', required=False, default=None),
            http_login_password=dict(type='str', required=False, default=None, no_log=True),
            validate_certs=dict(type='bool', required=False, default=True),
            name=dict(type='str', required=True),
            state=dict(type='str', default='present', choices=['present', 'absent']),
            mappings=dict(
                type='list',
                elements='dict',
                options=dict(
                    value=dict(type='str', required=True),
                    map_to=dict(type='str', required=True)
                )
            ),
            timeout=dict(type='int', default=10)
        ),
        supports_check_mode=True,
        required_if=[
            ['state', 'present', ['mappings']],
        ]
    )

    if not HAS_ZABBIX_API:
        module.fail_json(msg=missing_required_lib('zabbix-api', url='https://pypi.org/project/zabbix-api/'), exception=ZBX_IMP_ERR)

    server_url = module.params['server_url']
    login_user = module.params['login_user']
    login_password = module.params['login_password']
    http_login_user = module.params['http_login_user']
    http_login_password = module.params['http_login_password']
    validate_certs = module.params['validate_certs']
    name = module.params['name']
    state = module.params['state']
    mappings = module.params['mappings']
    timeout = module.params['timeout']

    zbx = None
    # Login to Zabbix; http_login_* are HTTP basic-auth credentials, while
    # login_user/login_password authenticate against the Zabbix API itself.
    try:
        zbx = ZabbixAPI(server_url, timeout=timeout, user=http_login_user, passwd=http_login_password,
                        validate_certs=validate_certs)
        zbx.login(login_user, login_password)
        atexit.register(zbx.logout)
    except Exception as e:
        module.fail_json(msg="Failed to connect to Zabbix server: %s" % e)

    valuemap_exists, valuemap_object = check_if_valuemap_exists(module, zbx, name)

    parameters = construct_parameters(
        name=name,
        mappings=mappings
    )

    if valuemap_exists:
        valuemap_id = valuemap_object['valuemapid']
        if state == 'absent':
            if module.check_mode:
                module.exit_json(
                    changed=True,
                    msg="Value map would have been deleted. Name: {name}, ID: {_id}".format(
                        name=name,
                        _id=valuemap_id
                    )
                )
            # Bug fix: do not overwrite valuemap_id with delete_valuemap()'s
            # return value — the old code then reported the raw API response
            # (or None) as the ID in the message below.
            delete_valuemap(module, zbx, valuemap_id)
            module.exit_json(
                changed=True,
                msg="Value map deleted. Name: {name}, ID: {_id}".format(
                    name=name,
                    _id=valuemap_id
                )
            )
        else:
            # Renamed local from 'diff' to avoid shadowing the diff() helper.
            params_to_update, change_diff = get_update_params(module, zbx, valuemap_object, **parameters)
            if not params_to_update:
                module.exit_json(
                    changed=False,
                    msg="Value map is up to date: {name}".format(name=name)
                )
            if module.check_mode:
                module.exit_json(
                    changed=True,
                    diff=change_diff,
                    msg="Value map would have been updated. Name: {name}, ID: {_id}".format(
                        name=name,
                        _id=valuemap_id
                    )
                )
            # Bug fix: keep the known valuemap_id for the message instead of
            # reassigning it from update_valuemap(), which returned None.
            update_valuemap(
                module, zbx,
                valuemapid=valuemap_id,
                **params_to_update
            )
            module.exit_json(
                changed=True,
                diff=change_diff,
                msg="Value map updated. Name: {name}, ID: {_id}".format(
                    name=name,
                    _id=valuemap_id
                )
            )
    else:
        if state == "absent":
            # Nothing to remove.
            module.exit_json(changed=False)
        if module.check_mode:
            # Bug fix: the old message formatted _id=valuemap_id, but that
            # variable is only assigned in the exists-branch above, so check
            # mode on a not-yet-existing value map raised NameError.
            module.exit_json(
                changed=True,
                msg="Value map would have been created. Name: {name}".format(name=name)
            )
        # Bug fix: create_valuemap() returned None, so the old message
        # reported "ID: None"; report only the name.
        create_valuemap(module, zbx, **parameters)
        module.exit_json(
            changed=True,
            msg="Value map created: {name}".format(name=name)
        )
|
||||
|
||||
|
||||
# Standard Ansible module entry point: run main() when executed directly.
if __name__ == '__main__':
    main()
|
||||
Reference in New Issue
Block a user